This page collects typical code examples of the torch.initial_seed method in Python. If you are wondering what exactly torch.initial_seed does, how to call it, or what real-world uses look like, the curated examples below may help. You can also explore further usage examples from the torch module this method belongs to.
The following 14 code examples of torch.initial_seed are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: worker_init_fn
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
# Also required: import random, import numpy as np
def worker_init_fn(worker_id):
    torch_seed = torch.initial_seed()
    random.seed(torch_seed + worker_id)
    if torch_seed >= 2**32:
        torch_seed = torch_seed % 2**32  # np.random.seed() only accepts 32-bit seeds
    np.random.seed(torch_seed + worker_id)
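Functions like this are meant to be handed to torch.utils.data.DataLoader, which calls them once per worker process. A minimal, hedged usage sketch (the TensorDataset here is a placeholder, not from the original source):

import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.arange(100).float())  # placeholder dataset
# Each worker calls worker_init_fn(worker_id) once at startup, so every
# worker's NumPy and random generators get a distinct, deterministic seed.
loader = DataLoader(dataset, batch_size=8, num_workers=2,
                    worker_init_fn=worker_init_fn)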
Example 2: worker_init_fn
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def worker_init_fn(pid):
    # Fold the 64-bit torch seed into NumPy's accepted range.
    np.random.seed(torch.initial_seed() % (2**31 - 1))
Example 3: worker_init
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def worker_init(worker_id):
    # seed_all is a project-specific helper that seeds every RNG in use.
    seed_all(torch.initial_seed() % 2**32)
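seed_all is a project-specific helper not shown on this page; a typical implementation, assumed here purely for illustration, seeds all three common RNGs:

import random
import numpy as np
import torch

def seed_all(seed):
    # Assumed sketch of the project's seed_all helper.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)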
Example 4: init_np_seed
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def init_np_seed(worker_id):
    seed = torch.initial_seed()
    np.random.seed(seed % 4294967296)  # 4294967296 == 2**32
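The modulo is necessary because torch.initial_seed() returns a Python int that may be a full 64-bit value, while np.random.seed() rejects anything outside [0, 2**32). A quick illustrative check:

import numpy as np
import torch

seed = torch.initial_seed()        # may be a 64-bit value
print(seed.bit_length())           # often more than 32 bits
np.random.seed(seed % 4294967296)  # fold into NumPy's accepted range
# np.random.seed(seed) would raise ValueError whenever seed >= 2**32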
Example 5: _worker_init_fn
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def _worker_init_fn(worker_id: int) -> None:
    """Sets a unique but deterministic random seed for background workers.

    Only sets the seed for NumPy because PyTorch and Python's own RNGs
    take care of reseeding on their own.
    See https://github.com/numpy/numpy/issues/9650."""
    # Modulo 2**32 because np.random.seed() only accepts values up to 2**32 - 1
    initial_seed = torch.initial_seed() % 2**32
    worker_seed = initial_seed + worker_id
    np.random.seed(worker_seed)
    # Be careful from where you call this! Not sure if this is concurrency-safe.
Example 6: set_torch_seed_to_all_gens
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def set_torch_seed_to_all_gens(_):
    # Propagate torch's per-worker seed to Python's and NumPy's generators.
    seed = torch.initial_seed() % (2**32 - 1)
    random.seed(seed)
    np.random.seed(seed)
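For the derived worker seeds to be reproducible across runs, the parent process must fix the base seed before building the loader; inside worker k, torch.initial_seed() then evaluates to base_seed + k. A hedged sketch (the TensorDataset is a placeholder):

import torch
from torch.utils.data import DataLoader, TensorDataset

torch.manual_seed(1234)  # fix the main-process RNG first
dataset = TensorDataset(torch.arange(10).float())  # placeholder dataset
# The DataLoader draws one base seed from the now-deterministic main RNG,
# so the seeds set_torch_seed_to_all_gens derives are stable across runs.
loader = DataLoader(dataset, num_workers=2,
                    worker_init_fn=set_torch_seed_to_all_gens)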
Example 7: test_seed
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def test_seed():
    torchfunc.seed(0)
    assert 0 == torch.initial_seed()
Example 8: test_seed_context_manager
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def test_seed_context_manager():
    first_seed = torch.initial_seed()
    with torchfunc.seed(0):
        assert 0 == torch.initial_seed()
    assert torch.initial_seed() == first_seed
Example 9: test_seed_decorator
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def test_seed_decorator():
    first_seed = torch.initial_seed()

    @torchfunc.seed(0)
    def wrapped():
        assert 0 == torch.initial_seed()

    wrapped()
    assert torch.initial_seed() == first_seed
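The save-and-restore behavior these tests rely on can be approximated with plain PyTorch. This is a hedged sketch of the idea, not torchfunc's actual implementation; note that re-applying the old seed resets the stream rather than restoring its exact position, which would need torch.get_rng_state()/torch.set_rng_state():

import contextlib
import torch

@contextlib.contextmanager
def temporary_seed(seed):
    previous = torch.initial_seed()  # remember the current seed
    torch.manual_seed(seed)
    try:
        yield
    finally:
        torch.manual_seed(previous)  # re-seed with the remembered value

with temporary_seed(0):
    assert torch.initial_seed() == 0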
Example 10: __init__
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def __init__(self, value, cuda: bool = False):
    self.value = value
    self.cuda = cuda
    # Remember the previous seed so it can be restored later.
    self._last_seed = torch.initial_seed()
    np.random.seed(self.value)
    torch.manual_seed(self.value)
    if self.cuda:
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
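Storing _last_seed suggests the object can later undo the reseeding, for instance when used as a context manager. A hypothetical _restore helper, assumed here only for illustration:

def _restore(self):
    # Hypothetical: re-apply the seed captured in __init__.
    torch.manual_seed(self._last_seed)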
Example 11: __init__
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def __init__(self, sample_len, n_samples, xyz, sigma, b, r, dt=0.01, washout=0, normalize=False, seed=None):
    """
    Constructor
    :param sample_len: Length of the time series in time steps.
    :param n_samples: Number of samples to generate.
    :param xyz: Initial (x, y, z) state of the system.
    :param sigma: Lorenz sigma parameter.
    :param b: Lorenz b parameter.
    :param r: Lorenz r parameter.
    """
    # Properties
    self.sample_len = sample_len
    self.n_samples = n_samples
    self.xyz = xyz
    self.dt = dt
    self.normalize = normalize
    self.washout = washout
    self.sigma = sigma
    self.b = b
    self.r = r
    # Seed (torch.initial_seed() takes no argument; manual_seed is the setter)
    if seed is not None:
        torch.manual_seed(seed)
    # end if
    # Generate data set
    self.outputs = self._generate()
# end __init__
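This constructor appears to come from EchoTorch's Lorenz-attractor dataset. Assuming the class is named LorenzAttractor (an assumption; the name is not shown above), instantiating it with the classic chaotic parameter values might look like:

# Hedged usage sketch; class name and parameter values are assumptions.
dataset = LorenzAttractor(
    sample_len=1000, n_samples=10,
    xyz=[1.0, 1.0, 1.0],              # initial state
    sigma=10.0, b=8.0 / 3.0, r=28.0,  # standard chaotic regime
    dt=0.01, seed=42,
)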
Example 12: __init__
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def __init__(self, sample_len, n_samples, xy, a, b, washout=0, normalize=False, seed=None):
    """
    Constructor
    :param sample_len: Length of the time series in time steps.
    :param n_samples: Number of samples to generate.
    :param xy: Initial (x, y) state of the system.
    :param a: System parameter a.
    :param b: System parameter b.
    """
    # Properties
    self.sample_len = sample_len
    self.n_samples = n_samples
    self.a = a
    self.b = b
    self.xy = xy
    self.normalize = normalize
    self.washout = washout
    # Seed (torch.initial_seed() takes no argument; manual_seed is the setter)
    if seed is not None:
        torch.manual_seed(seed)
    # end if
    # Generate data set
    self.outputs = self._generate()
# end __init__
Example 13: __getitem__
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def __getitem__(self, item):
    if self.datapipeline is None:
        # Build the datapipeline with a random seed the first time
        # __getitem__ is called; usually the dataset has already been
        # spawned into a subprocess at this point.
        seed = (torch.initial_seed() + item * self._SEED_STEP +
                self.ext_seed * self._EXT_SEED_STEP) % self._SEED_DIVIDER
        self.datapipeline = datapipeline_builder.build(self.task,
                                                       self.cfg,
                                                       seed=seed)
        logger.info("AdaptorDataset #%d built datapipeline with seed=%d" %
                    (item, seed))
    training_data = self.datapipeline[item]
    return training_data
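_SEED_STEP, _EXT_SEED_STEP, and _SEED_DIVIDER are class constants not shown here. With illustrative placeholder values, the arithmetic spreads items over distinct, deterministic seeds:

# Placeholder constants, assumed only to demonstrate the derivation.
_SEED_STEP, _EXT_SEED_STEP, _SEED_DIVIDER = 10007, 10007**2, 2**30
base, ext_seed = 42, 3
for item in range(3):
    seed = (base + item * _SEED_STEP + ext_seed * _EXT_SEED_STEP) % _SEED_DIVIDER
    print(item, seed)  # three distinct, reproducible seeds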
Example 14: init_random
# Required module: import torch [as alias]
# Or: from torch import initial_seed [as alias]
def init_random(seed: int = None):
    """
    Initializes the random generators to allow seeding.

    Args:
        seed (int): The seed used for all random generators.
    """
    global GLOBAL_SEED  # pylint: disable=global-statement
    if GLOBAL_SEED is not None:
        return
    if seed is None:
        tmp_random = numpy.random.RandomState(None)
        GLOBAL_SEED = tmp_random.randint(2**32 - 1, dtype='uint32')
    else:
        GLOBAL_SEED = seed
    # initialize random generators
    numpy.random.seed(GLOBAL_SEED)
    random.seed(GLOBAL_SEED)
    try:
        # try to load torch and initialize its random generator if available
        import torch
        torch.cuda.manual_seed_all(GLOBAL_SEED)  # GPU
        torch.manual_seed(GLOBAL_SEED)  # CPU
    except ImportError:
        pass
    try:
        # try to load tensorflow and initialize its random generator if available
        import tensorflow
        tensorflow.random.set_random_seed(GLOBAL_SEED)
    except ImportError:
        pass
    # check whether all calls to torch.* use the same random generator (i.e. same instance)
    # works in a short test -- MS
    # print(torch.initial_seed())
    # logger.info("Seed is {:d}".format(GLOBAL_SEED))
    return GLOBAL_SEED
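A short hedged usage sketch: the function expects a module-level GLOBAL_SEED = None to exist, is called once at program start, and later calls become no-ops:

GLOBAL_SEED = None  # module-level default assumed by init_random

seed = init_random()       # draw and apply a random global seed
print("global seed:", seed)
assert init_random(123) is None  # no-op: GLOBAL_SEED is already set

import torch
assert torch.initial_seed() == seed  # torch's CPU RNG was seeded with it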