This article collects typical usage examples of the Python method utils.util.set_random_seed. If you are wondering what util.set_random_seed does, how to call it, or what it looks like in real code, the curated example below may help. You can also explore further usage examples of the utils.util module that this method belongs to.
The following shows 1 code example of the util.set_random_seed method.
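For context: the utils.util source is not reproduced on this page, but in PyTorch training code a set_random_seed helper of this kind usually seeds Python's random module, NumPy, and PyTorch (CPU and all CUDA devices) in one call. The following is only a minimal sketch under that assumption, not the repository's actual implementation:

import random
import numpy as np
import torch

def set_random_seed(seed):
    # Seed every RNG the training code may touch so runs are reproducible.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)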
Example 1: main
# Required import: from utils import util [as alias]
# Or: from utils.util import set_random_seed [as alias]
import argparse
import random

import numpy as np
import torch

from utils import util
# `option`, `init_dist`, `SFTMD_train` and `IKC_train` are project-local helpers
# (typically `import options.options as option` plus functions defined alongside
# this training script); their exact import paths depend on the repository layout.


def main():
    #### setup options of the three networks
    parser = argparse.ArgumentParser()
    parser.add_argument('-opt_P', type=str, help='Path to option YAML file of Predictor.')
    parser.add_argument('-opt_C', type=str, help='Path to option YAML file of Corrector.')
    parser.add_argument('-opt_F', type=str, help='Path to option YAML file of SFTMD_Net.')
    parser.add_argument('--launcher', choices=['none', 'pytorch'], default='none',
                        help='job launcher')
    parser.add_argument('--local_rank', type=int, default=0)
    args = parser.parse_args()
    opt_P = option.parse(args.opt_P, is_train=True)
    opt_C = option.parse(args.opt_C, is_train=True)
    opt_F = option.parse(args.opt_F, is_train=True)

    # convert to NoneDict, which returns None for missing keys
    opt_P = option.dict_to_nonedict(opt_P)
    opt_C = option.dict_to_nonedict(opt_C)
    opt_F = option.dict_to_nonedict(opt_F)

    #### random seed
    seed = opt_P['train']['manual_seed']
    if seed is None:
        seed = random.randint(1, 10000)
    util.set_random_seed(seed)

    #### create a PCA matrix from a large batch of random kernels
    batch_ker = util.random_batch_kernel(batch=30000, l=opt_P['kernel_size'], sig_min=0.2,
                                         sig_max=4.0, rate_iso=1.0, scaling=3, tensor=False)
    print('batch kernel shape: {}'.format(batch_ker.shape))
    b = np.size(batch_ker, 0)
    batch_ker = batch_ker.reshape((b, -1))
    pca_matrix = util.PCA(batch_ker, k=opt_P['code_length']).float()
    print('PCA matrix shape: {}'.format(pca_matrix.shape))

    #### distributed training settings
    if args.launcher == 'none':  # disable distributed training
        opt_P['dist'] = False
        opt_F['dist'] = False
        opt_C['dist'] = False
        rank = -1
        world_size = 1  # single process; also avoids a NameError further down
        print('Disabled distributed training.')
    else:
        opt_P['dist'] = True
        opt_F['dist'] = True
        opt_C['dist'] = True
        init_dist()
        world_size = torch.distributed.get_world_size()  # number of processes in the current process group
        rank = torch.distributed.get_rank()  # rank of the current process

    torch.backends.cudnn.benchmark = True
    # torch.backends.cudnn.deterministic = True

    ###### SFTMD train ######
    SFTMD_train(opt_F, rank, world_size, pca_matrix)

    # choose small opt for SFTMD test
    opt_F = opt_F['sftmd']

    ###### Predictor & Corrector train ######
    IKC_train(opt_P, opt_C, opt_F, rank, world_size, pca_matrix)
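The example also calls util.random_batch_kernel and util.PCA without showing them. As a rough illustration of the PCA step only: given the (30000, kernel_size**2) matrix of flattened kernels, a projection onto the top-k principal directions can be computed from an SVD of the centered data. The function below is a hedged sketch with an assumed (k, D) output convention; it is not the repository's util.PCA:

import numpy as np
import torch

def pca_projection(data, k=10):
    # data: (N, D) NumPy array of flattened kernels; returns a (k, D) projection matrix.
    X = torch.from_numpy(np.asarray(data, dtype=np.float64))
    X = X - X.mean(dim=0, keepdim=True)   # center each kernel dimension
    U, S, V = torch.svd(X.t())            # columns of U are principal directions in kernel space
    return U[:, :k].t()                   # keep the top-k directions, shape (k, D)

Note that util.set_random_seed is called before the 30000 kernels are sampled, so a fixed manual_seed in the Predictor options makes the kernel batch, and therefore the PCA matrix shared by SFTMD_train and IKC_train, reproducible across runs.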