本文整理汇总了Python中singa.utils.utility.generate_name函数的典型用法代码示例。如果您正苦于以下问题:Python generate_name函数的具体用法?Python generate_name怎么用?Python generate_name使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了generate_name函数的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
def __init__(self, dim, w_param=None, **kwargs):
    """Build the hidden layer of an RNN language model.

    dim     = (int)       // hidden dimension; kept for interface
                          //  compatibility (not read directly here)
    w_param = (Parameter) // optional weight parameter; when omitted a
                          //  default one is created
    **kwargs              // forwarded to set_param_field for an
                          //  existing w_param (e.g. w_lr, w_wd)
    """
    super(RNNLM, self).__init__(name=generate_name('hidden', 1),
                                user_type='kHidden')
    if w_param is None:  # `is None`, not `== None` (PEP 8)
        # default: uniform
        # NOTE(review): `init` is not defined in this snippet — presumably
        # a module-level default init config; confirm in the full source.
        w_param = Parameter(name=generate_name('w'), init=init)
    else:
        # caller supplied a weight param: update its lr/wd/init fields
        set_param_field(w_param.param, 'w', True, **kwargs)
    setval(self.layer, param=w_param.param)
示例2: __init__
def __init__(self):
    """Create an accuracy (performance-measure) layer."""
    layer_name = 'accuracy'
    layer_type = enumLayerType(layer_name)
    self.name = layer_name
    self.layer_type = layer_type
    super(Accuracy, self).__init__(name=generate_name(layer_name),
                                   type=layer_type)
示例3: __init__
def __init__(self, lossname, topk=1, **kwargs):
    """Create a loss layer.

    required
      lossname = (string) // softmaxloss, euclideanloss
    optional
      topk     = (int)    // top-k setting, used by softmaxloss only
    """
    ltype = enumLayerType(lossname)
    self.layer_type = ltype
    super(Loss, self).__init__(name=generate_name(lossname),
                               type=ltype, **kwargs)
    # only softmaxloss carries an extra top-k configuration field
    if lossname == 'softmaxloss':
        self.layer.softmaxloss_conf.topk = topk
示例4: set_param_field
def set_param_field(param, pname, changename=False, withnumber=True, **kwargs):
    '''Set/update the lr/wd scale and init fields of a parameter proto.

    param = (ParamProto)
    pname = (string) // 'w' for weight, or 'b' for bias
    changename = (bool) // update parameter name if True
    withnumber = (bool) // add layer number if True
    **kwargs
      w_lr = (float) // learning rate multiplier for weight, used to
                     // scale the learning rate when updating parameters.
      w_wd = (float) // weight decay multiplier for weight, used to
                     // scale the weight decay when updating parameters.
      b_lr = (float) // learning rate multiplier for bias
      b_wd = (float) // weight decay multiplier for bias
    '''
    assert pname == 'w' or pname == 'b', 'pname should be w or b'
    # pop the scale overrides for this kind so they do not leak into initkv
    lr_ = kwargs.pop(pname + '_lr', param.lr_scale)
    wd_ = kwargs.pop(pname + '_wd', param.wd_scale)
    # remaining 'w_*' (or 'b_*') kwargs become init fields, prefix stripped
    prefix = pname + '_'
    initkv = {key[len(prefix):]: val
              for key, val in kwargs.items() if key.startswith(prefix)}
    field = {'lr_scale': lr_, 'wd_scale': wd_}
    # Set/update parameter fields; rename only auto-generated ('param...')
    # names or when the caller explicitly asked for it
    if param.name.startswith('param') or changename:
        if 'level' in kwargs:  # parameter name for RBM
            pname += str(kwargs['level'])
        setval(param, name=generate_name(pname, withnumber=withnumber), **field)
    else:
        setval(param, **field)
    # Set/update parameter init fields
    setval(param.init, **initkv)
示例5: __init__
def __init__(self, **kwargs):
    '''Build a Param proto from keyword configuration.

    optional
      **kwargs
        name  = (string) // parameter name
        lr    = (float)  // learning rate multiplier
        wd    = (float)  // weight decay multiplier
        init  = (string) // init type {'constant','uniform','gaussian'}
        value = (int)    // value for 'constant'
        scale = (float)  // [low=-scale, high=scale] for 'uniform'
        low   = (float)  // low value for 'uniform'
        high  = (float)  // high value for 'uniform'
        mean  = (float)  // mean for 'gaussian'
        std   = (float)  // std for 'gaussian'
    '''
    fields = {'lr_scale': kwargs.get('lr', 1),
              'wd_scale': kwargs.get('wd', 1)}
    self.param = Message('Param', **fields).proto
    if 'name' not in kwargs:
        # no explicit name: generate a numbered default
        setval(self.param, name=generate_name('param', 1))
    else:
        pname = kwargs['name']
        # parameter name for RBM: append the level, and disambiguate
        # the bias name with a trailing '2'
        # NOTE(review): nesting of the 'b2' suffix under 'level' follows
        # upstream SINGA source; the extracted snippet lost indentation.
        if 'level' in kwargs:
            pname += str(kwargs['level'])
            if pname[0] == 'b':
                pname += '2'
        setval(self.param, name=pname)
    if 'share_from' in kwargs:
        setval(self.param, share_from=kwargs['share_from'])
    if 'init' in kwargs:
        init_values = get_init_values(kwargs['init'], **kwargs)
        if kwargs['init'] != 'none':
            pgen = Message('ParamGen', type=enumInitMethod(kwargs['init']),
                           **init_values)
            del kwargs['init']
            setval(self.param, init=pgen.proto)
    else:  # default: uniform
        pgen = Message('ParamGen', type=enumInitMethod('uniform'))
        setval(self.param, init=pgen.proto)