This article collects typical usage examples of the Python function neon.util.param.req_param. If you have been wondering how req_param works, how to call it, or where to find examples of it, the hand-picked code samples below should help.
Fifteen code examples of req_param are shown below, sorted by popularity by default.
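Every example pairs req_param with its companion opt_param from neon.util.param: opt_param assigns a default to each listed attribute that is not already set, while req_param raises an error if any listed attribute is missing. The following is a minimal sketch of those semantics for orientation only, not the verbatim neon source:

def req_param(obj, paramlist):
    # Fail fast if a mandatory attribute was never supplied.
    for param in paramlist:
        if not hasattr(obj, param):
            raise ValueError("required parameter %s missing for %s" %
                             (param, obj.__class__.__name__))

def opt_param(obj, paramlist, default_value=None):
    # Fill in a default for each attribute the caller did not set.
    for param in paramlist:
        if not hasattr(obj, param):
            setattr(obj, param, default_value)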
Example 1: __init__
def __init__(self, **kwargs):
    # Optional settings: each gets a default unless the caller overrides it.
    opt_param(self, ['preprocess_done'], False)
    opt_param(self, ['dotransforms', 'square_crop'], False)
    opt_param(self, ['mean_norm', 'unit_norm'], False)
    opt_param(self, ['shuffle_macro'], False)
    opt_param(self, ['tdims'], 0)
    opt_param(self, ['label_list'], ['l_id'])
    opt_param(self, ['num_channels'], 3)
    opt_param(self, ['num_workers'], 6)
    opt_param(self, ['backend_type'], 'np.float32')

    self.__dict__.update(kwargs)

    # Map the string dtype spelling onto the actual numpy dtype.
    if self.backend_type in ['float16', 'np.float16', 'numpy.float16']:
        self.backend_type = np.float16
    elif self.backend_type in ['float32', 'np.float32', 'numpy.float32']:
        self.backend_type = np.float32
    else:
        raise ValueError('Datatype not understood')
    logger.warning("Imageset initialized with dtype %s", self.backend_type)

    # Required settings: raise if any of these were not supplied.
    req_param(self, ['cropped_image_size', 'output_image_size',
                     'imageset', 'save_dir', 'repo_path', 'macro_size'])

    opt_param(self, ['image_dir'], os.path.join(self.repo_path,
                                                self.imageset))

    self.rgb = (self.num_channels == 3)
    self.norm_factor = 128. if self.mean_norm else 256.
    self.img_dtype = np.int8
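Example 1 shows the canonical pattern: opt_param seeds defaults, self.__dict__.update(kwargs) layers the caller's settings on top, and req_param then verifies that every mandatory setting arrived. A hypothetical toy class (not part of neon, reusing the helper sketch above) demonstrating the idiom:

class ToyLayer(object):
    # Hypothetical class illustrating the defaults-then-validate idiom.
    def __init__(self, **kwargs):
        opt_param(self, ['stride'], 1)      # optional, defaults to 1
        self.__dict__.update(kwargs)        # caller-supplied overrides
        req_param(self, ['nin', 'nout'])    # mandatory, no default

ToyLayer(nin=32, nout=64)    # ok: stride falls back to 1
# ToyLayer(nin=32)           # would raise ValueError: nout was never set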
Example 2: link_local
def link_local(self):
    req_param(self, ['nifm', 'ifmshape', 'fshape'])

    opt_param(self, ['ofmlocs', 'links'])
    opt_param(self, ['deltasbuf', 'outputbuf'])

    opt_param(self, ['nofm'], self.nifm)
    opt_param(self, ['pooling'], False)
    opt_param(self, ['stride'], 1)
    opt_param(self, ['pad'], 0)

    assert len(self.ifmshape) == len(self.fshape)
    ofmshape = []
    for dim in range(len(self.ifmshape)):
        assert self.ifmshape[dim] >= self.fshape[dim]
        num = self.ifmshape[dim] - self.fshape[dim] + 2 * self.pad
        ofmshape.extend([num // self.stride + 1])
    self.ofmshape = tuple(ofmshape)
    self.negpad = -self.pad
    self.ifmsize = np.prod(self.ifmshape)
    self.ofmsize = np.prod(self.ofmshape)
    self.fpsize = np.prod(self.fshape)
    self.fsize = self.nifm * self.fpsize
    self.nout = self.nofm * self.ofmsize
    logger.debug('name=%s, nifm=%d, ifmshape=%s, ofmshape=%s',
                 self.name, self.nifm, self.ifmshape, self.ofmshape)
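link_local derives each output feature-map dimension with the standard convolution/pooling size formula, ofm = (ifm - f + 2 * pad) // stride + 1. A quick standalone check of the loop's arithmetic (values chosen purely for illustration):

ifmshape, fshape, pad, stride = (28, 28), (5, 5), 0, 1
ofmshape = tuple((i - f + 2 * pad) // stride + 1
                 for i, f in zip(ifmshape, fshape))
assert ofmshape == (24, 24)   # a 5x5 filter over 28x28 leaves 24x24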
Example 3: __init__
def __init__(self, **kwargs):
    self.accumulate = True
    # Reusing deltas not supported for RNNs yet
    self.reuse_deltas = False
    super(RNN, self).__init__(**kwargs)
    req_param(self, ['unrolls'])
    self.rec_layer = self.layers[1]
Example 4: initialize
def initialize(self, kwargs):
    super(DataLayer, self).initialize(kwargs)
    self.reset_counter()
    if self.is_local is True:
        req_param(self, ['nofm', 'ofmshape'])
        self.nout = self.nofm * np.prod(self.ofmshape)
    else:
        req_param(self, ['nout'])
Example 5: initialize
def initialize(self, kwargs):
    super(RecurrentCostLayer, self).initialize(kwargs)
    req_param(self, ['cost', 'ref_layer'])
    opt_param(self, ['ref_label'], 'targets')
    self.targets = None
    self.cost.olayer = self.prev_layer
    self.cost.initialize(kwargs)
    self.deltas = self.cost.get_deltabuf()
Example 6: __init__
def __init__(self, **kwargs):
    self.initialized = False
    self.__dict__.update(kwargs)
    req_param(self, ['layers', 'batch_size'])

    opt_param(self, ['step_print'], -1)
    opt_param(self, ['accumulate'], False)
    opt_param(self, ['reuse_deltas'], True)
    opt_param(self, ['timing_plots'], False)
Example 7: __init__
def __init__(self, **kwargs):
    self.initialized = False
    self.__dict__.update(kwargs)
    req_param(self, ['dataset', 'model'])
    opt_param(self, ['backend'])
    opt_param(self, ['live'], False)
    if self.backend is not None:
        self.initialize(self.backend)
Example 8: __init__
def __init__(self, **kwargs):
    self.initialized = False
    self.__dict__.update(kwargs)
    req_param(self, ["layers", "batch_size"])

    opt_param(self, ["step_print"], -1)
    opt_param(self, ["accumulate"], False)
    opt_param(self, ["reuse_deltas"], True)
    opt_param(self, ["timing_plots"], False)
    opt_param(self, ["serialize_schedule"])
Example 9: initialize
def initialize(self, kwargs):
    if self.initialized:
        return
    self.__dict__.update(kwargs)
    req_param(self, ['backend', 'batch_size'])
    self.output = None
    self.deltas = None
    self.initialized = True
Example 10: __init__
def __init__(self, **kwargs):
    self.accumulate = True
    super(Balance, self).__init__(**kwargs)
    req_param(self, ['classlayers', 'stylelayers'])
    self.cost_layer = self.classlayers[-1]
    self.out_layer = self.layers[-2]
    self.class_layer = self.classlayers[-2]
    self.branch_layer = self.stylelayers[-2]
    self.pathways = [self.layers, self.classlayers, self.stylelayers]
    self.kwargs = kwargs
Example 11: initialize
def initialize(self, kwargs):
    super(CrossMapPoolingLayer, self).initialize(kwargs)
    req_param(self, ['nofm'])

    self.initialize_local()
    self.allocate_output_bufs()
    self.allocate_param_bufs()
    opt_param(self, ['updatebuf'], None)
    if isinstance(self.backend, CPU):
        self.updatebuf = self.backend.empty((1, 1))
Example 12: initialize
def initialize(self, kwargs):
    req_param(self, ["ksize", "alpha", "beta"])
    self.alpha = self.alpha * 1.0 / self.ksize
    super(CrossMapResponseNormLayer, self).initialize(kwargs)
    self.nout = self.nin
    self.ofmshape, self.nofm = self.ifmshape, self.nifm
    self.allocate_output_bufs()
    self.tempbuf = None
    if isinstance(self.backend, CPU) and not self.prev_layer.is_data:
        self.tempbuf = self.backend.empty(
            (1, self.ifmshape[-2], self.ifmshape[-1], self.batch_size))
Example 13: initialize
def initialize(self, kwargs):
    """
    Initialize the Batch Normalization transform. This function will be
    called from WeightLayer.initialize with a reference to the layer.

    Arguments:
        _eps (numeric, optional): value used for numerical stability when
                                  normalizing by variance
        _iscale (numeric, optional): explicitly set an affine scale value
                                     to be used in inference instead of
                                     the calculated scale from training
        _ishift (numeric, optional): explicitly set an affine shift value
                                     to be used in inference instead of
                                     the calculated shift from training
    """
    self.__dict__.update(kwargs)
    self.dtype = self.layer.weight_dtype
    self.bigtype = np.float32 if self.dtype is np.float16 else self.dtype

    opt_param(self, ['_iscale', '_ishift'])
    opt_param(self, ['_eps'], 1e-6)
    req_param(self, ['layer'])

    self.backend = self.layer.backend
    self.is_local = self.layer.is_local
    self.batch_size = self.layer.batch_size
    if self.is_local:
        self.in1d = (self.layer.nofm, 1)
        self.ofmsize = self.layer.ofmsize
        self.orig_shape = (self.layer.nofm * self.ofmsize, self.batch_size)
        self.in_shape = (self.layer.nofm, self.ofmsize * self.batch_size)
    else:
        self.in_shape = (self.layer.nout, self.batch_size)
        self.in1d = (self.layer.nout, 1)

    self.train_mode = True
    logger.info("BatchNormalization set to train mode")

    self.nbatches = 0

    self._xhat = self.backend.zeros(self.in_shape, dtype=self.dtype)
    self._mean = self.backend.zeros(self.in1d, dtype=self.bigtype)
    self._vars = self.backend.zeros(self.in1d, dtype=self.bigtype)

    # Global mean and var to be used during inference
    self._gmean = self.backend.zeros(self.in1d, dtype=self.bigtype)
    self._gvars = self.backend.zeros(self.in1d, dtype=self.bigtype)

    # learned params and their update buffers
    self._beta = self.backend.zeros(self.in1d, dtype=self.bigtype)
    self._gamma = self.backend.ones(self.in1d, dtype=self.bigtype)
    self.layer.params.extend([self._beta, self._gamma])
    self._beta_updates = self.backend.zeros(self.in1d, dtype=self.bigtype)
    self._gamma_updates = self.backend.zeros(self.in1d, dtype=self.bigtype)
    self.layer.updates.extend([self._beta_updates, self._gamma_updates])
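The buffers allocated in Example 13 back the standard batch-normalization transform: _mean and _vars hold per-feature batch statistics, _xhat the normalized input, _gamma and _beta the learned affine parameters, and _gmean/_gvars the running statistics used at inference. A NumPy sketch of the forward computation these buffers support (illustrative only, not neon's actual fprop):

import numpy as np

def batch_norm_fprop(x, gamma, beta, eps=1e-6):
    # x has shape (features, batch); statistics are taken per feature.
    mean = x.mean(axis=1, keepdims=True)     # corresponds to _mean
    var = x.var(axis=1, keepdims=True)       # corresponds to _vars
    xhat = (x - mean) / np.sqrt(var + eps)   # corresponds to _xhat
    return gamma * xhat + beta               # affine scale and shift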
Example 14: initialize
def initialize(self, kwargs):
    self.__dict__.update(kwargs)
    opt_param(self, ['backend'], self.olayer.backend)
    opt_param(self, ['batch_size'], self.olayer.batch_size)
    opt_param(self, ['olayer_data'], 'output')
    req_param(self.olayer, [self.olayer_data])
    # if not hasattr(self.olayer, self.olayer_data):
    #     raise ValueError("Layer %s does not have buffer %s" %
    #                      (self.olayer.name, self.olayer_data))
    # else:
    self.set_outputbuf(getattr(self.olayer, self.olayer_data))
Example 15: initialize
def initialize(self, kwargs):
    super(WeightLayer, self).initialize(kwargs)
    req_param(self, ['weight_init', 'lrule_init', 'nin', 'nout'])

    opt_param(self, ['accumulate'], False)
    opt_param(self, ['batch_norm'], False)

    self.weight_init.initialize(self.backend)
    self.params = []
    self.updates = []

    if self.batch_norm:
        self.bn = BatchNorm()
        kwargs['layer'] = self
        self.bn.initialize(kwargs)