This article collects and summarizes typical usage examples of the mxnet.init.Xavier method in Python. If you have been wondering how init.Xavier is used in practice, how to call it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore further usage examples of mxnet.init, the module this method belongs to.
The following shows 7 code examples of the init.Xavier method, sorted by popularity by default.
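Before diving into the examples, here is a minimal self-contained sketch of the typical pattern: pass an init.Xavier instance to a Gluon block's initialize() call. The tiny MLP, the layer sizes, and the rnd_type/factor_type/magnitude values are illustrative assumptions, not taken from any of the examples below.

import mxnet as mx
from mxnet import init
from mxnet.gluon import nn

# A small MLP purely for illustration; the layer sizes are arbitrary.
net = nn.Sequential()
net.add(nn.Dense(128, activation='relu'))
net.add(nn.Dense(10))

# Xavier (Glorot) initialization scales the random weights by the layer's
# fan-in/fan-out so that activation variance stays roughly constant across layers.
net.initialize(init=init.Xavier(rnd_type='gaussian', factor_type='in', magnitude=2), ctx=mx.cpu())

# Gluon infers parameter shapes lazily, so the weights are actually
# initialized on the first forward pass.
x = mx.nd.random.uniform(shape=(4, 64))
print(net(x).shape)   # (4, 10)

All seven examples below follow the same idea: either initialize a whole network with net.initialize(init=init.Xavier()), or attach the initializer to a single parameter via self.params.get(..., init=init.Xavier()).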
Example 1: net_define
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def net_define():
    net = nn.Sequential()
    with net.name_scope():
        net.add(nn.Embedding(config.MAX_WORDS, config.EMBEDDING_DIM))
        net.add(rnn.GRU(128, layout='NTC', bidirectional=True, num_layers=2, dropout=0.2))
        net.add(transpose(axes=(0, 2, 1)))
        # net.add(nn.MaxPool2D(pool_size=(config.MAX_LENGTH, 1)))
        # net.add(nn.Conv2D(128, kernel_size=(101, 1), padding=(50, 0), groups=128, activation='relu'))
        net.add(PrimeConvCap(8, 32, kernel_size=(1, 1), padding=(0, 0)))
        # net.add(AdvConvCap(8, 32, 8, 32, kernel_size=(1, 1), padding=(0, 0)))
        # integer division keeps the number of capsule locations an int under Python 3
        net.add(CapFullyBlock(8 * config.MAX_LENGTH // 2, num_cap=12, input_units=32, units=16, route_num=5))
        # net.add(CapFullyBlock(8*(config.MAX_LENGTH-8), num_cap=12, input_units=32, units=16, route_num=5))
        # net.add(CapFullyBlock(8, num_cap=12, input_units=32, units=16, route_num=5))
        net.add(nn.Dropout(0.2))
        # net.add(LengthBlock())
        net.add(nn.Dense(6, activation='sigmoid'))
    # Xavier initialization for every parameter in the network
    net.initialize(init=init.Xavier())
    return net
Example 2: net_define_eu
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def net_define_eu():
    net = nn.Sequential()
    with net.name_scope():
        net.add(nn.Embedding(config.MAX_WORDS, config.EMBEDDING_DIM))
        net.add(rnn.GRU(128, layout='NTC', bidirectional=True, num_layers=1, dropout=0.2))
        net.add(transpose(axes=(0, 2, 1)))
        net.add(nn.GlobalMaxPool1D())
        # net.add(FeatureBlock1())
        net.add(extendDim(axes=3))
        net.add(PrimeConvCap(16, 32, kernel_size=(1, 1), padding=(0, 0), strides=(1, 1)))
        net.add(CapFullyNGBlock(16, num_cap=12, input_units=32, units=16, route_num=3))
        net.add(nn.Dropout(0.2))
        net.add(nn.Dense(6, activation='sigmoid'))
    net.initialize(init=init.Xavier())
    return net
Example 3: main
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def main():
    args = parse_args()
    ctx = mx.gpu(0)
    scale_factor = 0.0005

    ##############################################################
    ###                     Load Dataset                      ###
    ##############################################################
    train_data = gluon.data.DataLoader(
        gluon.data.vision.MNIST(train=True, transform=transform),
        args.batch_size, shuffle=True)
    test_data = gluon.data.DataLoader(
        gluon.data.vision.MNIST(train=False, transform=transform),
        args.test_batch_size, shuffle=False)

    ##############################################################
    ##             Load network and set optimizer              ##
    ##############################################################
    capsule_net = CapsuleNet()
    capsule_net.initialize(ctx=ctx, init=init.Xavier())
    margin_loss = CapsuleMarginLoss()
    reconstructions_loss = L2Loss()
    # convert to static graph for speedup
    # capsule_net.hybridize()
    train(capsule_net, args.epochs, ctx, train_data, test_data, margin_loss,
          reconstructions_loss, args.batch_size, scale_factor)
Example 4: __init__
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def __init__(self, num_locations, num_cap, input_units, units,
             route_num=3, **kwargs):
    super(CapFullyBlock, self).__init__(**kwargs)
    self.route_num = route_num
    self.num_cap = num_cap
    self.units = units
    self.num_locations = num_locations
    # transformation weights registered as a trainable parameter
    # and initialized with Xavier
    self.w_ij = self.params.get(
        'weight',
        shape=(input_units, units, self.num_cap, self.num_locations),
        init=init.Xavier())
Example 5: __init__
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def __init__(self, classes, embedding_size, lamda, weight_initializer=init.Xavier(magnitude=2.24),
             dtype='float32', **kwargs):
    super().__init__(**kwargs)
    self._lamda = lamda
    self._classes = classes
    self._dtype = dtype
    # one learnable row per class, initialized with the given Xavier initializer;
    # initialization may be deferred until the parameter is first used
    self.centers = self.params.get('centers', shape=(classes, embedding_size), init=weight_initializer,
                                   dtype=dtype, allow_deferred_init=True)
Example 6: CapsNet
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def CapsNet(batch_size, ctx):
    net = nn.Sequential()
    with net.name_scope():
        net.add(nn.Conv2D(channels=256, kernel_size=9, strides=1, padding=(0, 0), activation='relu'))
        net.add(PrimaryConv(dim_vector=8, n_channels=32, kernel_size=9, strides=2, context=ctx, padding=(0, 0)))
        net.add(DigitCaps(num_capsule=10, dim_vector=16, context=ctx))
        net.add(Length())
    net.initialize(ctx=ctx, init=init.Xavier())
    return net
Example 7: get_built_in_network
# Module to import: from mxnet import init [as alias]
# Or: from mxnet.init import Xavier [as alias]
def get_built_in_network(name, *args, **kwargs):
    def _get_finetune_network(model_name, num_classes, ctx, **kwargs):
        kwargs['pretrained'] = True
        finetune_net = get_model(model_name, **kwargs)
        # change the last fully connected layer to match the number of classes
        with finetune_net.name_scope():
            if hasattr(finetune_net, 'output'):
                finetune_net.output = gluon.nn.Dense(num_classes)
                finetune_net.output.initialize(init.Xavier(), ctx=ctx)
            elif hasattr(finetune_net, '_fc'):
                finetune_net._fc = gluon.nn.Dense(num_classes)
                finetune_net._fc.initialize(init.Xavier(), ctx=ctx)
            else:
                assert hasattr(finetune_net, 'fc')
                finetune_net.fc = gluon.nn.Dense(num_classes)
                finetune_net.fc.initialize(init.Xavier(), ctx=ctx)
        # move every parameter (pretrained and newly initialized) to the target context
        finetune_net.collect_params().reset_ctx(ctx)
        # finetune_net.load_parameters(opt.resume_params, ctx=context, cast_dtype=True)
        finetune_net.hybridize()
        return finetune_net

    def _get_cifar_network(name, num_classes, ctx=mx.cpu(), *args, **kwargs):
        name = name.lower()
        assert 'cifar' in name
        net = get_model(name, *args, **kwargs)
        net.initialize(ctx=ctx)
        return net

    name = name.lower()
    if 'cifar' in name:
        return _get_cifar_network(name, *args, **kwargs)
    else:
        return _get_finetune_network(name, *args, **kwargs)