

Python initializers.Uniform method code examples

This article collects typical usage examples of the Python method chainer.initializers.Uniform. If you are wondering what initializers.Uniform does, how to use it, or want to see concrete examples, the curated code samples below may help. You can also explore further usage examples from the chainer.initializers module.


The following presents 8 code examples of the initializers.Uniform method, sorted by popularity by default.
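Before the individual examples, here is a minimal usage sketch (the vocabulary size and unit counts are arbitrary placeholder values, not taken from any of the projects below): Uniform(scale) draws each initial weight independently from the interval [-scale, scale], and the initializer object is handed to a link through its initialW or initial_bias argument.

import chainer.links as L
from chainer import initializers

# Embedding matrix whose entries start in [-1/128, 1/128]; 1000 and 128 are placeholder sizes.
embed = L.EmbedID(1000, 128, initialW=initializers.Uniform(1. / 128))
# Fully connected layer with weights drawn from [-0.05, 0.05].
linear = L.Linear(128, 64, initialW=initializers.Uniform(scale=0.05))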

Example 1: __init__

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def __init__(self, vocab, vocab_ngram_tokens, n_units, n_units_char, dropout,
             subword):  # dropout ratio, zero indicates no dropout
    super(RNN, self).__init__()
    with self.init_scope():
        self.embed = L.EmbedID(
            len(vocab_ngram_tokens.lst_words) + 2, n_units_char,
            initialW=I.Uniform(1. / n_units_char))  # ngram token embeddings; plus 2 for OOV and end symbol
        if 'lstm' in subword:
            self.mid = L.LSTM(n_units_char, n_units_char * 2)
        self.out = L.Linear(n_units_char * 2, n_units_char)  # the feed-forward output layer
        if 'bilstm' in subword:
            self.mid_b = L.LSTM(n_units_char, n_units_char * 2)
            self.out_b = L.Linear(n_units_char * 2, n_units_char)

        self.n_ngram = vocab_ngram_tokens.metadata["max_gram"] - vocab_ngram_tokens.metadata["min_gram"] + 1
        self.final_out = L.Linear(n_units * (self.n_ngram), n_units)

        self.dropout = dropout
        self.vocab = vocab
        self.vocab_ngram_tokens = vocab_ngram_tokens
        self.subword = subword
Developer ID: vecto-ai, Project: vecto, Lines of code: 23, Source file: subword.py

Example 2: __init__

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def __init__(self, n_codebooks, n_centroids, n_vocab, embed_dim, tau, embed_mat):
    """
    M: number of codebooks (subcodes)
    K: number of vectors in each codebook
    """
    super(EmbeddingCompressor, self).__init__()
    self.M = n_codebooks
    self.K = n_centroids
    self.n_vocab = n_vocab
    self.embed_dim = embed_dim
    self.tau = tau

    M = self.M
    K = self.K
    u_init = I.Uniform(scale=0.01)
    with self.init_scope():
        self.embed_mat = L.EmbedID(n_vocab, embed_dim, initialW=embed_mat)
        self.l1 = L.Linear(embed_dim, M * K // 2, initialW=u_init, initial_bias=u_init)
        self.l2 = L.Linear(M * K // 2, M * K, initialW=u_init, initial_bias=u_init)
        self.codebook = chainer.Parameter(initializer=u_init, shape=(M * K, embed_dim))
Developer ID: chainer, Project: models, Lines of code: 22, Source file: net.py

Example 3: create_initializer

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def create_initializer(init_type, scale=None, fillvalue=None):
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type)) 
Developer ID: fabiencro, Project: knmt, Lines of code: 33, Source file: rnn_cells.py
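As a brief usage note for the factory above (the arguments here are illustrative, not taken from knmt): the returned object is an ordinary Chainer initializer and can be passed straight to a link.

# Hypothetical call of create_initializer; 'uniform' with scale=0.05 is an example choice.
w_init = create_initializer('uniform', scale=0.05)
b_init = create_initializer('zero')
layer = L.Linear(256, 256, initialW=w_init, initial_bias=b_init)  # assumes chainer.links is imported as L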

Example 4: __call__

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def __call__(self, array):
    scale = 1 / np.sqrt(array.shape[-1])
    initializers.Uniform(scale)(array)
Developer ID: pfnet-research, Project: chainer-compiler, Lines of code: 5, Source file: bbox_head.py
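Chainer also accepts a callable that fills the given array as an initializer, so an object with a __call__ like the one above can be passed directly as initialW. A minimal sketch under that assumption (the class name ScaledUniform and the layer sizes are hypothetical, not from chainer-compiler):

import numpy as np
import chainer
import chainer.links as L
from chainer import initializers

class ScaledUniform(chainer.initializer.Initializer):
    # Fill the array uniformly in [-1/sqrt(fan_in), 1/sqrt(fan_in)].
    def __call__(self, array):
        scale = 1 / np.sqrt(array.shape[-1])
        initializers.Uniform(scale)(array)

fc = L.Linear(1024, 4, initialW=ScaledUniform())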

Example 5: __init__

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def __init__(self, n_vocab, n_units, loss_func):
    super(ContinuousBoW, self).__init__()

    with self.init_scope():
        self.embed = L.EmbedID(n_vocab + 2, n_units, initialW=I.Uniform(1. / n_units))  # plus 2 for OOV and end symbol
        self.loss_func = loss_func
Developer ID: vecto-ai, Project: vecto, Lines of code: 8, Source file: word.py

Example 6: __init__

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def __init__(self, n_vocab, n_units, loss_func):
    super(ContinuousBoW, self).__init__()

    with self.init_scope():
        self.embed = L.EmbedID(
            n_vocab, n_units, initialW=I.Uniform(1. / n_units))
        self.loss_func = loss_func
Developer ID: chainer, Project: chainer, Lines of code: 9, Source file: train_word2vec.py

Example 7: generate_params

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def generate_params(self):
    initial_bias = initializers.Uniform(scale=1., dtype=self.dtype)
    return initial_bias,  # trailing comma: a one-element tuple is returned
Developer ID: chainer, Project: chainer, Lines of code: 5, Source file: test_convolution_nd.py

Example 8: get_initializers

# Required module import: from chainer import initializers [as alias]
# Or: from chainer.initializers import Uniform [as alias]
def get_initializers(self):
    # Seeded RandomState instances make the random initializations reproducible.
    if self.initialW == 'zero':
        weight_initializer = initializers.constant.Zero()
    elif self.initialW == 'random':
        weight_initializer = initializers.GlorotUniform(
            rng=numpy.random.RandomState(seed=0))

    if self.initial_bias == 'zero':
        bias_initializer = initializers.constant.Zero()
    elif self.initial_bias == 'random':
        bias_initializer = initializers.Uniform(
            rng=numpy.random.RandomState(seed=0))

    return weight_initializer, bias_initializer
Developer ID: chainer, Project: chainer, Lines of code: 16, Source file: test_link_n_step_rnn.py


Note: The chainer.initializers.Uniform examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or reusing the code, and do not repost without permission.