

Python constants.BATCH_SIZE Attribute Code Examples

This article collects typical usage examples of the constants.BATCH_SIZE attribute in Python. If you are wondering what exactly constants.BATCH_SIZE does, how to use it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the constants module where this attribute is defined.


The following presents 15 code examples of the constants.BATCH_SIZE attribute, sorted by popularity by default.
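Every example assumes a project-local constants module, imported either as a whole (often aliased as c) or with BATCH_SIZE imported directly. A minimal, hypothetical sketch of such a module is shown here for orientation; the names are taken from the examples below, but all of the values are illustrative assumptions rather than settings from the cited projects.

# constants.py -- hypothetical sketch of a project-local configuration module
# (names mirror the examples below; values are illustrative assumptions only)
BATCH_SIZE = 32                      # number of samples per training batch
TRAIN_HEIGHT = 32                    # frame height of preprocessed training clips
TRAIN_WIDTH = 32                     # frame width of preprocessed training clips
HIST_LEN = 4                         # number of history frames per clip
NUM_CLIPS = 100000                   # number of preprocessed clips on disk
TRAIN_DIR_CLIPS = '../data/clips/'   # directory holding the preprocessed .npz clips

# Client code then uses either import style shown in the examples:
#     import constants as c
#     print(c.BATCH_SIZE)
# or:
#     from constants import BATCH_SIZE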

Example 1: get_train_batch

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def get_train_batch():
    """
    Loads c.BATCH_SIZE clips from the database of preprocessed training clips.

    @return: An array of shape
            [c.BATCH_SIZE, c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))].
    """
    clips = np.empty([c.BATCH_SIZE, c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))],
                     dtype=np.float32)
    for i in range(c.BATCH_SIZE):
        path = c.TRAIN_DIR_CLIPS + str(np.random.choice(c.NUM_CLIPS)) + '.npz'
        clip = np.load(path)['arr_0']

        clips[i] = clip

    return clips 
Developer ID: dyelax, Project: Adversarial_Video_Generation, Lines of code: 18, Source file: utils.py

Example 2: train

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def train(self):
        """
        Runs a training loop on the model.
        """
        while True:
            inputs, targets = self.data_reader.get_train_batch(c.BATCH_SIZE, c.SEQ_LEN)
            print('Training model...')

            feed_dict = {self.model.inputs: inputs, self.model.targets: targets}
            global_step, loss, _ = self.sess.run([self.model.global_step,
                                                  self.model.loss,
                                                  self.model.train_op],
                                                 feed_dict=feed_dict)

            print('Step: %d | loss: %f' % (global_step, loss))
            if global_step % c.MODEL_SAVE_FREQ == 0:
                print('Saving model...')
                self.saver.save(self.sess, join(c.MODEL_SAVE_DIR, self.artist_name + '.ckpt'),
                                global_step=global_step) 
Developer ID: dyelax, Project: encore.ai, Lines of code: 21, Source file: runner.py

Example 3: main2

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def main2():
    num_utterances_per_speaker = 50
    num_speakers = 100
    num_samples = num_speakers * num_utterances_per_speaker
    kx_train = np.zeros(shape=(num_samples, 32, 64, 1))
    ky_train = np.zeros(shape=(num_samples, num_speakers))
    for i in range(num_samples):
        speaker_id = i % num_speakers
        ky_train[i, speaker_id] = 1
        kx_train[i] = speaker_id
    kx_test = np.array(kx_train)
    ky_test = np.array(ky_train)

    tpshn = TripletBatcherSelectHardNegatives(kx_train, ky_train, kx_test, ky_test, None)
    tp = TripletBatcher(kx_train, ky_train, kx_test, ky_test)
    avg = []
    avg2 = []
    while True:
        bx, by = tp.get_batch(BATCH_SIZE, is_test=False)
        avg.append(float(triplet_loss.deep_speaker_loss(predict(bx), predict(bx))))

        bx, by = tpshn.get_batch(BATCH_SIZE, is_test=False, predict=predict)
        avg2.append(float(triplet_loss.deep_speaker_loss(predict(bx), predict(bx))))

        print(np.mean(avg), np.mean(avg2)) 
Developer ID: milvus-io, Project: bootcamp, Lines of code: 27, Source file: batcher_test.py

Example 4: create_txs

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def create_txs(ipc_path, rpc_host, rpc_port, signer_addr, airdropper_addr, omgtoken_addr, verify_eth,
               processed_file, unsigned_file):

    if ipc_path and (rpc_host or rpc_port):
        raise Exception("both ipc and rpc cannot be specified")
    if ipc_path:
        web3 = Web3(IPCProvider(ipc_path))
    else:
        web3 = Web3(RPCProvider(host=rpc_host,
                                port=rpc_port))

    airdropper, omgToken = get_contracts(web3,
                                         airdropper_addr=airdropper_addr,
                                         omgtoken_addr=omgtoken_addr)

    creator = Creator(signer_addr, airdropper, omgToken, GAS_LIMIT, GAS_PRICE, GAS_RESERVE,
                      verify_eth=verify_eth)

    airdrops = json.loads(processed_file.read())

    unsigned = creator.create_txs(airdrops, BATCH_SIZE)

    unsigned_file.write(json.dumps(unsigned, sort_keys=True)) 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 25, Source file: create_txs.py

Example 5: test_recover_sent_airdrops

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test_recover_sent_airdrops(web3, prepared_contracts, transactions, signed, airdrops,
                               creator):
    """
    Assumes partially sent airdrops and a need to sign the transactions again,
    e.g. when it turned out that too little gas was allowed (unlikely).
    """
    airdropper, omg_token = prepared_contracts

    Sender(web3).send_transactions(signed[:1], transactions[:1])

    # airdrop partially done by now
    check_entirely_airdropped(airdrops[0:BATCH_SIZE], omg_token)

    not_airdropped = Sender(web3).recover_unsent_airdrops(airdrops, signed, airdropper, omg_token)

    assert not_airdropped == airdrops[BATCH_SIZE:]

    unsigned = creator.create_txs(not_airdropped, BATCH_SIZE)
    new_signed = Signer(web3).sign_transactions(unsigned)
    Sender(web3).send_transactions(new_signed, unsigned)

    check_entirely_airdropped(airdrops, omg_token) 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 24, Source file: test_utils.py

Example 6: get_train_batch

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def get_train_batch():
	"""
	Loads c.BATCH_SIZE clips from the database of preprocessed training clips.

	@return: An array of shape
			[c.BATCH_SIZE, c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))].
	"""
	clips = np.empty([c.BATCH_SIZE, (3 * (c.HIST_LEN + 1)),c.TRAIN_HEIGHT, c.TRAIN_WIDTH],
					 dtype=np.float32)

	print('batchsize', c.BATCH_SIZE)
	print('test dir clips', c.TRAIN_DIR_CLIPS)
	# for i in xrange(c.BATCH_SIZE):
	for i in range(c.BATCH_SIZE):
		path = c.TRAIN_DIR_CLIPS + str(np.random.choice(c.NUM_CLIPS - 1)) + '.npz'
		print('path:', path)
		clip = np.load(path)['arr_0']

		clips[i] = clip

	return clips 
Developer ID: alokwhitewolf, Project: Video-frame-prediction-by-multi-scale-GAN, Lines of code: 23, Source file: utils.py

Example 7: test

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test(self):
        """
        Runs one test step on the generator network.
        """
        batch = get_test_batch(c.BATCH_SIZE, num_rec_out=self.num_test_rec)
        self.g_model.test_batch(
            batch, self.global_step, num_rec_out=self.num_test_rec) 
Developer ID: dyelax, Project: Adversarial_Video_Generation, Lines of code: 9, Source file: avg_runner.py

Example 8: save

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def save(artist, model_path, num_save):
    sample_save_dir = c.get_dir('../save/samples/')
    sess = tf.Session()

    print(artist)

    data_reader = DataReader(artist)
    vocab = data_reader.get_vocab()

    print('Init model...')
    model = LSTMModel(sess,
                      vocab,
                      c.BATCH_SIZE,
                      c.SEQ_LEN,
                      c.CELL_SIZE,
                      c.NUM_LAYERS,
                      test=True)

    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())

    saver.restore(sess, model_path)
    print('Model restored from ' + model_path)

    artist_save_dir = c.get_dir(join(sample_save_dir, artist))
    for i in range(num_save):
        print(i)

        path = join(artist_save_dir, str(i) + '.txt')
        sample = model.generate()
        processed_sample = process_sample(sample)

        with open(path, 'w') as f:
            f.write(processed_sample) 
Developer ID: dyelax, Project: encore.ai, Lines of code: 36, Source file: save_samples.py

Example 9: main

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def main():
    select = True
    try:
        sys.argv[1]
    except:
        select = False
    print('select', select)

    working_dir = '/media/philippe/8TB/deep-speaker'
    # By construction, these losses should be much higher than the normal losses.
    # we select batches this way.
    batch_input_shape = [None, NUM_FRAMES, NUM_FBANKS, 1]
    print('Testing with the triplet losses.')
    dsm = DeepSpeakerModel(batch_input_shape, include_softmax=False)
    triplet_checkpoint = load_best_checkpoint(CHECKPOINTS_TRIPLET_DIR)
    pre_training_checkpoint = load_best_checkpoint(CHECKPOINTS_SOFTMAX_DIR)
    if triplet_checkpoint is not None:
        print(f'Loading triplet checkpoint: {triplet_checkpoint}.')
        dsm.m.load_weights(triplet_checkpoint)
    elif pre_training_checkpoint is not None:
        print(f'Loading pre-training checkpoint: {pre_training_checkpoint}.')
        # If `by_name` is True, weights are loaded into layers only if they share the
        # same name. This is useful for fine-tuning or transfer-learning models where
        # some of the layers have changed.
        dsm.m.load_weights(pre_training_checkpoint, by_name=True)
    dsm.m.compile(optimizer='adam', loss=deep_speaker_loss)
    kc = KerasFormatConverter(working_dir)
    if select:
        print('TripletBatcherSelectHardNegatives()')
        batcher = TripletBatcherSelectHardNegatives(kc.kx_train, kc.ky_train, kc.kx_test, kc.ky_test, dsm)
    else:
        print('TripletBatcher()')
        batcher = TripletBatcher(kc.kx_train, kc.ky_train, kc.kx_test, kc.ky_test)
    batch_size = BATCH_SIZE
    losses = []
    while True:
        _bx, _by = batcher.get_batch(batch_size, is_test=False)
        losses.append(dsm.m.evaluate(_bx, _by, verbose=0, batch_size=BATCH_SIZE))
        print(np.mean(losses)) 
Developer ID: milvus-io, Project: bootcamp, Lines of code: 41, Source file: batcher_test.py

Example 10: minted_and_credited

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def minted_and_credited(token, airdropper, chain, accounts):
    txn_hash = token.transact().mint(accounts[0], BATCH_SIZE * LARGEST_AMOUNT)
    chain.wait.for_receipt(txn_hash)

    txn_hash = token.transact().transfer(airdropper.address, BATCH_SIZE * LARGEST_AMOUNT)
    chain.wait.for_receipt(txn_hash) 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 8, Source file: test_contract.py

Example 11: test_flow

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test_flow(token, airdropper, chain, accounts, minted_and_credited):

    txn_hash = airdropper.transact().multisend(token.address, accounts[1:2], [10])
    chain.wait.for_receipt(txn_hash)

    # return to owner
    remainder = token.call().balanceOf(airdropper.address)
    txn_hash = airdropper.transact().multisend(token.address, [accounts[0]], [remainder])
    chain.wait.for_receipt(txn_hash)

    assert token.call().balanceOf(accounts[0]) == BATCH_SIZE * LARGEST_AMOUNT - 10
    assert token.call().balanceOf(accounts[1]) == 10
    assert token.call().balanceOf(airdropper.address) == 0 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 15, Source file: test_contract.py

Example 12: test_list_processing_and_cost

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test_list_processing_and_cost(token, airdropper, chain, minted_and_credited):
    beneficiaries = [urandom(20) for _ in range(BATCH_SIZE)]
    txn_hash = airdropper.transact().multisend(token.address,
                                               beneficiaries,
                                               [LARGEST_AMOUNT] * len(beneficiaries))

    peracc = chain.web3.eth.getTransactionReceipt(txn_hash)['gasUsed'] / len(beneficiaries)
    for account in beneficiaries:
        assert token.call().balanceOf(account) == LARGEST_AMOUNT

    assert peracc <= 33000  # golden number 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 13, Source file: test_contract.py

Example 13: airdrops

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def airdrops():
    """
    Uses a pre-prepared JSON file with processed airdrops (see README.md).

    The list of airdrops is truncated, just enough for 2 uneven transactions.
    """

    with open("data/processed.json") as f:
        airdrops = json.loads(f.read())

    return airdrops[0:BATCH_SIZE + 10] 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 13, Source file: test_utils.py

Example 14: test_entire_flow

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test_entire_flow(web3, prepared_contracts, creator, input_file):

    airdropper, omg_token = prepared_contracts
    airdrops = process(input_file.read())
    transactions = creator.create_txs(airdrops, BATCH_SIZE)

    # this being a long-running test, the unlocking from web3 fixture might have expired
    web3.personal.unlockAccount(web3.eth.accounts[0], "")

    signed = Signer(web3).sign_transactions(transactions)
    Sender(web3).send_transactions(signed, transactions)

    check_entirely_airdropped(airdrops, omg_token) 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 15, Source file: test_utils.py

Example 15: test_small_flow

# Required import: import constants [as alias]
# Or: from constants import BATCH_SIZE [as alias]
def test_small_flow(web3, prepared_contracts, creator, airdrops):
    _, omg_token = prepared_contracts

    transactions = creator.create_txs(airdrops, BATCH_SIZE)
    signed = Signer(web3).sign_transactions(transactions)
    Sender(web3).send_transactions(signed, transactions)

    check_entirely_airdropped(airdrops, omg_token) 
Developer ID: omgnetwork, Project: airdrop, Lines of code: 10, Source file: test_utils.py


Note: The constants.BATCH_SIZE attribute examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors. Please follow the corresponding project's license when distributing or using the code, and do not reproduce this article without permission.