本文整理汇总了Python中util.chunks方法的典型用法代码示例。如果您正苦于以下问题:Python util.chunks方法的具体用法?Python util.chunks怎么用?Python util.chunks使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类util
的用法示例。
在下文中一共展示了util.chunks方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: generate
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def generate(config, dnat=False):
    """Build the dnsmasq configuration text for every proxied domain in *config*.

    config -- dict with "public_ip", "base_ip", and "groups", where each group
              holds a list of proxy dicts carrying "domain" and "dnat" keys.
    dnat   -- when False, every domain resolves to the public IP; when True,
              non-DNAT domains resolve to the base IP and each DNAT proxy is
              assigned its own consecutive address above the base IP.

    Returns the concatenated dnsmasq configuration as a single string.
    """
    public_ip = config["public_ip"]
    current_ip = config["base_ip"]
    dnsmasq_content = ""
    for group in config["groups"].values():
        if dnat:
            # DNAT proxies are handled individually below; only the rest
            # share the base address here.
            domains = [proxy["domain"] for proxy in group["proxies"] if not proxy["dnat"]]
            target_ip = current_ip
        else:
            domains = [proxy["domain"] for proxy in group["proxies"]]
            target_ip = public_ip
        # Emit records in batches of 5 domains per dnsmasq line.
        for domain_batch in chunks(domains, 5):
            dnsmasq_content += generate_dns(domain_batch, target_ip)
    if dnat:
        for group in config["groups"].values():
            for proxy in group["proxies"]:
                if proxy["dnat"]:
                    # Each DNAT proxy gets the next sequential IP address.
                    current_ip = long2ip(ip2long(current_ip) + 1)
                    dnsmasq_content += generate_dns(proxy["domain"], current_ip)
    return dnsmasq_content
示例2: refresh_job
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def refresh_job(self, j):
    """Advance job *j* to its next extranonce2 and rebuild its block header.

    Increments the extranonce2, rehashes the coinbase, folds in the merkle
    branch, and reassembles j.block_header from the refreshed merkle root.
    Returns the mutated job *j*.
    """
    j.extranonce2 = self.increment_nonce(j.extranonce2)
    coinbase = j.coinbase1 + self.extranonce + j.extranonce2 + j.coinbase2
    # Double-SHA256 of the raw coinbase seeds the merkle computation.
    merkle_root = sha256(sha256(unhexlify(coinbase)).digest()).digest()
    for branch_hash in j.merkle_branch:
        merkle_root = sha256(sha256(merkle_root + unhexlify(branch_hash)).digest()).digest()
    # Reverse byte order within each 32-bit word before hex-encoding.
    merkle_root = hexlify(''.join(word[::-1] for word in chunks(merkle_root, 4)))
    j.block_header = ''.join([j.version, j.prevhash, merkle_root, j.ntime, j.nbits])
    j.time = time()
    return j
示例3: train
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def train(self, X_train, X_val):
    """Run the full training loop over P.N_EPOCHS epochs.

    X_train, X_val -- sequences of samples where index 2 is the binary label
                      (1 = positive, 0 = negative).

    Splits both sets by label, builds epochs in a background iterator, and
    for each epoch runs the train and validation batch passes, then decays
    the learning rate geometrically (factor 0.985 per epoch).
    """
    # List comprehensions instead of len(filter(...)): same result, and the
    # unused n_train_true / n_val_true locals of the original are dropped.
    train_true = [sample for sample in X_train if sample[2] == 1]
    train_false = [sample for sample in X_train if sample[2] == 0]
    val_true = [sample for sample in X_val if sample[2] == 1]
    val_false = [sample for sample in X_val if sample[2] == 0]
    make_epoch_helper = functools.partial(make_epoch, train_true=train_true, train_false=train_false, val_true=val_true, val_false=val_false)
    logging.info("Starting training...")
    # batch_size=1: each "batch" produced by the iterator is one whole epoch.
    epoch_iterator = ParallelBatchIterator(make_epoch_helper, range(P.N_EPOCHS), ordered=False, batch_size=1, multiprocess=False, n_producers=1)
    for epoch_values in epoch_iterator:
        self.pre_epoch()
        train_epoch_data, val_epoch_data = epoch_values
        train_epoch_data = util.chunks(train_epoch_data, P.BATCH_SIZE_TRAIN)
        val_epoch_data = util.chunks(val_epoch_data, P.BATCH_SIZE_VALIDATION)
        self.do_batches(self.train_fn, train_epoch_data, self.train_metrics)
        self.do_batches(self.val_fn, val_epoch_data, self.val_metrics)
        self.post_epoch()
        # Geometric learning-rate decay keyed on the epoch counter.
        new_lr = P.LEARNING_RATE * ((0.985) ** self.epoch)
        logging.info("Setting learning rate to {}".format(new_lr))
        self.l_r.set_value(new_lr)
示例4: queue_work
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def queue_work(self, work, miner=None):
    """Forward *work* to the switch with a byte-reversed hex target.

    The server difficulty is rendered as a 64-digit hex string, split into
    two-character byte pairs, and reversed pair-wise to flip endianness
    before being handed to the switch.
    """
    difficulty_hex = '%064x' % self.server_difficulty
    target = ''.join(reversed(list(chunks(difficulty_hex, 2))))
    self.switch.queue_work(self, work.block_header, target, work.job_id, work.extranonce2, miner)
示例5: decode
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def decode(self, server, block_header, target, job_id = None, extranonce2 = None):
    """Parse a hex block header and target into a job object ready for hashing.

    Returns the populated job, or None implicitly when block_header is empty.
    Side effect: updates self.difficulty (via set_difficulty) when the header
    carries a new difficulty value.

    NOTE(review): uses str.decode('hex') and integer '/' on 2**256 — this is
    Python 2 code; binary_data is a byte string.
    """
    if block_header:
        job = Object()
        binary_data = block_header.decode('hex')
        # Midstate buffer: 16 header words followed by 64 zero words.
        data0 = np.zeros(64, np.uint32)
        data0 = np.insert(data0, [0] * 16, unpack('IIIIIIIIIIIIIIII', binary_data[:64]))
        job.target = np.array(unpack('IIIIIIII', target.decode('hex')), dtype=np.uint32)
        job.header = binary_data[:68]
        # Header words 16-18: last merkle word, timestamp, compact difficulty.
        job.merkle_end = np.uint32(unpack('I', binary_data[64:68])[0])
        job.time = np.uint32(unpack('I', binary_data[68:72])[0])
        job.difficulty = np.uint32(unpack('I', binary_data[72:76])[0])
        # NOTE(review): sha256 here takes (STATE, data0) — a project midstate
        # helper, not hashlib.sha256; confirm against the defining module.
        job.state = sha256(STATE, data0)
        job.f = np.zeros(8, np.uint32)
        # Pre-bind the tail words so the second-stage hash closure is ready.
        job.state2 = partial(job.state, job.merkle_end, job.time, job.difficulty, job.f)
        # Byte-reverse the hex target and divide into 2^256 to get the
        # expected-shares quotient (Python 2 integer division).
        job.targetQ = 2**256 / int(''.join(list(chunks(target, 2))[::-1]), 16)
        job.job_id = job_id
        job.extranonce2 = extranonce2
        job.server = server
        calculateF(job.state, job.merkle_end, job.time, job.difficulty, job.f, job.state2)
        if job.difficulty != self.difficulty:
            self.set_difficulty(job.difficulty)
        return job
示例6: set_difficulty
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def set_difficulty(self, difficulty):
    """Record *difficulty* and derive the expanded 256-bit true target.

    The byte-swapped difficulty is treated as a compact "bits" value:
    first hex byte is the exponent, the rest the mantissa. The expanded
    target is byte-reversed pair-wise and unpacked into eight uint32 words
    stored on self.true_target.
    """
    self.difficulty = difficulty
    bits = '%08x' % difficulty.byteswap()
    exponent = int(bits[:2], 16)
    mantissa = int(bits[2:], 16)
    # Compact-bits expansion: mantissa shifted up by 8*(exponent-3) bits.
    true_target = '%064x' % (mantissa * 2 ** (8 * (exponent - 3)),)
    true_target = ''.join(reversed(list(chunks(true_target, 2))))
    self.true_target = np.array(unpack('IIIIIIII', true_target.decode('hex')), dtype=np.uint32)
示例7: _start_producers
# 需要导入模块: import util [as 别名]
# 或者: from util import chunks [as 别名]
def _start_producers(self, result_queue):
    """Split self.X into batches, enqueue them as jobs, and spawn producers.

    Fills a job queue with (index, batch) tuples followed by one (-1, None)
    poison pill per worker, then starts self.n_producers workers as either
    processes or threads depending on self.multiprocess.

    Returns (batch_count, jobs_queue).
    """
    jobs = Queue()
    n_workers = self.n_producers
    # Shared counter lets producers emit results in submission order.
    last_queued_job = Value('i', -1)
    batches = util.chunks(self.X, self.batch_size)
    batch_count = 0
    for job_index, X_batch in enumerate(batches):
        batch_count += 1
        jobs.put((job_index, X_batch))
    # One poison pill per worker signals it to stop.
    for _ in xrange(n_workers):
        jobs.put((-1, None))
    produce = partial(_produce_helper,
                      generator=self.generator,
                      jobs=jobs,
                      result_queue=result_queue,
                      last_queued_job=last_queued_job,
                      ordered=self.ordered)
    for worker_id in xrange(n_workers):
        worker_name = "ParallelBatchIterator worker {0}".format(worker_id)
        worker_cls = Process if self.multiprocess else Thread
        p = worker_cls(target=produce, args=(worker_id,), name=worker_name)
        # NOTE: workers are deliberately non-daemon in the original
        # (the daemon flag was commented out), so that is preserved here.
        p.start()
    return batch_count, jobs