本文整理汇总了Python中toolz.map函数的典型用法代码示例。如果您正苦于以下问题:Python map函数的具体用法?Python map怎么用?Python map使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了map函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: compute_up
def compute_up(expr, args, **kwargs):
    """Materialize the computed expression over *args* as a plain list.

    NOTE(review): relies on project helpers (odo, VarArgs, _pd_from_dshape,
    discover) defined elsewhere -- their exact contracts are not visible here.
    """
    arg_shapes = map(discover, expr.args)
    frames = VarArgs(map(_pd_from_dshape, args, arg_shapes))
    result = compute_up(expr, frames, **kwargs)
    return odo(result, list)
示例2: card_entry
def card_entry(info):
    """Log the collection result for one switch and, on success, attach its
    card nodes to the matching Switch node in the graph.

    Parameters
    ----------
    info : tuple (mark, cards, switch)
        mark: 'success' or a failure marker; cards: iterable of
        (slot, name) pairs; switch: comma-separated string whose first
        field is the switch IP.
    """
    def create_card_node(card):
        # graph.create returns a container; the first element is the new node.
        return graph.create(Node('Card', slot=card[0], name=card[1]))[0]

    mark, cards, switch = info
    with open(log_file, 'a') as flog:
        flog.write("{0}:{1}\n".format(switch, mark))
    if cards and mark == 'success':
        ip = switch.split(',')[0]
        switch_node = graph.find_one(
            'Switch', property_key='ip', property_value=ip)
        # Plain loop instead of list(map(lambda ...)): the calls run purely
        # for their side effects on the graph.
        for card in cards:
            graph.create((switch_node, 'HAS', create_card_node(card)))
示例3: card_entry
def card_entry(info):
    """Log the collection result for one OLT and, on success, attach its
    card nodes to the matching Olt node in the graph.

    Parameters
    ----------
    info : tuple (mark, result, olt)
        mark: 'success' or a failure marker; result: iterable of
        (slot, name) pairs; olt: comma-separated string whose first field
        is the OLT IP.
    """
    def create_card_node(card):
        # graph.create returns a container; the first element is the new node.
        return graph.create(Node('Card', slot=card[0], name=card[1]))[0]

    mark, result, olt = info
    with open(log_file, 'a') as logging:
        logging.write("{0}:{1}\n".format(olt, mark))
    if result and mark == 'success':
        ip = olt.split(',')[0]
        node = graph.find_one(
            'Olt', property_key='ip', property_value=ip)
        # Plain loop instead of list(map(lambda ...)): the calls run purely
        # for their side effects on the graph.
        for card in result:
            graph.create((node, 'HAS', create_card_node(card)))
示例4: deepmap
def deepmap(func, data, n=1):
    """Apply *func* to every element of a sequence nested *n* levels deep.

    The outermost level stays a lazy iterator; inner levels are realized
    as tuples.

    >>> inc = lambda x: x + 1
    >>> list(deepmap(inc, [1, 2], n=1))
    [2, 3]
    >>> list(deepmap(inc, [(1, 2), (3, 4)], n=2))
    [(2, 3), (4, 5)]
    """
    if n == 1:
        return map(func, data)
    # Recurse one level down and tuple-ify each inner result.
    return map(lambda inner: tuple(deepmap(func, inner, n=n - 1)), data)
示例5: get_input_example
def get_input_example(okform_dir, malformed_dir, id_):
    """Build the capitalization-request payload for one document.

    Reads the LAST line of the document's .auxil file under
    *malformed_dir* (the capitalized-title record) and the full document
    body from *okform_dir*.

    NOTE(review): assumes the .auxil file is non-empty; an empty file
    leaves the line variable unbound (NameError) -- confirm with callers.
    """
    aux_path = str(Path(malformed_dir) / Path(id_)) + ".auxil"
    base_path = str(Path(okform_dir) / Path(id_))
    _, docs = separate_title_from_body(base_path + ".auxil", base_path + ".paf")
    with codecs.open(aux_path, "r", "utf8") as fin:
        for last_line in fin:
            pass
    title_sents = json.loads(last_line)["sents"]
    titles = [convert_sentence_auxil_to_request(s) for s in title_sents]
    doc_sents = [convert_sentence_auxil_to_request(d) for d in docs]
    return {"capitalizedSentences": titles, "otherSentences": doc_sents}
示例6: interface_check
def interface_check():
    """Collect and record interface data for every core-model switch.

    Queries the graph for switches of the hard-coded core models, then
    fetches each switch's interface info and writes it out.
    """
    clear_log()
    cmd = "match(s:Switch) where s.model='T64G' or s.model='S9306' or s.model='S9303' or s.model='S8905' return s.ip,s.model"
    nodes = graph.cypher.execute(cmd)
    switchs = [(x[0], x[1]) for x in nodes]
    # Side-effecting pipeline: a plain loop is clearer than
    # list(map(compose(output_interface, get_interface), ...)) executed
    # only for its effects.
    for switch in switchs:
        output_interface(get_interface(switch))
示例7: extract_and_capitalize_headlines_from_corpus
def extract_and_capitalize_headlines_from_corpus(corpus_dir, docids):
    """
    Iterate through all the files in `corpus_dir`,
    extract the headlines, capitalized and return them

    Parameter:
    ---------------
    corpus_dir: string

    docids: list of string
        the document to be processed

    Return:
    --------------
    generator of (docid, headlines): (str, list<list<str>>)
        yields (exception, None) for documents that fail to parse
    """
    get_tokens = partial(map, partial(get_in, ["token"]))
    get_features = partial(get_in, ["features"])
    make_capitalized_title_new = lambda words: make_capitalized_title(title_words=words)

    for docid in docids:
        p = Path(corpus_dir) / Path(docid)
        auxil_p = p.with_suffix(".auxil")
        paf_p = p.with_suffix(".paf")
        if auxil_p.exists() and paf_p.exists():
            try:
                titles, _ = separate_title_from_body(str(auxil_p), str(paf_p))
            except Exception as e:
                yield (e, None)
                # BUG FIX: without this `continue`, execution fell through to
                # the success yield below and referenced `titles`, which is
                # unbound on the first failure (NameError) and stale on
                # subsequent ones.
                continue
            # pipeline:
            # -> get features
            # -> get tokens
            # -> capitalize headline
            yield (None,
                   (p.name,
                    list(map(compose(make_capitalized_title_new,
                                     get_tokens,
                                     get_features),
                             titles))))
示例8: sql_to_iterator
def sql_to_iterator(t, **kwargs):
    """Stream every row of SQLAlchemy table *t*, converted to a plain tuple."""
    engine = t.bind
    with engine.connect() as conn:
        rows = conn.execute(sa.sql.select([t]))
        # RowProxy objects become tuples before being handed to the caller.
        yield from map(tuple, rows)
示例9: compute_up
def compute_up(t, lhs, rhs, **kwargs):
    """ Join Operation for Python Streaming Backend

    Note that a pure streaming Join is challenging/impossible because any row
    in one seq might connect to any row in the other, requiring simultaneous
    complete access.

    As a result this approach compromises and fully realizes the LEFT sequence
    while allowing the RIGHT sequence to stream.  As a result

    Always put your bigger collection on the RIGHT side of the Join.
    """
    if lhs == rhs:
        # Self-join: duplicate the iterator so both sides can be consumed
        # independently.
        lhs, rhs = itertools.tee(lhs, 2)

    # Positional indices of the join columns on each side.
    on_left = [t.lhs.fields.index(col) for col in listpack(t.on_left)]
    on_right = [t.rhs.fields.index(col) for col in listpack(t.on_right)]

    # toolz.join treats `no_default` as "inner join on this side"; passing
    # None instead makes unmatched rows appear padded with None (outer-join
    # behavior for that side).
    left_default = (None if t.how in ('right', 'outer')
                    else toolz.itertoolz.no_default)
    right_default = (None if t.how in ('left', 'outer')
                     else toolz.itertoolz.no_default)

    pairs = toolz.join(on_left, lhs,
                       on_right, rhs,
                       left_default=left_default,
                       right_default=right_default)

    assemble = pair_assemble(t)

    # Lazily merge each matched (left, right) pair into a single output row.
    return map(assemble, pairs)
示例10: stream_decompress
def stream_decompress(fmt, data):
    """ Decompress a block of compressed bytes into a stream of strings """
    if fmt == 'gz':
        return gzip.GzipFile(fileobj=BytesIO(data))
    elif fmt == 'bz2':
        return bz2_stream(data)
    # Any other format tag: treat the payload as raw text lines.
    return map(bytes.decode, BytesIO(data))
示例11: set_attributes
def set_attributes(self):
    """Install a PropertyMock (returning 1) on the mocked object's class for
    every name in ``self.attributes``.

    The mocks go on the *type* so that plain attribute access on the
    instance triggers them.
    """
    # A plain loop replaces list(map(lambda ...)): the calls are executed
    # purely for their setattr side effect.
    for property_name in self.attributes:
        setattr(
            type(self.mocked_object_label_a),
            property_name,
            mock.PropertyMock(name=property_name, return_value=1),
        )
示例12: resource_json
def resource_json(uri, open=open, **kwargs):
    """Load JSON content from *uri*.

    Tries to parse the whole file as a single JSON document; if that fails
    (e.g. newline-delimited JSON), falls back to parsing it line by line.

    Parameters
    ----------
    uri : path handed to *open*
    open : file opener (injectable for testing); deliberately shadows the
        builtin.

    Returns
    -------
    The parsed object, or a lazy iterator of per-line objects for the
    line-delimited fallback (that handle stays open while the iterator is
    consumed).
    """
    f = open(uri)
    try:
        data = json.load(f)
    except ValueError:
        # BUG FIX: the original left the first handle open on this path.
        f.close()
        # Line-delimited JSON: reopen and parse lazily, one object per line.
        return map(json.loads, open(uri))
    f.close()
    return data
示例13: svlan_entry
def svlan_entry(lock, info):
    """Record the svlan collection result for one OLT and, on success,
    create the USE relations in the graph.

    *lock* serializes both the log write and the graph updates across
    concurrent workers.
    """
    cmd = "match (n:Olt{ip:{ip}}) create unique (n)-[:USE{port:{port}}]-(:Svlan{value:{value}})"
    mark, result, olt = info
    ip = olt.split(',')[0]
    with lock:
        with open(log_file, 'a') as logging:
            logging.write("{0}:{1}\n".format(olt, mark))
    if result and mark == 'success':
        with lock:
            # Plain loop instead of list(map(lambda ...)): executed only for
            # the graph side effects.
            for item in result:
                graph.cypher.execute(
                    cmd, {"ip": ip, "port": item[0], "value": item[1]})
示例14: compute_up
def compute_up(t, seq, **kwargs):
    """Distinct for the streaming backend: deduplicate *seq* lazily.

    Rows arriving as lists are converted to (hashable) tuples first.
    """
    try:
        head = first(seq)
    except StopIteration:
        # Empty input: nothing to deduplicate.
        return ()
    # Push the consumed row back onto the front of the stream.
    seq = concat([[head], seq])
    if isinstance(head, list):
        seq = map(tuple, seq)
    return unique(seq)
示例15: test_averages
def test_averages(self):
    """Each mocked attribute's ``average_<name>`` on the confusion matrix
    should equal the mocked value 1."""
    self.set_attributes()
    # A plain loop replaces list(map(lambda ...)): the assertions are side
    # effects, not values worth collecting.
    for property_name in self.attributes:
        self.assertEqual(
            getattr(self.confusion_matrix, "average_" + property_name),
            1,
        )