本文整理汇总了Python中rethinkdb.expr函数的典型用法代码示例。如果您正苦于以下问题:Python expr函数的具体用法?Python expr怎么用?Python expr使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了expr函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_bar_data
def get_bar_data(question_data):
    """Build a ReQL bar-chart payload for a survey question.

    Multiple-choice questions yield their distinct answers as labels and a
    per-answer count as the series; rating questions yield fixed 1-10
    labels with a per-rating count; any other response format falls
    through to an empty list.

    NOTE(review): the r.branch expression is built but never returned or
    run, so this function currently has no observable effect — a `return`
    appears to be missing; confirm against the caller.
    NOTE(review): `question` is not defined in this scope; presumably a
    (label, answers-sequence) pair available to the caller — verify.
    """
    r.branch(
        # Outer branch: proceed only for formats we know how to chart.
        (
            r.expr(question_data["response_format"] == Question().RESPONSE_MULTIPLE_CHOICE)
            | (question_data["response_format"] == Question().RESPONSE_RATING)
        ),
        # Inner branch picks the payload shape per response format.
        r.branch(
            (question_data["response_format"] == Question().RESPONSE_MULTIPLE_CHOICE),
            {
                "labels": question[1].distinct(),
                "series": [
                    question[1].distinct().do(lambda val: question[1].filter(lambda foo: foo == val).count())
                ],
            },
            (question_data["response_format"] == Question().RESPONSE_RATING),
            {
                "labels": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
                "series": [
                    r.expr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).map(
                        lambda val: question[1].filter(lambda foo: foo == val).count()
                    )
                ],
            },
            [],
        ),
        [],
    )
示例2: test_connect_close_expr
def test_connect_close_expr(self):
    """A closed connection must reject further query execution."""
    conn = r.connect(port=self.port)
    query = r.expr(1)
    # The query succeeds while the connection is open...
    query.run(conn)
    conn.close()
    # ...and raises a driver error once the connection is closed.
    self.assertRaisesRegexp(
        r.RqlDriverError, "Connection is closed.",
        r.expr(1).run, conn)
示例3: test_connect_close_reconnect
def test_connect_close_reconnect(self):
    """reconnect() makes a closed connection usable again."""
    conn = r.connect(port=self.port)
    r.expr(1).run(conn)
    # Closing an already-closed connection is harmless.
    conn.close()
    conn.close()
    conn.reconnect()
    # After reconnect() the same connection object serves queries again.
    r.expr(1).run(conn)
示例4: test_shutdown
def test_shutdown(self):
    """Stopping the server behind a connection closes that connection."""
    conn = r.connect(port=self.port)
    r.expr(1).run(conn)
    self.servers.stop()
    # Give the driver a moment to notice the server going away.
    sleep(0.2)
    self.assertRaisesRegexp(
        r.RqlDriverError, "Connection is closed.",
        r.expr(1).run, conn)
示例5: generate_stats
def generate_stats(rdb_conn):
    """Build and insert a point-in-time issue-statistics report.

    For the 'all' pseudo-milestone plus every distinct milestone title, the
    report records open/closed issue counts and a per-assignee breakdown,
    then is inserted into STATS_TABLE over `rdb_conn`.
    """
    issues = r.table(ISSUES_TABLE)
    # NOTE: `!= None` is intentional inside ReQL lambdas — it builds a
    # server-side `ne` term; `is not None` would break the query.
    issues_with_milestone = issues.filter(lambda issue: issue['milestone'] != None)
    milestones = issues_with_milestone.map(lambda issue: issue['milestone']['title']).distinct()
    # Generate user stats (how many issues assigned to this user have been opened and closed) for a particular set of issues
    def user_stats(issue_set):
        # Remove issues that don't have owners from the issue set
        issue_set = issue_set.filter(lambda issue: issue['assignee'] != None)
        # Get a list of users issues are assigned to
        owners = issue_set.map(lambda issue: issue['assignee']).distinct()
        # Count the issues with a given owner and state (shorthand since we reuse this)
        def count_issues(owner,state):
            return issue_set.filter(lambda issue: (issue['assignee']['login'] == owner['login']) & (issue['state'] == state)).count()
        # Return a list of documents with stats for each owner
        return owners.map(lambda owner: {
            'owner': owner['login'],
            'owner_avatar_url': owner['avatar_url'],
            'open_issues': count_issues(owner,'open'),
            'closed_issues': count_issues(owner,'closed'),
        })
    # Return owner stats for a particular milestone (filter issues to just include a milestone)
    def user_stats_by_milestone(m):
        return user_stats(issues_with_milestone.filter(lambda issue: issue['milestone']['title'] == m))
    # Return the number of issues with a particular state (and optionally a particular milestone)
    def num_issues(state, milestone=None):
        if milestone is None:
            issue_set = issues
        else:
            issue_set = issues_with_milestone.filter(lambda issue: issue['milestone']['title'] == milestone)
        return issue_set.filter(lambda issue: issue['state'] == state).count()
    # Two key things:
    # - we have to call coerce_to('array') since we get a sequence, and this will error otherwise
    # - we have to call list() on the stats to make sure we pull down all the data from a Cursor
    report = r.expr({
        # Server-side timestamp via embedded JavaScript.
        'datetime': r.js('(new Date).toISOString()'),
        'by_milestone': r.expr([{
            'milestone': 'all',
            'open_issues': num_issues('open'),
            'closed_issues': num_issues('closed'),
            'user_stats': user_stats(issues).coerce_to('array')
        }]).union(milestones.map(lambda m: {
            'milestone': m,
            'open_issues': num_issues('open', m),
            'closed_issues': num_issues('closed', m),
            'user_stats': user_stats_by_milestone(m).coerce_to('array')
        }))
    })
    # Add the generated report to the database
    # (Python 2 print statement — this module predates Python 3.)
    print "Generating and inserting new user stats at %s" % datetime.now().strftime("%Y-%m-%d %H:%M")
    r.table(STATS_TABLE).insert(r.expr([report])).run(rdb_conn)
示例6: test_port_conversion
def test_port_conversion(self):
    """connect() accepts a numeric string port but rejects junk."""
    conn = r.connect(port=str(self.port))
    r.expr(1).run(conn)
    conn.close()
    # A non-numeric port string must fail with a conversion error.
    self.assertRaisesRegexp(
        r.RqlDriverError,
        "Could not convert port abc to an integer.",
        lambda: r.connect(port='abc'))
示例7: rql_highest_revs
def rql_highest_revs(query, field):
    """Restrict `query` to the highest-`rev` document of each `field` group.

    Equivalent JavaScript:
        r.db("psh").table("images").groupedMapReduce(
            function(image) { return image('dockerfile') },
            function(image) { return {rev: image('rev'), id: image('id')} },
            function(left, right) {
                return r.branch(left('rev').gt(right('rev')), left, right)
            }
        ).map(function(group) { return group('reduction')("id") })
    """
    # Group by the requested field, keep only (rev, id), and reduce each
    # group to whichever document carries the larger revision number.
    grouped = query.grouped_map_reduce(
        lambda image: image[field],
        lambda image: {"rev": image["rev"], "id": image["id"]},
        lambda left, right: r.branch(left["rev"] > right["rev"], left, right)
    )
    # Materialize the winning ids on the client.
    winning_ids = grouped.map(
        lambda group: group["reduction"]["id"]
    ).coerce_to("array").run()
    # Hand back a query restricted to those documents.
    return query.filter(lambda doc: r.expr(winning_ids).contains(doc["id"]))
示例8: get_shared_records_for_report
def get_shared_records_for_report(self, index_values, consider_demand_type=False):
    """Aggregate compiled energy records shared across different years.

    Self-joins the compiled-records table on equal comparison values
    (temperature-type comparisons only, from differing years), then groups
    by (year, account, comparison value) and sums BTU, the price/size
    normalization factors and record hours per group.

    :param index_values: key values for the secondary-index lookup
    :param consider_demand_type: selects the "has_peak" index when True,
        otherwise "no_peak"
    :return: grouped/reduced rows as returned by the unit-of-work runner
    """
    if consider_demand_type:
        report_index = "has_peak"
    else:
        report_index = "no_peak"
    data = self.uow.run_list(
        self.compiled_energy_records_table
        .get_all(*index_values, index=report_index)
        # Pair records that share a comparison value but come from
        # different years; both sides must be temperature comparisons.
        .inner_join(self.compiled_energy_records_table,
                    lambda arow, brow: r.expr(arow['comparison_value'] == brow['comparison_value'])
                    .and_(arow['comparison_type'] == 'temp').and_(brow['comparison_type'] == 'temp')
                    .and_(arow['year'] != brow['year']))
        .zip()
        .group(lambda record: {'year': record['year'], 'account_id': record['account_id'],
                               'value': record['comparison_value']})
        .map(lambda record: {'sum_btu': record['sum_btu'],
                             'p_norm': record['sum_price_normalization'],
                             's_norm': record['sum_size_normalization'],
                             'hrs': record['sum_hours_in_record']})
        # Collapse each group's metrics into one summed document.
        .reduce(lambda a, b: {'sum_btu': a['sum_btu'] + b['sum_btu'],
                              'p_norm': a['p_norm'] + b['p_norm'],
                              's_norm': a['s_norm'] + b['s_norm'],
                              'hrs': a['hrs'] + b['hrs']})
        .ungroup())
    return data
示例9: new_usuario
def new_usuario(self, data):
    """Create a user record from the given data.

    Stamps `data['criado_em']` ("created at") with the current time in the
    configured timezone (default UTC-03:00), wrapped in r.expr so the
    server stores it as a ReQL time, then inserts the document.
    """
    data['criado_em'] = r.expr(datetime.now(
        r.make_timezone(config('TIMEZONE', default='-03:00'))))
    return self.insert(data)
示例10: _
def _():
    """Collect every set that (transitively) contains the member id."""
    # Seed the search with the sets that directly list `unit_id` as a member.
    direct_query = (cls.start_accepted_query()
                    .filter(r.row['members'].contains(
                        lambda member: member['id'] == unit_id
                    )))
    frontier = direct_query.run(database.db_conn)
    # Breadth-first expansion: each round looks for sets whose members
    # include any set discovered in the previous round.
    collected = []
    while frontier:
        frontier_ids = {entry['entity_id'] for entry in frontier}
        collected += frontier
        next_query = (cls.start_accepted_query()
                      .filter(r.row['members'].contains(
                          lambda member:
                              r.expr(frontier_ids).contains(member['id'])
                      )))
        frontier = next_query.run(database.db_conn)
    return collected
示例11: import_from_queue
def import_from_queue(progress, conn, task_queue, error_queue, replace_conflicts, durability, write_count):
    """Worker loop: pull batches of pickled rows off `task_queue` and insert
    them into RethinkDB over `conn`.

    `progress` is a one-element list acting as a resumption checkpoint: it
    holds the batch that was in flight when a previous run was interrupted.
    `write_count` (also a one-element list) accumulates rows written.  Each
    task is a (db_name, table_name, [pickled_row, ...]) tuple; the loop
    stops when a StopIteration sentinel is dequeued.
    """
    if progress[0] is not None and not replace_conflicts:
        # We were interrupted and it's not ok to overwrite rows, check that the batch either:
        # a) does not exist on the server
        # b) is exactly the same on the server
        task = progress[0]
        pkey = r.db(task[0]).table(task[1]).info().run(conn)["primary_key"]
        # Iterate in reverse so entries can be deleted while looping.
        for i in reversed(range(len(task[2]))):
            obj = pickle.loads(task[2][i])
            if pkey not in obj:
                raise RuntimeError("Connection error while importing. Current row has no specified primary key, so cannot guarantee absence of duplicates")
            row = r.db(task[0]).table(task[1]).get(obj[pkey]).run(conn)
            if row == obj:
                # Row already made it to the server — count it and drop it
                # from the pending batch.
                write_count[0] += 1
                del task[2][i]
            else:
                raise RuntimeError("Duplicate primary key `%s`:\n%s\n%s" % (pkey, str(obj), str(row)))
    task = task_queue.get() if progress[0] is None else progress[0]
    while not isinstance(task, StopIteration):
        try:
            # Unpickle objects (TODO: super inefficient, would be nice if we could pass down json)
            objs = [pickle.loads(obj) for obj in task[2]]
            conflict_action = 'replace' if replace_conflicts else 'error'
            res = r.db(task[0]).table(task[1]).insert(r.expr(objs, nesting_depth=max_nesting_depth), durability=durability, conflict=conflict_action).run(conn)
        except:
            # Deliberate bare except: record the in-flight batch so a retry
            # can resume from it, then re-raise unchanged.
            progress[0] = task
            raise
        if res["errors"] > 0:
            raise RuntimeError("Error when importing into table '%s.%s': %s" %
                               (task[0], task[1], res["first_error"]))
        write_count[0] += len(objs)
        task = task_queue.get()
示例12: _query_rethinkdb
def _query_rethinkdb(self, cdx_query):
    """Translate a CDX query into a RethinkDB query and execute it.

    Performs a coarse range scan over the 'abbr_canon_surt_timestamp'
    index (keys truncated to 150 chars), then filters precisely on
    `canon_surt` and restricts results to GET / WARCPROX_WRITE_RECORD
    captures.

    :param cdx_query: object exposing `key` / `end_key` (bytes) and `limit`
    :return: result of `reql.run()` — the matching capture documents
    """
    start_key = cdx_query.key.decode('utf-8')
    end_key = cdx_query.end_key.decode('utf-8')
    # '!' sorts just past the truncated end key, extending the upper bound
    # to cover every timestamp sharing that prefix.
    reql = self.r.table(self.table).between(
        [start_key[:150], rethinkdb.minval],
        [end_key[:150]+'!', rethinkdb.maxval],
        index='abbr_canon_surt_timestamp')
    reql = reql.order_by(index='abbr_canon_surt_timestamp')
    # filters have to come after order_by apparently
    # TODO support for POST, etc
    # http_method='WARCPROX_WRITE_RECORD' for screenshots, thumbnails
    reql = reql.filter(
        lambda capture: rethinkdb.expr(
            ['WARCPROX_WRITE_RECORD','GET']).contains(
            capture['http_method']))
    # Exact (untruncated) key-range filter on the canonical surt.
    reql = reql.filter(
        lambda capture: (capture['canon_surt'] >= start_key)
        & (capture['canon_surt'] < end_key))
    if cdx_query.limit:
        reql = reql.limit(cdx_query.limit)
    logging.debug('rethinkdb query: %s', reql)
    results = reql.run()
    return results
示例13: test_repl
def test_repl(self):
    """repl() installs the connection as the implicit default for run()."""
    conn = r.connect(port=self.port).repl()
    # With a repl connection registered, run() needs no explicit argument.
    r.expr(1).run()
    conn.repl()  # calling repl() a second time is idempotent
    r.expr(1).run()
    conn.close()
    # Once the repl connection is closed, implicit runs fail.
    self.assertRaisesRegexp(
        r.RqlDriverError, "Connection is closed",
        r.expr(1).run)
示例14: new_record
def new_record(location):
    """Create a vinyl-record entry from the submitted form and render it.

    Queries the Rdio 'search' API for album metadata (artwork, release
    date, duration, track count, artist key); when the search returns no
    results, placeholder values are used.  The record is saved for the
    session user and rendered with either the grid or the list template
    depending on `location`.
    """
    album_info = rdio.call('search', {'query': request.form['album'],
                                      'types': 'album'})
    if album_info['result']['number_results'] != 0:
        for x in album_info['result']['results']:
            if x['artist'].upper() == request.form['artist'].upper():
                album_art = x['icon']
                release_date = x['releaseDate']
                duration = x['duration']
                duration = duration/60  # presumably seconds -> minutes — verify Rdio units
                tracks = x['length']
                artist_key = x['artistKey']
        # NOTE(review): if results exist but none match the artist, the
        # metadata variables above are never bound and the assignments
        # below raise NameError — confirm and add a fallback.
        # NOTE(review): no `break` on match, so with multiple matches the
        # last one wins — confirm intended.
    else:
        # No search hits at all: fall back to a generic "no album art" image.
        album_art = 'http://musicunderfire.com/wp-content/uploads/2012/06/No-album-art-itunes-300x300.jpg'
        release_date = ''
        duration = 0
        tracks = 0
        artist_key = ''
    # NOTE(review): this local deliberately shadows the function name.
    new_record = m.Records()
    new_record.user = session['user']
    new_record.artist = request.form['artist']
    new_record.album = request.form['album']
    new_record.album_art = album_art
    new_record.release_date = release_date
    new_record.duration = duration
    new_record.tracks = tracks
    new_record.record_condition = ''
    new_record.sleeve_condition = ''
    new_record.color = ''
    new_record.size = ''
    new_record.notes = ''
    # Timestamp in US/Central, wrapped in r.expr so RethinkDB stores a time.
    new_record.date_added = r.expr(datetime.datetime.now(
        timezone('US/Central')))
    new_record.user_artwork=''
    new_record.save()
    condition = m.Condition.order_by('order').fetch()
    size = m.Size.order_by('order').fetch()
    record = m.Records.get(id=new_record['id'])
    if location == 'grid':
        return render_template('new_record.html',
                               s=record,
                               condition=condition,
                               size=size)
    else:
        return render_template('add_list.html',
                               s=record,
                               condition=condition,
                               size=size)
示例15: evaluate
def evaluate(querystring):
    """Evaluate a ReQL expression string into a ReQL term via r.expr.

    SECURITY: calling `eval` on an externally supplied string is dangerous
    even with a restricted `__builtins__` mapping — sandbox escapes via
    attribute access on the exposed objects (here the whole `r` module)
    are well known.  Only feed this trusted input, or replace the eval
    with a proper expression parser.
    """
    return r.expr(eval(querystring, {
        'r': r,
        # Minimal builtin surface: just the literal singletons.
        '__builtins__': {
            'True': True,
            'False': False,
            'None': None,
        }
    }))