本文整理汇总了Python中r2.lib.utils.timeago函数的典型用法代码示例。如果您正苦于以下问题:Python timeago函数的具体用法?Python timeago怎么用?Python timeago使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了timeago函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_hot
def get_hot(srs, only_fullnames=False):
    """Get the (fullname, hotness, epoch_seconds) for the hottest
    links in a subreddit. Use the query-cache to avoid some lookups
    if we can.

    Returns one list per subreddit in `srs`; each entry is either a
    fullname (if only_fullnames) or a (fullname, hot, epoch_seconds)
    tuple, filtered to links newer than g.HOT_PAGE_AGE days.
    """
    from r2.lib.db.thing import Query
    from r2.lib.db.queries import CachedResults

    ret = []

    queries = [sr.get_links("hot", "all") for sr in srs]

    # fetch these all in one go
    cachedresults = filter(lambda q: isinstance(q, CachedResults), queries)
    CachedResults.fetch_multi(cachedresults)

    # hoisted out of the loop: the age cutoff is identical for every sr
    age_limit = epoch_seconds(utils.timeago("%d days" % g.HOT_PAGE_AGE))

    # BUG FIX: the original iterated `for q in queries` and passed `sr` to
    # cached_query(); in Python 2 the comprehension variable leaks, so every
    # Query was resolved against the *last* subreddit. Pair each query with
    # its own subreddit instead.
    for sr, q in zip(srs, queries):
        if isinstance(q, Query):
            links = cached_query(q, sr)
            res = [(link._fullname, link._hot, epoch_seconds(link._date))
                   for link in links]
        elif isinstance(q, CachedResults):
            # we're relying on an implementation detail of
            # CachedResults here, where it's storing tuples that look
            # exactly like the return-type we want, to make our
            # sorting a bit cheaper
            res = list(q.data)

        # remove any that are too old
        res = [(fname if only_fullnames else (fname, hot, date))
               for (fname, hot, date) in res if date > age_limit]
        ret.append(res)

    return ret
示例2: user_vote_change_links
def user_vote_change_links(period = '1 day'):
    """Aggregate, per account, the karma earned from link votes cast in
    the last `period`, weighting each vote by its subreddit's karma
    multiplier. Returns a list of (author_id, karma_sum) pairs.
    """
    rel = Vote.rel(Account, Link)
    type = tdb.rel_types_id[rel._type_id]
    # rt = rel table
    # dt = data table
    rt, account_tt, link_tt, dt = type.rel_table

    aliases = tdb.alias_generator()
    # alias the rel data table once for the vote's author_id attribute...
    author_dt = dt.alias(aliases.next())
    # ...and the Link data table for the link's sr_id attribute
    link_dt = tdb.types_id[Link._type_id].data_table[0].alias(aliases.next())

    # Create an SQL CASE statement for the subreddit vote multiplier
    cases = []
    for subreddit in subreddits_with_custom_karma_multiplier():
        cases.append( (sa.cast(link_dt.c.value,sa.Integer) == subreddit._id,
                      subreddit.post_karma_multiplier) )
    cases.append( (True, g.post_karma_multiplier) ) # The default article multiplier

    date = utils.timeago(period)

    # sum(vote_direction * multiplier) grouped by author; rt.c.name holds
    # the vote value as text, hence the cast to Integer.
    # NOTE(review): both rt.c.date and link_tt.c.date are bounded by `date`
    # — presumably the link-table bound is an index-friendly optimization;
    # confirm before changing.
    s = sa.select([author_dt.c.value, sa.func.sum(sa.cast(rt.c.name, sa.Integer) * sa.case(cases))],
                  sa.and_(rt.c.date >= date,
                          author_dt.c.thing_id == rt.c.rel_id,
                          author_dt.c.key == 'author_id',
                          link_tt.c.thing_id == rt.c.thing2_id,
                          link_tt.c.date >= date,
                          link_dt.c.key == 'sr_id',
                          link_dt.c.thing_id == rt.c.thing2_id),
                  group_by = author_dt.c.value)

    rows = s.execute().fetchall()
    return [(int(r.value), r.sum) for r in rows]
示例3: time_listings
def time_listings(times = ('year','month','week','day','hour', 'all')):
    """Map-reduce mapper: emit per-user top/controversial/new/hot listing
    entries for each time interval in `times`, reading link records from
    stdin via mr_tools.
    """
    # cutoff (in epoch seconds) for each interval; 'all' has no cutoff
    oldests = dict((t, epoch_seconds(timeago('1 %s' % t)))
                   for t in times if t != 'all')
    if 'all' in times:
        oldests['all'] = 0

    @mr_tools.dataspec_m_thing(('author_id', int),)
    def process(link):
        assert link.thing_type == 'link'

        timestamp = link.timestamp
        fname = make_fullname(Link, link.thing_id)

        # spam/deleted links are excluded from all listings
        if not link.spam and not link.deleted:
            author_id = link.author_id
            ups, downs = link.ups, link.downs
            sc = score(ups, downs)
            contr = controversy(ups, downs)
            h = _hot(ups, downs, timestamp)

            for tkey, oldest in oldests.iteritems():
                if timestamp > oldest:
                    yield ('user-top-%s-%d' % (tkey, author_id),
                           sc, timestamp, fname)
                    yield ('user-controversial-%s-%d' % (tkey, author_id),
                           contr, timestamp, fname)
                    # 'new' and 'hot' listings only exist for the 'all'
                    # interval; the shorter windows are derived elsewhere
                    if tkey == 'all':
                        yield ('user-new-%s-%d' % (tkey, author_id),
                               timestamp, timestamp, fname)
                        yield ('user-hot-%s-%d' % (tkey, author_id),
                               h, timestamp, fname)

    mr_tools.mr_map(process)
示例4: normalized_hot_cached
def normalized_hot_cached(sr_ids):
    """Build a merged hot listing across subreddits.

    Each subreddit's recent hot links are scored relative to that
    subreddit's own top hotness (so small subreddits can compete with
    big ones); the top two links get a randomized normalized score.
    Returns the fullnames of all links, best first.
    """
    scored = []
    for sr in Subreddit._byID(sr_ids, data = True, return_dict = False):
        fresh = [lnk for lnk in get_hot(sr)
                 if lnk._date > utils.timeago('%d day' % g.HOT_PAGE_AGE)]
        if not fresh:
            continue

        # never divide by less than 1 so hotness ratios stay bounded
        top_score = max(fresh[0]._hot, 1)
        leaders, trailers = fresh[:2], fresh[2:]

        if leaders:
            # shuffle the top two by assigning each a randomly chosen
            # normalized score from among the leaders
            normals = [lnk._hot / top_score for lnk in leaders]
            scored.extend((lnk, random.choice(normals)) for lnk in leaders)
        if trailers:
            scored.extend((lnk, lnk._hot / top_score) for lnk in trailers)

    scored.sort(key = lambda pair: (pair[1], pair[0]._hot), reverse = True)
    return [pair[0]._fullname for pair in scored]
示例5: time_listings
def time_listings(times = ('year','month','week','day','hour')):
    """Map-reduce mapper: emit per-subreddit and per-domain
    top/controversial listing entries for each interval in `times`,
    reading link records from stdin via mr_tools.
    """
    # cutoff (in epoch seconds) for each interval
    oldests = dict((t, epoch_seconds(timeago('1 %s' % t)))
                   for t in times)

    @mr_tools.dataspec_m_thing(("url", str),('sr_id', int),)
    def process(link):
        assert link.thing_type == 'link'

        timestamp = link.timestamp
        fname = make_fullname(Link, link.thing_id)

        # spam/deleted links are excluded from all listings
        if not link.spam and not link.deleted:
            sr_id = link.sr_id
            if link.url:
                domains = UrlParser(link.url).domain_permutations()
            else:
                domains = []
            ups, downs = link.ups, link.downs

            # PERF FIX: score/controversy depend only on ups/downs, so
            # compute them once instead of recomputing on every interval
            # iteration as the original did.
            sc = score(ups, downs)
            contr = controversy(ups, downs)

            for tkey, oldest in oldests.iteritems():
                if timestamp > oldest:
                    yield ('sr-top-%s-%d' % (tkey, sr_id),
                           sc, timestamp, fname)
                    yield ('sr-controversial-%s-%d' % (tkey, sr_id),
                           contr, timestamp, fname)

                    for domain in domains:
                        yield ('domain/top/%s/%s' % (tkey, domain),
                               sc, timestamp, fname)
                        yield ('domain/controversial/%s/%s' % (tkey, domain),
                               contr, timestamp, fname)

    mr_tools.mr_map(process)
示例6: add_props
def add_props(cls, user, wrapped):
    """Decorate wrapped Link items with presentation attributes
    (thumbnail, domain, saved/hidden/clicked flags, permalink,
    hide_score, nofollow) for rendering, using the request context `c`.
    """
    from r2.lib.count import incr_counts
    from r2.lib.media import thumbnail_url
    from r2.lib.utils import timeago

    # batch-fetch the user's saved/hidden state for all items at once
    saved = Link._saved(user, wrapped) if user else {}
    hidden = Link._hidden(user, wrapped) if user else {}
    #clicked = Link._clicked(user, wrapped) if user else {}
    clicked = {}

    for item in wrapped:
        # media preference: always for 'on', never for 'off'; promoted
        # items and 'subreddit' mode depend on the item's own settings
        show_media = (c.user.pref_media == 'on' or
                      (item.promoted and item.has_thumbnail
                       and c.user.pref_media != 'off') or
                      (c.user.pref_media == 'subreddit' and
                       item.subreddit.show_media))

        if not show_media:
            item.thumbnail = ""
        elif item.has_thumbnail:
            item.thumbnail = thumbnail_url(item)
        else:
            item.thumbnail = g.default_thumb

        # never display a negative score
        item.score = max(0, item.score)

        item.domain = (domain(item.url) if not item.is_self
                       else 'self.' + item.subreddit.name)
        if not hasattr(item,'top_link'):
            item.top_link = False
        item.urlprefix = ''
        item.saved = bool(saved.get((user, item, 'save')))
        item.hidden = bool(hidden.get((user, item, 'hide')))
        item.clicked = bool(clicked.get((user, item, 'click')))
        item.num = None
        item.score_fmt = Score.number_only
        item.permalink = item.make_permalink(item.subreddit)
        # self-posts link to their own comments page
        if item.is_self:
            item.url = item.make_permalink(item.subreddit, force_domain = True)

        # hide scores on promoted links and on fresh (<2h) links, except
        # for admins and the link's own author
        if c.user_is_admin:
            item.hide_score = False
        elif item.promoted:
            item.hide_score = True
        elif c.user == item.author:
            item.hide_score = False
        elif item._date > timeago("2 hours"):
            item.hide_score = True
        else:
            item.hide_score = False

        # rel=nofollow on low-scoring or spammy links, unless the viewer
        # is the author
        if c.user_is_loggedin and item.author._id == c.user._id:
            item.nofollow = False
        elif item.score <= 1 or item._spam or item.author._spam:
            item.nofollow = True
        else:
            item.nofollow = False

    if c.user_is_loggedin:
        incr_counts(wrapped)
示例7: share
def share(link, emails, from_name = "", reply_to = "", body = ""):
    """Queues a 'share link' email."""
    current_time = datetime.datetime.now(g.tz)
    # the configured delay window, as a timedelta
    delay = current_time - timeago(g.new_link_share_delay)
    # don't send before the link has aged past the share delay
    earliest = link._date + delay
    send_date = current_time if current_time >= earliest else earliest
    Email.handler.add_to_queue(c.user, link, emails, from_name, g.share_reply,
                               send_date, request.ip, Email.Kind.SHARE,
                               body = body, reply_to = reply_to)
示例8: _get_cutoffs
def _get_cutoffs(intervals):
    """Map each interval name to its epoch-seconds cutoff.

    The special interval "all" gets cutoff 0.0 (no limit); every other
    name is interpreted as "1 <interval>" ago.
    """
    return dict(
        (interval,
         0.0 if interval == "all"
         else epoch_seconds(timeago("1 %s" % interval)))
        for interval in intervals)
示例9: fix_all_broken_things
def fix_all_broken_things(delete=False):
    """Scan Links and Comments for things with missing required
    attributes and optionally delete them.
    """
    from r2.models import Link, Comment

    # 2009-07-21 is the first broken thing at the time of writing.
    from_time = datetime.datetime(2009, 7, 21, tzinfo=g.tz)
    # stop just shy of "now" to avoid racing in-flight writes
    to_time = utils.timeago("60 seconds")

    checks = (
        (Link, ("author_id", "sr_id")),
        (Comment, ("author_id", "sr_id", "body", "link_id")),
    )
    for thing_cls, attrs in checks:
        utils.find_broken_things(thing_cls, attrs, from_time, to_time,
                                 delete=delete)
示例10: simplified_timesince
def simplified_timesince(date, include_tense=True):
    """Render how long ago `date` was; anything under a minute reads
    "just now". With include_tense, append "ago" via the translated
    format string.
    """
    if date > timeago("1 minute"):
        return _("just now")
    elapsed = timesince(date)
    return _("%s ago") % elapsed if include_tense else elapsed
示例11: simplified_timesince
def simplified_timesince(date, include_tense=True):
    """Render how long ago `date` was; anything under a minute reads
    "just now". With include_tense, the separately-translated "ago"
    is appended with a space.
    """
    if date > timeago("1 minute"):
        return _("just now")
    elapsed = timesince(date)
    if not include_tense:
        return elapsed
    return " ".join([elapsed, _("ago")])
示例12: keep_fn
def keep_fn(self):
    """For merged time-listings, don't show items that are too old
    (this can happen when mr_top hasn't run in a while)"""
    if self.time != 'all' and c.default_sr:
        # cutoff is captured once here, then closed over by keep()
        oldest = timeago('1 %s' % (str(self.time),))
        def keep(item):
            return item._date > oldest and item.keep_item(item)
        return keep
    else:
        # no extra age filtering needed; defer to the default keep_fn
        return ListingController.keep_fn(self)
示例13: port_cassahides
def port_cassahides():
    """One-off migration: copy the last week's SaveHide "hide" rows
    into Cassandra, newest first, with progress output.
    """
    from r2.models import SaveHide, CassandraHide
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.db.operators import desc
    from r2.lib.utils import fetch_things2, timeago, progress

    query = SaveHide._query(SaveHide.c._date > timeago("1 week"),
                            SaveHide.c._name == "hide",
                            sort=desc("_date"))
    # estimate drives the progress display only
    hides = progress(fetch_things2(query), estimate=1953374)
    for hide in hides:
        CassandraHide._hide(hide._thing1, hide._thing2,
                            write_consistency_level=CL.ONE)
示例14: all_comments
def all_comments():
    """Generator over recent well-scored comments, paging through the
    query 200 at a time until exhausted.
    """
    # NOTE(review): sr_id != 6 excludes one specific subreddit by
    # hard-coded id — presumably intentional; confirm which one.
    q = Comment._query(Comment.c._score > 2,
                       Comment.c.sr_id != 6,
                       Comment.c._date > timeago('1 weeks'),
                       sort = desc('_date'),
                       limit = 200,
                       data = True)
    comments = list(q)
    while comments:
        for l in comments:
            yield l
        # deliberately relies on `l` leaking out of the for-loop: it is
        # the last comment of the page just yielded, used as the cursor
        # for the next page
        comments = list(q._after(l))
示例15: vote_stats
def vote_stats(config):
    """Return counts of link votes, comment votes, and their total cast
    in the past day.
    """
    stats = {}
    rels = (('link', Vote.rel(Account, Link)),
            ('comment', Vote.rel(Account, Comment)))
    for label, rel in rels:
        # first element of the rel tables is the relation table itself
        table = get_rel_table(rel._type_id)[0]
        count_q = table.count(table.c.date > timeago('1 day'))
        stats[label + '_vote_count_past_day'] = count_q.execute().fetchone()[0]
    stats['vote_count_past_day'] = (stats['link_vote_count_past_day'] +
                                    stats['comment_vote_count_past_day'])
    return stats