

Python Reddit.get_unread Method Code Examples

This article collects typical usage examples of the Python method praw.Reddit.get_unread. If you are looking for what Reddit.get_unread does and how to use it in practice, the selected examples below may help; you can also explore other usage examples of praw.Reddit.


Two code examples of the Reddit.get_unread method are shown below, sorted by popularity.
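Both examples use the legacy PRAW 3.x API, in which Reddit.get_unread() yields the unread items (comment replies and private messages) in the logged-in account's inbox. As a minimal sketch of the pattern they share (the user agent and credentials are placeholders):

from praw import Reddit

# Legacy PRAW 3.x style, as in the examples below; credentials are placeholders.
r = Reddit(user_agent="get_unread demo")
r.login("my_username", "my_password")

for item in r.get_unread():
    print(item.body)       # item is an unread comment reply or private message
    item.mark_as_read()    # remove it from the unread listing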

Example 1: __init__

# Required import: from praw import Reddit [as alias]
# Or: from praw.Reddit import get_unread [as alias]
# Additional imports used by this snippet (the Cleverbot import path is assumed
# from the original project; USERAGENT and SUMMON are constants defined
# elsewhere in reddit_cleverbot.py and are not shown here):
import random
import time
from copy import copy
from threading import Thread

import praw
from praw import Reddit
from cleverbot import Cleverbot


class Reddit_Cleverbot:

  def __init__(self, username, password, subreddit='all', useragent=USERAGENT):
    self.username = username
    self.password = password
    self.useragent = useragent
    self.subreddit = subreddit
    self.reddit = Reddit(useragent)
    self.reddit.login(username, password)
    self.stopped = True
    self.thread = None
    self.done = set()
    self.conversations = dict()

  def random_hot_comment(self):
    sub = self.reddit.get_subreddit(self.subreddit)
    hot = [post for post in sub.get_hot(limit=25)]
    post = random.choice(hot)
    comments = praw.helpers.flatten_tree(post.comments)
    # filter the comments to remove already-replied ones
    comments = [comment for comment in comments if isinstance(comment, praw.objects.Comment) and comment.id not in self.done]
    return random.choice(comments[0:100])

  def random_comment(self):
    comments = self.reddit.get_comments(self.subreddit)
    # filter the comments to remove already-replied ones
    comments = [comment for comment in comments if comment.id not in self.done]
    return random.choice(comments)

  def get_summoned_comments(self):
    comments = self.reddit.get_comments(self.subreddit)
    children = [comment for comment in comments
      if comment.id not in self.done and SUMMON in comment.body]
    # print "--> " + str(len(children)) + " summons found!"
    return [self.reddit.get_info(thing_id=comment.parent_id) for comment in children]

  def reply(self, comment):
    if self.reddit.get_info(thing_id=comment.parent_id).author.name == self.username:
      # TODO: handle a threaded conversation over restarts. will need a DB. ugh
      pass
    if comment.parent_id in self.conversations:
      cleverbot = self.conversations[comment.parent_id]
    else:
      cleverbot = Cleverbot()
    response = cleverbot.ask(comment.body)
    post = comment.reply(response)
    self.done.add(comment.id)
    self.conversations[post.id] = copy(cleverbot)

  def reply_unread(self, interval):
    for item in self.reddit.get_unread():
      if item.parent_id not in self.conversations:
        # Conversation state is lost across restarts; reply() will fall back to
        # starting a fresh Cleverbot conversation for this item.
        print("Could not find conversation! Ignoring for now.")
      self.reply(item)
      item.mark_as_read()
      time.sleep(interval)

  def reply_to_summons(self):
    summons = self.get_summoned_comments()
    for comment in summons:
      self.reply(comment)

  def _run_random(self, interval):
    while not self.stopped:
      self.reply_unread(interval)
      self.reply(self.random_hot_comment())
      time.sleep(interval)

  def run_random(self, interval):
    self.stopped = False
    self.thread = Thread(target=self._run_random, args=(interval,))
    self.thread.start()

  def stop(self):
    self.stopped = True
    #self.thread.join()
Author: damianw | Project: Reddit_Cleverbot | Lines: 79 | Source: reddit_cleverbot.py
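A hypothetical way to drive the class above, inferred from its constructor and run_random() method (account name, password, subreddit, and interval are all placeholders):

# Hypothetical usage sketch; credentials and values are placeholders.
bot = Reddit_Cleverbot("my_bot_account", "my_password", subreddit="test")
bot.run_random(60)   # poll unread items and random hot comments every 60 seconds
# ... later ...
bot.stop()           # the background thread stops after its current iteration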

Example 2: Reddit

# Required import: from praw import Reddit [as alias]
# Or: from praw.Reddit import get_unread [as alias]
#!/usr/bin/env python
"""Returns the number of message replies / private messages of a Reddit user"""

from sys import exit
from configparser import ConfigParser

from praw import Reddit


r = Reddit(user_agent="Awesome WM Mail Check")

config = ConfigParser()
config.read('user.ini')

if config and 'reddit' in config:
    r.login(config['reddit']['username'], config['reddit']['password'])
else:
    r.login()  # Prompt user for password.

# Count the unread inbox items and report the count via the exit status,
# so the caller (e.g. an Awesome WM widget) can read it without parsing output.
count = 0
for _ in r.get_unread():
    count += 1

exit(count)
Author: alprs | Project: awesome-widgets | Lines: 26 | Source: check_mail.py
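Both examples rely on the legacy PRAW 3.x API; Reddit.login() and Reddit.get_unread() were removed in PRAW 4. As a rough sketch only (not part of the examples above), an equivalent unread count with a current PRAW release would go through the inbox object; the script-app OAuth credentials below are placeholders:

import praw

# Sketch assuming a script-type OAuth app; all credentials are placeholders.
reddit = praw.Reddit(client_id="...",
                     client_secret="...",
                     username="my_username",
                     password="my_password",
                     user_agent="unread count demo")

# In PRAW 4+ the unread listing lives on reddit.inbox rather than on Reddit itself.
count = sum(1 for _ in reddit.inbox.unread(limit=None))
print(count)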


Note: The praw.Reddit.get_unread examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution or use of the code should follow the corresponding project's license. Do not reproduce this article without permission.