当前位置: 首页>>代码示例>>Python>>正文


Python cfg_file.get函数代码示例

本文整理汇总了Python中models.cfg_file.get函数的典型用法代码示例。如果您正苦于以下问题：Python get函数的具体用法？Python get怎么用？Python get使用的例子？那么恭喜您，这里精选的函数代码示例或许可以为您提供帮助。


在下文中一共展示了get函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: main

def main():
    """Log in to reddit, then run moderation actions on every enabled
    subreddit and on each enabled network of subreddits."""
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = reddit.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))
    except Exception as e:
        logging.error('  ERROR: %s', e)
        # nothing below can work without a logged-in session; the original
        # fell through here and crashed on an unbound `r`
        return

    mod_subreddit = r.get_subreddit('mod')


    #
    # Do actions on individual subreddits
    #

    # build a lowercase-name -> Subreddit lookup of all enabled subreddits
    subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
    sr_dict = dict()
    for subreddit in subreddits:
        sr_dict[subreddit.name.lower()] = subreddit

    # do actions on subreddits
    do_subreddits(mod_subreddit, sr_dict, start_utc)


    #
    # Do actions on networks
    #
    mods_checked = 0

    # get network list
    networks = Network.query.filter(Network.enabled == True).all()

    # do actions on each network
    for network in networks:
        # get subreddits in network
        network_subs = Subreddit.query.filter(Subreddit.network == network.id, Subreddit.enabled == True).all()
        network_sr_dict = dict()
        for subreddit in network_subs:
            network_sr_dict[subreddit.name.lower()] = subreddit

        # do subreddit actions on subreddits
        do_subreddits(mod_subreddit, network_sr_dict, start_utc)

        # check network mods once per network; the original repeated the
        # identical check_network_moderators(network, network_sr_dict) call
        # for every subreddit in the network, inflating mods_checked
        logging.info('Checking network moderators')
        if any(sub.network for sub in network_sr_dict.itervalues()):
            mods_checked += check_network_moderators(network, network_sr_dict)

    logging.info('  Checked %s networks, added %s moderators', len(networks), mods_checked)

    logging.info('Completed full run in %s', elapsed_since(start_time))
开发者ID:Dakta,项目名称:DAutoModerator,代码行数:60,代码来源:modbot.py

示例2: main

def main():
    """Run one full moderation pass: check reports, spam, new submissions,
    new comments, then respond to modmail and the reports-html page."""
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = reddit.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))

        # lowercase-name -> Subreddit lookup of all enabled subreddits
        subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
        sr_dict = dict()
        for subreddit in subreddits:
            sr_dict[subreddit.name.lower()] = subreddit
        mod_subreddit = r.get_subreddit('mod')
    except Exception as e:
        logging.error('  ERROR: %s', e)
        # everything below needs mod_subreddit/sr_dict; the original fell
        # through here and crashed with a NameError
        return

    # check reports
    items = mod_subreddit.get_reports(limit=1000)
    stop_time = datetime.utcnow() - REPORT_BACKLOG_LIMIT
    check_items('report', items, sr_dict, stop_time)

    # check spam, stopping at the newest spam item seen on a previous run
    items = mod_subreddit.get_spam(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_spam))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('spam', items, sr_dict, stop_time)

    # check new submissions
    items = mod_subreddit.get_new_by_date(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_submission))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('submission', items, sr_dict, stop_time)

    # check new comments via a single multireddit of all subs that want
    # full comment checking (not just reported comments)
    comment_multi = '+'.join([s.name for s in subreddits
                              if not s.reported_comments_only])
    if comment_multi:
        comment_multi_sr = r.get_subreddit(comment_multi)
        items = comment_multi_sr.get_comments(limit=1000)
        stop_time = (db.session.query(func.max(Subreddit.last_comment))
                     .filter(Subreddit.enabled == True).one()[0])
        check_items('comment', items, sr_dict, stop_time)

    # respond to modmail; best-effort, a failure shouldn't kill the run
    try:
        respond_to_modmail(r.user.get_modmail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    # check reports html; likewise best-effort
    try:
        check_reports_html(sr_dict)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s', elapsed_since(start_time))
开发者ID:LateNitePie,项目名称:AutoModerator,代码行数:60,代码来源:modbot.py

示例3: main

def main():
    """Main bot loop: log in, then poll the moderation queues forever,
    re-initializing when moderator permissions change."""
    global r
    logging.config.fileConfig(path_to_cfg)
    # the below only works with re2
    # re.set_fallback_notification(re.FALLBACK_EXCEPTION)

    # which queues to check and the method that fetches each one
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    # retry until login and initialization succeed
    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict, cond_dict = initialize(queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            # back off before retrying so a persistent failure doesn't
            # hammer reddit with login attempts (the original retried
            # immediately in a tight loop)
            sleep(5)

    run_counter = 0
    while True:
        run_counter += 1
        try:
            # only check reports every 10 runs
            # sleep afterwards in case ^C is needed
            if run_counter % 10 == 0:
                check_queues(queue_funcs, sr_dict, cond_dict)

                Condition.clear_standard_cache()
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
                logging.info('Sleeping ({0})'.format(datetime.now()))
                sleep(5)
                run_counter = 0
            else:
                # skip the report queue on ordinary runs
                check_queues({q: queue_funcs[q]
                              for q in queue_funcs
                              if q != 'report'},
                             sr_dict, cond_dict)
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            # a 403 means mod permissions changed; reload everything
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict, cond_dict = initialize(queue_funcs.keys())
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
开发者ID:sfwpn,项目名称:PornOverlord,代码行数:59,代码来源:automoderator.py

示例4: main

def main():
    """Ban-timer loop: log in, then repeatedly lift expired bans and
    process incoming messages."""
    global r

    logging.config.fileConfig(path_to_cfg)

    # retry until login and initialization succeed
    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()
            settings_dict = {subreddit: update_from_wiki(sr, cfg_file.get('reddit', 'owner_username')) for subreddit, sr in sr_dict.iteritems()}
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            traceback.print_exc(file=sys.stdout)
            # back off before retrying so a persistent failure doesn't
            # hammer reddit with login attempts
            sleep(5)

    while True:
        try:
            # lift any bans whose unban time has passed
            bans_to_remove = session.query(Ban).filter(Ban.unban_after <= datetime.utcnow()).all()
            logging.debug("\nChecking due bans")

            for ban in bans_to_remove:
                logging.debug("  Unbanning /u/{0} from /r/{1}".format(ban.user, ban.subreddit))
                sr = sr_dict[ban.subreddit]
                sr.remove_ban(ban.user)
                session.add(Log(ban.user, ban.subreddit, 'unban'))
                session.delete(ban)

            sleep(5)
            logging.info("\nLOOP\n")

            updated_srs = process_messages(sr_dict, settings_dict)
            if updated_srs:
                if any(subreddit not in sr_dict.keys() for subreddit in updated_srs):
                    # settings and mod subs out of sync, reload everything
                    # NOTE(review): this copies praw subreddit objects into
                    # settings_dict rather than wiki settings — confirm this
                    # is intentional before relying on settings_dict values
                    settings_dict = sr_dict.copy()
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)

                settings_dict.update(updated_srs)

        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                praw.requests.HTTPError) as e:
            # a 403 means mod permissions changed; reload the sub list
            if not isinstance(e, praw.requests.HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            # traceback is already imported at module level (used above);
            # the original redundantly re-imported it here
            traceback.print_exc()
开发者ID:Dakta,项目名称:ban_timer,代码行数:56,代码来源:ban_timer.py

示例5: main

def main():
    # we get cfg_file from models.py
    # see import at the top
    
    username = cfg_file.get('reddit', 'username')
    password = cfg_file.get('reddit', 'password')
    
    print "Logging in..."
    comments = Monitor(username, password)
    print "  Success!"
    
    comments.monitor_comments()
开发者ID:Dakta,项目名称:reddit-scraper,代码行数:12,代码来源:scrape_comments.py

示例6: main

def main():
    """Run one pass: check all queues, send scheduled messages, and
    respond to modmail, logging request counts at the end."""
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))
        log_request('login')

        subreddits = session.query(Subreddit).filter(
                        Subreddit.enabled == True).all()
        logging.info('Getting list of moderated subreddits')
        modded_subs = list([s.display_name.lower()
                            for s in r.get_my_moderation(limit=None)])
        # one listing request covers up to 100 subs
        log_request('mod_subs', len(modded_subs) / 100 + 1)

        # build sr_dict including only subs both in db and modded_subs
        sr_dict = dict()
        cond_dict = dict()
        ms_dict = dict()
        for subreddit in subreddits:
            if subreddit.name.lower() in modded_subs:
                sr_dict[subreddit.name.lower()] = subreddit
                conditions = subreddit.conditions.all()
                cond_dict[subreddit.name.lower()] = {
                    'report': filter_conditions('report', conditions),
                    'spam': filter_conditions('spam', conditions),
                    'submission': filter_conditions('submission', conditions),
                    'comment': filter_conditions('comment', conditions) }
                message_schedules = subreddit.message_schedules.all()
                ms_dict[subreddit.name.lower()] = message_schedules

    except Exception as e:
        logging.error('  ERROR: %s', e)
        # the checks below need sr_dict/cond_dict/ms_dict; the original
        # fell through here and crashed with a NameError
        return

    check_queues(sr_dict, cond_dict)

    check_message_schedules(sr_dict, ms_dict)

    # respond to modmail; best-effort, a failure shouldn't kill the run
    try:
        respond_to_modmail(r.get_mod_mail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s (%s due to reddit requests - %s)',
                    elapsed_since(start_time),
                    timedelta(seconds=sum(log_request.counts.values())*2),
                    log_request.counts)
开发者ID:andytuba,项目名称:PeriodicPosterBot,代码行数:53,代码来源:modbot.py

示例7: build_message

    def build_message(self, text, item, match,
                      disclaimer=False, permalink=False, intro=False):
        """Builds a message/comment for the bot to post or send.

        Optionally prepends the configured intro, appends the configured
        disclaimer, and ensures a {{permalink}} placeholder is present,
        then fills in all placeholders from item/match.
        """
        if intro:
            message = cfg_file.get('reddit', 'intro') + " " + text
        else:
            message = text
        if disclaimer:
            message = message + '\n\n' + cfg_file.get('reddit', 'disclaimer')
        if permalink and '{{permalink}}' not in message:
            message = '{{permalink}}\n\n' + message
        return replace_placeholders(message, item, match)
开发者ID:sfwpn,项目名称:PornOverlord,代码行数:15,代码来源:automoderator.py

示例8: check_queues

def check_queues(queue_funcs, sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r

    for queue, fetch_name in queue_funcs.items():
        # only query subreddits that have conditions for this queue
        active_subs = [name for name in sr_dict if cond_dict[name][queue]]
        if not active_subs:
            continue

        # fetch and process the items for each multireddit group
        for multi in build_multireddit_groups(active_subs):
            if queue == 'report':
                backlog_hours = int(cfg_file.get('reddit',
                                                 'report_backlog_limit_hours'))
                stop_time = datetime.utcnow() - timedelta(hours=backlog_hours)
            else:
                # newest already-processed timestamp among subs in the group
                stop_time = max(getattr(sr, 'last_'+queue)
                                for sr in sr_dict.values()
                                if sr.name in multi)

            queue_subreddit = r.get_subreddit('+'.join(multi))
            if queue_subreddit:
                fetch = getattr(queue_subreddit, fetch_name)
                items = fetch(limit=None)
                check_items(queue, items, stop_time, sr_dict, cond_dict)
开发者ID:goldguy81,项目名称:AutoModerator,代码行数:26,代码来源:automoderator.py

示例9: respond_to_modmail

def respond_to_modmail(modmail, start_time):
    """Responds to modmail if any submitters sent one before approval.

    For each recent automatic approval, scans the modmail listing for an
    unanswered message from the approved submitter to that subreddit and
    replies explaining the approval was automatic. Modmail items are
    cached as they are fetched so later approvals can search the
    already-seen messages without re-fetching.
    """
    # modmail messages already pulled from the (lazy) modmail listing
    cache = list()
    # respond to any modmail sent in the configured window of time
    time_window = timedelta(minutes=int(cfg_file.get('reddit',
                                   'modmail_response_window_mins')))
    approvals = session.query(ActionLog).filter(
                    and_(ActionLog.action == 'approve',
                         ActionLog.action_time >= start_time - time_window)
                    ).all()

    for item in approvals:
        found = None   # the matching modmail message, if any
        done = False   # True once we've scanned past this approval's time

    # NOTE(review): the matching below assumes the modmail listing is
    # newest-first, so a message older than the approval ends the search;
    # confirm against the modmail API ordering.
        for i in cache:
            if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                done = True
                break
            # match: sent to this subreddit, by the approved user, unanswered
            if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                    i.author.name == item.user and
                    not i.replies):
                found = i
                break

        if not found and not done:
            # keep pulling from the listing, caching each message
            for i in modmail:
                cache.append(i)
                if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                    break
                if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                        i.author.name == item.user and
                        not i.replies):
                    found = i
                    break

        if found:
            found.reply('Your submission has been approved automatically by '+
                cfg_file.get('reddit', 'username')+'. For future submissions '
                'please wait at least '+cfg_file.get('reddit',
                'modmail_response_window_mins')+' minutes before messaging '
                'the mods, this post would have been approved automatically '
                'even without you sending this message.')
            log_request('modmail')

    # one listing request covers up to 100 messages
    log_request('modmail_listing', len(cache) / 100 + 1)
开发者ID:andytuba,项目名称:PeriodicPosterBot,代码行数:46,代码来源:modbot.py

示例10: send_error_message

def send_error_message(user, sr_name, error):
    """Sends an error message to the user if a wiki update failed."""
    global r
    subject = 'Error updating from wiki in /r/{0}'.format(sr_name)
    body = ('### Error updating from [wiki configuration in /r/{0}]'
            '(http://www.reddit.com/r/{0}/wiki/{1}):\n\n---\n\n'
            '{2}\n\n---\n\n[View configuration documentation](https://'
            'github.com/Deimos/AutoModerator/wiki/Wiki-Configuration)'
            ).format(sr_name,
                     cfg_file.get('reddit', 'wiki_page_name'),
                     error)
    r.send_message(user, subject, body)
开发者ID:goldguy81,项目名称:AutoModerator,代码行数:12,代码来源:automoderator.py

示例11: get_user_info

def get_user_info(username, condition):
    """Gets user info from cache, or from reddit if not cached or expired.

    Returns a redditor object (possibly populated from the local cache),
    or None if the account no longer exists on reddit.
    """
    global r

    try:
        cache_row = (session.query(UserCache)
                        .filter(UserCache.user == username)
                        .one())
        # see if the condition includes a check that expires
        # (gold status and karma change over time; account age does not)
        if (condition.is_gold or
                condition.link_karma or
                condition.comment_karma or
                condition.combined_karma):
            expiry = timedelta(hours=int(cfg_file.get('reddit',
                                           'user_cache_expiry_hours')))
        else:
            expiry = None

        # if not past the expiry, return cached data
        if (not expiry or
                datetime.utcnow() - cache_row.info_last_check < expiry):
            # fetch=False builds the object without hitting the API;
            # we then fill it in from the cached row
            cached = r.get_redditor(username, fetch=False)
            cached.is_gold = cache_row.is_gold
            cached.created_utc = timegm(cache_row.created_utc.timetuple())
            cached.link_karma = cache_row.link_karma
            cached.comment_karma = cache_row.comment_karma

            return cached
    except NoResultFound:
        # no cache entry yet; create one to be filled in below
        cache_row = UserCache()
        cache_row.user = username
        session.add(cache_row)

    # fetch the user's info from reddit
    try:
        user = r.get_redditor(username)
        log_request('user')

        # save to cache
        cache_row.is_gold = user.is_gold
        cache_row.created_utc = datetime.utcfromtimestamp(user.created_utc)
        cache_row.link_karma = user.link_karma
        cache_row.comment_karma = user.comment_karma
        cache_row.info_last_check = datetime.utcnow()
        session.commit()
    except urllib2.HTTPError as e:
        if e.code == 404:
            # weird case where the user is deleted but API still shows username
            return None
        else:
            raise

    return user
开发者ID:andytuba,项目名称:PeriodicPosterBot,代码行数:53,代码来源:modbot.py

示例12: main

def main():
    r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
    r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))

    # update exclude_banned_modqueue values for subreddits
    subreddits = (session.query(Subreddit)
                         .filter(Subreddit.enabled == True)
                         .all())
    for sr in subreddits:
        try:
            settings = r.get_subreddit(sr.name).get_settings()
            sr.exclude_banned_modqueue = settings['exclude_banned_modqueue']
        except Exception as e:
            sr.exclude_banned_modqueue = False

    session.commit()

    # delete old log entries
    log_retention_days = int(cfg_file.get('database', 'log_retention_days'))
    log_cutoff = datetime.utcnow() - timedelta(days=log_retention_days)
    deleted = session.query(Log).filter(Log.datetime < log_cutoff).delete()
    session.commit()
    print 'Deleted {0} log rows'.format(deleted)
开发者ID:91pavan,项目名称:AutoModerator,代码行数:24,代码来源:maintenance.py

示例13: check_queues

def _split_into_multireddits(subreddits, queue):
    """Group subreddit names into '+'-joined multireddit strings.

    Requests fail when the multireddit string gets too long (~3000 chars),
    so groups are capped at 3000 characters; comment checks are further
    capped at 40 subreddits per group.
    """
    multireddits = []
    current_multi = []
    current_len = 0
    for sub in subreddits:
        if (current_len > 3000 or
                queue == 'comment' and len(current_multi) >= 40):
            multireddits.append('+'.join(current_multi))
            current_multi = []
            current_len = 0
        current_multi.append(sub)
        current_len += len(sub) + 1  # +1 for the joining '+'
    multireddits.append('+'.join(current_multi))
    return multireddits


def check_queues(sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r

    for queue in QUEUES:
        subreddits = get_subreddits_for_queue(sr_dict, cond_dict, queue)
        if not subreddits:
            continue

        # determine how far back in the listing to check
        if queue == 'report':
            report_backlog_limit = timedelta(hours=int(cfg_file.get('reddit',
                                                'report_backlog_limit_hours')))
            stop_time = datetime.utcnow() - report_backlog_limit
        else:
            last_attr = getattr(Subreddit, 'last_'+queue)
            stop_time = (session.query(func.max(last_attr))
                         .filter(Subreddit.enabled == True).one()[0])

        # fetch and process the items for each multireddit
        for multi in _split_into_multireddits(subreddits, queue):
            queue_subreddit = r.get_subreddit(multi)
            if queue_subreddit:
                queue_method = getattr(queue_subreddit, QUEUES[queue])
                items = queue_method(limit=1000)
                check_items(queue, items, sr_dict, cond_dict, stop_time)

示例14: respond_to_modmail

def respond_to_modmail(modmail, start_time):
    """Responds to modmail if any submitters sent one before approval.

    For each automatic approval since start_time, scans the modmail
    listing for an unanswered message from the approved submitter to
    that subreddit and replies explaining the approval was automatic.
    Modmail items are cached as they are fetched so later approvals can
    search already-seen messages without re-fetching.
    """
    # modmail messages already pulled from the (lazy) modmail listing
    cache = list()
    approvals = ActionLog.query.filter(
                    and_(ActionLog.action == 'approve',
                         ActionLog.action_time >= start_time)).all()

    for item in approvals:
        found = None   # the matching modmail message, if any
        done = False   # True once we've scanned past this approval's time

    # NOTE(review): the matching below assumes the modmail listing is
    # newest-first, so a message older than the approval ends the search;
    # confirm against the modmail API ordering.
        for i in cache:
            if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                done = True
                break
            # match: sent to this subreddit, by the approved user, unanswered
            if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                    i.author.name == item.user and
                    not i.replies):
                found = i
                break

        if not found and not done:
            # keep pulling from the listing, caching each message
            for i in modmail:
                cache.append(i)
                if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                    break
                if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                        i.author.name == item.user and
                        not i.replies):
                    found = i
                    break

        if found:
            found.reply('Your submission has been approved automatically by '+
                cfg_file.get('reddit', 'username')+'. For future submissions '
                'please wait at least 5 minutes before messaging the mods, '
                'this post would have been approved automatically even '
                'without you sending this message.')
开发者ID:LateNitePie,项目名称:AutoModerator,代码行数:38,代码来源:modbot.py

示例15: main

def main():
    """Main bot loop: log in, then poll the moderation queues forever,
    checking reports on a slower configurable period."""
    global r
    logging.config.fileConfig(path_to_cfg)

    # which queues to check and the function to call
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    # retry until login and initialization succeed
    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()
            Condition.update_standards()
            cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    # reports are only checked every reports_check_period_mins minutes
    reports_mins = int(cfg_file.get('reddit', 'reports_check_period_mins'))
    reports_check_period = timedelta(minutes=reports_mins)
    last_reports_check = time()

    while True:
        try:
            # if the standard conditions have changed, reinit all conditions
            if Condition.update_standards():
                logging.info('Updating standard conditions from database')
                cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())

            # check reports if past checking period
            if elapsed_since(last_reports_check) > reports_check_period:
                last_reports_check = time()
                check_queues({'report': queue_funcs['report']},
                             sr_dict, cond_dict)
                             
            # check all non-report queues every iteration
            check_queues({q: queue_funcs[q]
                          for q in queue_funcs
                          if q != 'report'},
                         sr_dict, cond_dict)

            # apply any wiki/message-driven configuration updates
            updated_srs = process_messages()
            if updated_srs:
                if any(sr not in sr_dict for sr in updated_srs):
                    # an unknown subreddit appeared; reload mod subs too
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)
                for sr in updated_srs:
                    update_conditions_for_sr(cond_dict,
                                             queue_funcs.keys(),
                                             sr_dict[sr])
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            # a 403 means mod permissions changed; reload the sub list
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
开发者ID:goldguy81,项目名称:AutoModerator,代码行数:67,代码来源:automoderator.py


注:本文中的models.cfg_file.get函数示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。