

Python g.reset_caches function code examples

This article collects typical usage examples of the pylons.g.reset_caches function in Python. If you have been wondering what exactly reset_caches does, how to call it, or what real-world usage looks like, the hand-picked examples below should help.


Below are 15 code examples of the reset_caches function, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python examples.
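
A quick sketch before the examples: the common thread in all of them is that reset_caches() is called at the start of every request (and before every queue item or batch iteration) so that request-local caches never leak data or memory between units of work. The following is a minimal, self-contained illustration of that lifecycle; the Globals class is a stub standing in for the real pylons g object, not reddit's implementation.

class Globals(object):
    """Stand-in for pylons' g; the real reset_caches() rebuilds cache chains."""
    def __init__(self):
        self.reset_caches()

    def reset_caches(self):
        # a plain dict is enough to show the lifecycle
        self.cache = {}

g = Globals()

def handle_request(path):
    g.reset_caches()                      # start each request with empty caches
    g.cache[path] = "rendered %s" % path  # pretend this is an expensive render
    return g.cache[path]

print handle_request("/r/python")
print handle_request("/r/redditdev")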

Example 1: handle_items

def handle_items(queue, callback, ack = True, limit = 1, drain = False,
                 verbose=True, sleep_time = 1):
    """Call callback() on every item in a particular queue. If the
       connection to the queue is lost, it will die. Intended to be
       used as a long-running process."""
    from pylons import c

    chan = connection_manager.get_channel()
    countdown = None

    while True:

        # NB: None != 0, so we don't need an "is not None" check here
        if countdown == 0:
            break

        msg = chan.basic_get(queue)
        if not msg and drain:
            return
        elif not msg:
            time.sleep(sleep_time)
            continue

        if countdown is None and drain and 'message_count' in msg.delivery_info:
            countdown = 1 + msg.delivery_info['message_count']

        g.reset_caches()
        c.use_write_db = {}

        items = []

        while msg and countdown != 0:
            items.append(msg)
            if countdown is not None:
                countdown -= 1
            if len(items) >= limit:
                break # the innermost loop only
            msg = chan.basic_get(queue)

        try:
            count_str = ''
            if 'message_count' in items[-1].delivery_info:
                # the count from the last message, if the count is
                # available
                count_str = '(%d remaining)' % items[-1].delivery_info['message_count']
            if verbose:
                print "%s: %d items %s" % (queue, len(items), count_str)
            callback(items, chan)

            if ack:
                # ack *all* outstanding messages
                chan.basic_ack(0, multiple=True)

            # flush any log messages printed by the callback
            sys.stdout.flush()
        except:
            for item in items:
                # explicitly reject the items that we've not processed
                chan.basic_reject(item.delivery_tag, requeue = True)
            raise
Developer: constantAmateur, Project: sciteit, Lines of code: 60, Source file: amqp.py
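
A hypothetical way to drive the handle_items loop above (illustrative only: it assumes the module is importable as r2.lib.amqp, that a broker connection is already configured, and that a queue named 'scraper_q' exists -- substitute your own queue name and callback):

from r2.lib import amqp   # assumption: reddit's amqp helper module

def process_batch(items, chan):
    # items is a list of AMQP messages; .body carries the payload
    for item in items:
        print "got %r" % item.body

# drain=True makes the loop return once the queue is empty instead of blocking
amqp.handle_items('scraper_q', process_batch, limit=10, drain=True)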

Example 2: pre

    def pre(self):

        c.start_time = datetime.now(g.tz)
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.host in g.secure_domains

        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False
        
        c.show_wiki_actions = False
        
        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()
Developer: CryptArc, Project: reddit, Lines of code: 25, Source file: reddit_base.py

Example 3: pre

    def pre(self):
        action = request.environ["pylons.routes_dict"].get("action")
        if action:
            c.request_timer = g.stats.get_timer(request_timer_name(action))
        else:
            c.request_timer = SimpleSillyStub()

        c.response_wrapper = None
        c.start_time = datetime.now(g.tz)
        c.request_timer.start()
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.host in g.secure_domains

        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()
        c.request_timer.intermediate("minimal-pre")
Developer: briankiewel, Project: reddit, Lines of code: 31, Source file: reddit_base.py

Example 4: pre

    def pre(self):
        action = request.environ["pylons.routes_dict"].get("action")
        if action:
            if not self._get_action_handler():
                action = 'invalid'
            controller = request.environ["pylons.routes_dict"]["controller"]
            key = "{}.{}".format(controller, action)
            c.request_timer = g.stats.get_timer(request_timer_name(key))
        else:
            c.request_timer = SimpleSillyStub()

        c.response_wrapper = None
        c.start_time = datetime.now(g.tz)
        c.request_timer.start()
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.environ["wsgi.url_scheme"] == "https"
        c.request_origin = request.host_url

        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False
        c.allow_framing = False

        c.cdn_cacheable = (request.via_cdn and
                           g.login_cookie not in request.cookies)

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()

        c.request_timer.intermediate("minimal-pre")
        # True/False forces. None updates for most non-POST requests
        c.update_last_visit = None

        g.stats.count_string('user_agents', request.user_agent)

        if not self.defer_ratelimiting:
            self.run_sitewide_ratelimits()
            c.request_timer.intermediate("minimal-ratelimits")

        hooks.get_hook("reddit.request.minimal_begin").call()
Developer: Bebetz, Project: reddit, Lines of code: 51, Source file: reddit_base.py

Example 5: pre

    def pre(self):
        action = request.environ["pylons.routes_dict"].get("action")
        if action:
            if not self._get_action_handler():
                action = 'invalid'
            c.request_timer = g.stats.get_timer(request_timer_name(action))
        else:
            c.request_timer = SimpleSillyStub()

        c.response_wrapper = None
        c.start_time = datetime.now(g.tz)
        c.request_timer.start()
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.host in g.secure_domains

        # wsgi.url_scheme is used in generating absolute urls, such as by webob
        # for translating some of our relative-url redirects to rfc compliant
        # absolute-url ones. TODO: consider using one of webob's methods of
        # setting wsgi.url_scheme based on incoming request headers added by
        # upstream things like stunnel/haproxy.
        if c.secure:
            request.environ["wsgi.url_scheme"] = "https"

        c.request_origin = request.host_url

        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()

        c.request_timer.intermediate("minimal-pre")
        # True/False forces. None updates for most non-POST requests
        c.update_last_visit = None

        g.stats.count_string('user_agents', request.user_agent)

        hooks.get_hook("reddit.request.minimal_begin").call()
Developer: AlbertoPeon, Project: reddit, Lines of code: 50, Source file: reddit_base.py
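
The wsgi.url_scheme comment in this example suggests deriving the scheme from a header added upstream by stunnel/haproxy. Below is a minimal, self-contained WSGI middleware sketch of that idea, assuming the proxy sets X-Forwarded-Proto; it is not part of the reddit code.

class SchemeFromProxy(object):
    """Trust the TLS terminator's header so absolute URLs get the right scheme."""
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        proto = environ.get('HTTP_X_FORWARDED_PROTO')
        if proto in ('http', 'https'):
            environ['wsgi.url_scheme'] = proto
        return self.app(environ, start_response)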

Example 6: _callback

    def _callback(msg):
        if verbose:
            count_str = ''
            if 'message_count' in msg.delivery_info:
                # the count from the last message, if the count is
                # available
                count_str = '(%d remaining)' % msg.delivery_info['message_count']

            print "%s: 1 item %s" % (queue, count_str)

        g.reset_caches()
        ret = callback(msg)
        msg.channel.basic_ack(msg.delivery_tag)
        sys.stdout.flush()
        return ret
Developer: donslice, Project: reddit, Lines of code: 15, Source file: amqp.py

Example 7: get_step_state

def get_step_state(emr_connection, jobflowid, step_name, update=False):
    """Return the state of a step.

    If jobflowid/step_name combination is not unique this will return the state
    of the most recent step.

    """

    g.reset_caches()
    steps = get_step_states(emr_connection, jobflowid, _update=update)

    for name, state in reversed(steps):
        if name == step_name:
            return state
    else:
        return NOTFOUND
Developer: Matthew94, Project: reddit, Lines of code: 16, Source file: emr_helpers.py
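
A hypothetical call, assuming boto's EmrConnection (credentials read from the environment) and that NOTFOUND comes from the same emr_helpers module; the jobflow id and step name are made up:

from boto.emr.connection import EmrConnection

emr_connection = EmrConnection()
state = get_step_state(emr_connection, 'j-ABC123EXAMPLE',
                       'hourly traffic rollup', update=True)
if state == NOTFOUND:
    print "no such step"
else:
    print "step state: %s" % state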

Example 8: _callback

    def _callback(msg):
        if verbose:
            count_str = ""
            if "message_count" in msg.delivery_info:
                # the count from the last message, if the count is
                # available
                count_str = "(%d remaining)" % msg.delivery_info["message_count"]

            print "%s: 1 item %s" % (queue, count_str)

        g.reset_caches()
        c.use_write_db = {}

        ret = callback(msg)
        msg.channel.basic_ack(msg.delivery_tag)
        sys.stdout.flush()
        return ret
Developer: new-day-international, Project: reddit, Lines of code: 17, Source file: amqp.py

Example 9: pre

    def pre(self):
        c.start_time = datetime.now(g.tz)
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix", 
                                              g.domain_prefix)
        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_agents()
            ratelimit_throttled()

        c.allow_loggedin_cache = False

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
Developer: XieConnect, Project: reddit, Lines of code: 17, Source file: reddit_base.py

Example 10: main

def main():
    now = datetime.datetime.now(TIMEZONE)

    # calculate and store the new day's gold goal
    determine_gold_goal(now.date())

    # post a new thread if we met our revenue goal
    yesterday = (now - datetime.timedelta(days=1)).date()
    post_if_goal_reached(yesterday)

    # look at old (now complete) threads if any
    activate_requested_names(but_not=yesterday)

    # wait until all our amqp / permacache changes are flushed from the
    # in-process queue.
    worker.join()
    g.reset_caches()

    # update the sidebar with a list of names
    update_sidebar()
Developer: GodOfConquest, Project: reddit-plugin-gold, Lines of code: 20, Source file: gold_end_of_day.py
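
A self-contained sketch of the "flush background work first, then drop caches" ordering in main() above, using a plain Queue and worker thread in place of reddit's in-process amqp/permacache worker (names are illustrative):

import threading
import Queue

jobs = Queue.Queue()

def worker():
    while True:
        item = jobs.get()
        if item is None:
            break
        # pretend to write the item out to a cache or message broker
        jobs.task_done()

t = threading.Thread(target=worker)
t.start()

for i in range(10):
    jobs.put(i)

jobs.join()          # wait until every queued write has been flushed...
# g.reset_caches()   # ...and only then discard local caches, as main() does
jobs.put(None)
t.join()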

Example 11: msgtime_to_inbox_count.py script (excerpt)

    return True

resume_id = long(sys.argv[1]) if len(sys.argv) > 1 else None

msg_accounts = Account._query(sort=desc("_date"), data=True)

if resume_id:
    msg_accounts._filter(Account.c._id < resume_id)

for account in progress(fetch_things2(msg_accounts), estimate=resume_id):
    current_inbox_count = account.inbox_count
    unread_messages = list(queries.get_unread_inbox(account))

    if account._id % 100000 == 0:
        g.reset_caches()

    if not len(unread_messages):
        if current_inbox_count:
            account._incr('inbox_count', -current_inbox_count)
    else:
        msgs = Message._by_fullname(
            unread_messages,
            data=True,
            return_dict=False,
            ignore_missing=True,
        )
        kept_msgs = sum(1 for msg in msgs if _keep(msg, account))

        if kept_msgs or current_inbox_count:
            account._incr('inbox_count', kept_msgs - current_inbox_count)
Developer: 0xcd03, Project: reddit, Lines of code: 30, Source file: msgtime_to_inbox_count.py
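
The script above drops its caches every 100000 accounts so the long-running backfill does not keep growing in memory. A self-contained toy of that pattern, reusing the stub Globals idea from the sketch at the top of this page:

class Globals(object):
    def __init__(self):
        self.reset_caches()

    def reset_caches(self):
        self.cache = {}

g = Globals()

def expensive_lookup(item_id):
    # pretend this hits a database and memoizes the result locally
    if item_id not in g.cache:
        g.cache[item_id] = item_id * 2
    return g.cache[item_id]

for item_id in xrange(1, 300001):
    expensive_lookup(item_id)
    if item_id % 100000 == 0:
        g.reset_caches()     # bound the cache's size during the batch run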

Example 12: post

    def post(self):
        response = c.response
        content = filter(None, response.content)
        if isinstance(content, (list, tuple)):
            content = ''.join(content)
        for w in c.response_wrappers:
            content = w(content)
        response.content = content
        if c.response_content_type:
            response.headers['Content-Type'] = c.response_content_type

        if c.user_is_loggedin and not c.allow_loggedin_cache:
            response.headers['Cache-Control'] = 'no-cache'
            response.headers['Pragma'] = 'no-cache'

        if c.deny_frames:
            response.headers["X-Frame-Options"] = "DENY"

        #return
        #set content cache
        if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_code not in (429, 503)
            and response.content and response.content[0]):
            try:
                g.rendercache.set(self.request_key(),
                                  (response, c.cookies),
                                  g.page_cache_time)
            except MemcachedError:
                # the key was too big to set in the rendercache
                g.log.debug("Ignored too-big render cache")

        # send cookies
        for k,v in c.cookies.iteritems():
            if v.dirty:
                response.set_cookie(key     = k,
                                    value   = quote(v.value),
                                    domain  = v.domain,
                                    expires = v.expires)

        end_time = datetime.now(g.tz)

        if ('pylons.routes_dict' in request.environ and
            'action' in request.environ['pylons.routes_dict']):
            action = str(request.environ['pylons.routes_dict']['action'])
        else:
            action = "unknown"
            log_text("unknown action", "no action for %r" % path_info,
                     "warning")
        if g.usage_sampling >= 1.0 or rand.random() < g.usage_sampling:

            amqp.add_kw("usage_q",
                        start_time = c.start_time,
                        end_time = end_time,
                        sampling_rate = g.usage_sampling,
                        action = action)

        check_request(end_time)

        # this thread is probably going to be reused, but it could be
        # a while before it is. So we might as well dump the cache in
        # the mean time so that we don't have dead objects hanging
        # around taking up memory
        g.reset_caches()

        # push data to statsd
        if 'pylons.action_method' in request.environ:
            # only report web timing data if an action handler was called
            g.stats.transact('web.%s' % action,
                             (end_time - c.start_time).total_seconds())
        g.stats.flush_timing_stats()
Developer: blitz80690, Project: reddit, Lines of code: 73, Source file: reddit_base.py

Example 13: post

    def post(self):
        c.request_timer.intermediate("action")

        # if the action raised an HTTPException (i.e. it aborted) then pylons
        # will have replaced response with the exception itself.
        c.is_exception_response = getattr(response, "_exception", False)

        if c.response_wrapper and not c.is_exception_response:
            content = flatten_response(response.content)
            wrapped_content = c.response_wrapper(content)
            response.content = wrapped_content

        if c.user_is_loggedin and not c.allow_loggedin_cache:
            response.headers['Cache-Control'] = 'no-cache'
            response.headers['Pragma'] = 'no-cache'

        if c.deny_frames:
            response.headers["X-Frame-Options"] = "DENY"

        #set content cache
        if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_int != 429
            and not response.status.startswith("5")
            and not c.is_exception_response):
            try:
                g.pagecache.set(self.request_key(),
                                (response._current_obj(), c.cookies),
                                g.page_cache_time)
            except MemcachedError as e:
                # this codepath will actually never be hit as long as
                # the pagecache memcached client is in no_reply mode.
                g.log.warning("Ignored exception (%r) on pagecache "
                              "write for %r", e, request.path)

        # send cookies
        for k, v in c.cookies.iteritems():
            if v.dirty:
                response.set_cookie(key=k,
                                    value=quote(v.value),
                                    domain=v.domain,
                                    expires=v.expires,
                                    secure=getattr(v, 'secure', False),
                                    httponly=getattr(v, 'httponly', False))

        if self.should_update_last_visit():
            c.user.update_last_visit(c.start_time)

        hooks.get_hook("reddit.request.end").call()

        # this thread is probably going to be reused, but it could be
        # a while before it is. So we might as well dump the cache in
        # the mean time so that we don't have dead objects hanging
        # around taking up memory
        g.reset_caches()

        c.request_timer.intermediate("post")

        # push data to statsd
        c.request_timer.stop()
        g.stats.flush()
Developer: ChrisCinelli, Project: reddit, Lines of code: 63, Source file: reddit_base.py

Example 14: handle_items

def handle_items(queue, callback, ack=True, limit=1, min_size=0, drain=False, verbose=True, sleep_time=1):
    """Call callback() on every item in a particular queue. If the
    connection to the queue is lost, it will die. Intended to be
    used as a long-running process."""
    if limit < min_size:
        raise ValueError("min_size must be less than limit")
    from pylons import c
    from raven import Client

    if "sentry_dsn" in g.config:
        raven_client = Client(g.config["sentry_dsn"])
    else:
        raven_client = None

    chan = connection_manager.get_channel()
    countdown = None

    while True:
        # NB: None != 0, so we don't need an "is not None" check here
        if countdown == 0:
            break

        msg = chan.basic_get(queue)
        if not msg and drain:
            return
        elif not msg:
            time.sleep(sleep_time)
            continue

        if countdown is None and drain and "message_count" in msg.delivery_info:
            countdown = 1 + msg.delivery_info["message_count"]

        g.reset_caches()
        c.use_write_db = {}

        items = [msg]

        while countdown != 0:
            if countdown is not None:
                countdown -= 1
            if len(items) >= limit:
                break  # the innermost loop only
            msg = chan.basic_get(queue)
            if msg is None:
                if len(items) < min_size:
                    time.sleep(sleep_time)
                else:
                    break
            else:
                items.append(msg)

        try:
            count_str = ""
            if "message_count" in items[-1].delivery_info:
                # the count from the last message, if the count is
                # available
                count_str = "(%d remaining)" % items[-1].delivery_info["message_count"]
            if verbose:
                print "%s: %d items %s" % (queue, len(items), count_str)
            callback(items, chan)

            if ack:
                # ack *all* outstanding messages
                chan.basic_ack(ALL_READ_MESSAGES, multiple=True)

            # flush any log messages printed by the callback
            sys.stdout.flush()
        except:
            if raven_client:
                raven_client.captureException()
            for item in items:
                # explicitly reject the items that we've not processed
                chan.basic_reject(item.delivery_tag, requeue=True)
            raise
Developer: new-day-international, Project: reddit, Lines of code: 74, Source file: amqp.py
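
A simplified, self-contained toy of the limit/min_size batching above. The real loop sleeps and re-polls the broker when it is below min_size; this version just hands the items back, purely to illustrate the two thresholds:

from collections import deque

def take_batch(queue, limit=3, min_size=2):
    items = []
    while queue and len(items) < limit:
        items.append(queue.popleft())
    if len(items) >= min_size:
        return items
    queue.extendleft(reversed(items))   # too few: put them back for next time
    return []

q = deque(range(7))
print take_batch(q)    # [0, 1, 2]
print take_batch(q)    # [3, 4, 5]
print take_batch(q)    # [] -- only one item left, below min_size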

Example 15: describe_jobflows

def describe_jobflows(emr_connection, _update=False):
    g.reset_caches()
    jobflows = describe_jobflows_cached(emr_connection, _update=_update)
    return jobflows
Developer: 1900, Project: reddit, Lines of code: 4, Source file: emr_helpers.py
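
Hypothetical usage, making the same boto EmrConnection assumption as in Example 7; the jobflowid and state attributes follow boto's JobFlow objects and are an assumption about what describe_jobflows_cached returns:

from boto.emr.connection import EmrConnection

emr_connection = EmrConnection()
jobflows = describe_jobflows(emr_connection, _update=True)
for jf in jobflows:
    print jf.jobflowid, jf.state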


Note: The pylons.g.reset_caches function examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective authors, and copyright remains with those authors; consult each project's license before redistributing or reusing the code. Please do not republish without permission.