本文整理汇总了Python中r2.config.queues.declare_queues函数的典型用法代码示例。如果您正苦于以下问题:Python declare_queues函数的具体用法?Python declare_queues怎么用?Python declare_queues使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了declare_queues函数的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
def __init__(self, global_conf, app_conf, paths, **extra):
    """
    Globals acts as a container for objects available throughout
    the life of the application.

    One instance of Globals is created by Pylons during
    application initialization and is available during requests
    via the 'g' variable.

    ``global_conf``
        The same variable used throughout ``config/middleware.py``
        namely, the variables from the ``[DEFAULT]`` section of the
        configuration file.

    ``app_conf``
        The same ``kw`` dictionary used throughout
        ``config/middleware.py`` namely, the variables from the
        section in the config file for your application.

    ``extra``
        The configuration returned from ``load_config`` in
        ``config/middleware.py`` which may be of use in the setup of
        your global variables.
    """
    global_conf.setdefault("debug", False)

    # parse/validate the raw config, then layer on the spec contributed
    # by this class and any loaded plugins
    self.config = ConfigValueParser(global_conf)
    self.config.add_spec(self.spec)
    self.plugins = PluginLoader(self.config.get("plugins", []))

    self.queues = queues.declare_queues()

    self.paths = paths

    self.running_as_script = global_conf.get('running_as_script', False)

    # turn on for language support
    self.lang = getattr(self, 'site_lang', 'en')
    self.languages, self.lang_name = \
        get_active_langs(default_lang=self.lang)

    # sorted() over keys()+in-place sort(): no throwaway mutable list,
    # and it also works if lang_name.keys() is a dict view
    self.all_languages = sorted(self.lang_name)

    # set default time zone if one is not set
    tz = global_conf.get('timezone', 'UTC')
    self.tz = pytz.timezone(tz)

    dtz = global_conf.get('display_timezone', tz)
    self.display_tz = pytz.timezone(dtz)
示例2: setup
def setup(self):
    """Post-configuration initialization: declare AMQP queues, select the
    media provider, validate required settings, and wire up logging,
    host/pid info, and rate-limit constants.

    NOTE(review): indentation was flattened in this excerpt; nesting below
    is reconstructed from the apparent control flow. The excerpt is
    truncated (see final comment).
    """
    self.queues = queues.declare_queues(self)

    ################# PROVIDERS
    # media storage/serving backend is pluggable via pkg_resources
    self.media_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.media",
        self.media_provider,
    )
    self.startup_timer.intermediate("providers")

    ################# CONFIGURATION
    # AMQP is required
    if not self.amqp_host:
        raise ValueError("amqp_host not set in the .ini")
    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
    self.origin = "http://" + origin_prefix + self.domain

    self.trusted_domains = set([self.domain])
    if self.https_endpoint:
        https_url = urlparse(self.https_endpoint)
        self.trusted_domains.add(https_url.hostname)

    # load the unique hashed names of files under static
    static_files = os.path.join(self.paths.get('static_files'), 'static')
    names_file_path = os.path.join(static_files, 'names.json')
    if os.path.exists(names_file_path):
        with open(names_file_path) as handle:
            self.static_names = json.load(handle)
    else:
        self.static_names = {}

    # make python warnings go through the logging system
    logging.captureWarnings(capture=True)

    log = logging.getLogger('reddit')

    # when we're a script (paster run) just set up super simple logging
    if self.running_as_script:
        log.setLevel(logging.INFO)
        log.addHandler(logging.StreamHandler())

    # if in debug mode, override the logging level to DEBUG
    if self.debug:
        log.setLevel(logging.DEBUG)

    # attempt to figure out which pool we're in and add that to the
    # LogRecords.
    try:
        with open("/etc/ec2_asg", "r") as f:
            pool = f.read().strip()

        # clean up the pool name since we're putting stuff after "-"
        pool = pool.partition("-")[0]
    except IOError:
        pool = "reddit-app"
    self.log = logging.LoggerAdapter(log, {"pool": pool})

    # set locations
    self.locations = {}

    if not self.media_domain:
        self.media_domain = self.domain
    if self.media_domain == self.domain:
        print >> sys.stderr, ("Warning: g.media_domain == g.domain. " +
                              "This may give untrusted content access to user cookies")

    # allow "k=v" command-line arguments to override attributes on g
    for arg in sys.argv:
        tokens = arg.split("=")
        if len(tokens) == 2:
            k, v = tokens
            self.log.debug("Overriding g.%s to %s" % (k, v))
            setattr(self, k, v)

    self.reddit_host = socket.gethostname()
    self.reddit_pid = os.getpid()

    if hasattr(signal, 'SIGUSR1'):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)

    locale.setlocale(locale.LC_ALL, self.locale)

    # Pre-calculate ratelimit values
    self.RL_RESET_SECONDS = self.config["RL_RESET_MINUTES"] * 60
    self.RL_MAX_REQS = int(self.config["RL_AVG_REQ_PER_SEC"] *
                           self.RL_RESET_SECONDS)

    self.RL_OAUTH_RESET_SECONDS = self.config["RL_OAUTH_RESET_MINUTES"] * 60
    self.RL_OAUTH_MAX_REQS = int(self.config["RL_OAUTH_AVG_REQ_PER_SEC"] *
                                 self.RL_OAUTH_RESET_SECONDS)
    # ......... remainder of this example omitted .........
示例3: setup
def setup(self):
    """Variant of Globals.setup: detect unit-test invocation, declare
    queues, map extension subdomains, and select the full set of
    pluggable providers before validating required configuration.

    NOTE(review): indentation was flattened in this excerpt; nesting below
    is reconstructed from the apparent control flow. The excerpt is
    truncated (see final comment).
    """
    self.env = ''
    if (
        # handle direct invocation of "nosetests"
        "test" in sys.argv[0] or
        # handle "setup.py test" and all permutations thereof.
        "setup.py" in sys.argv[0] and "test" in sys.argv[1:]
    ):
        self.env = "unit_test"

    self.queues = queues.declare_queues(self)

    # subdomain -> extension/render-style mapping (e.g. i.* -> "compact")
    self.extension_subdomains = dict(
        simple="mobile",
        i="compact",
        api="api",
        rss="rss",
        xml="xml",
        json="json",
    )

    ################# PROVIDERS
    self.auth_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.auth",
        self.authentication_provider,
    )

    self.media_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.media",
        self.media_provider,
    )

    self.cdn_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.cdn",
        self.cdn_provider,
    )

    self.ticket_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.support",
        # TODO: fix this later, it refuses to pick up
        # g.config['ticket_provider'] value, so hardcoding for now.
        # really, the next uncommented line should be:
        #self.ticket_provider,
        # instead of:
        "zendesk",
    )

    self.image_resizing_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.image_resizing",
        self.image_resizing_provider,
    )

    self.email_provider = select_provider(
        self.config,
        self.pkg_resources_working_set,
        "r2.provider.email",
        self.email_provider,
    )

    self.startup_timer.intermediate("providers")

    ################# CONFIGURATION
    # AMQP is required
    if not self.amqp_host:
        raise ValueError("amqp_host not set in the .ini")
    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    # note: unlike the other variants, the scheme here is configurable
    origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
    self.origin = self.default_scheme + "://" + origin_prefix + self.domain

    self.trusted_domains = set([self.domain])
    if self.https_endpoint:
        https_url = urlparse(self.https_endpoint)
        self.trusted_domains.add(https_url.hostname)

    # load the unique hashed names of files under static
    static_files = os.path.join(self.paths.get('static_files'), 'static')
    names_file_path = os.path.join(static_files, 'names.json')
    if os.path.exists(names_file_path):
        with open(names_file_path) as handle:
            self.static_names = json.load(handle)
    else:
        self.static_names = {}

    # make python warnings go through the logging system
    logging.captureWarnings(capture=True)

    log = logging.getLogger('reddit')

    # when we're a script (paster run) just set up super simple logging
    # ......... remainder of this example omitted .........
示例4: setup
def setup(self):
    """Variant of Globals.setup: declare queues, validate required
    configuration (AMQP, query cache, Cassandra), build trusted/secure
    domain sets, set up logging, and connect to ZooKeeper when configured.

    NOTE(review): indentation was flattened in this excerpt; nesting below
    is reconstructed from the apparent control flow. The excerpt is
    truncated (see final comment).
    """
    self.queues = queues.declare_queues(self)

    ################# CONFIGURATION
    # AMQP is required
    if not self.amqp_host:
        raise ValueError("amqp_host not set in the .ini")

    # This requirement doesn't *have* to be a requirement, but there are
    # bugs at the moment that will pop up if you violate it
    # XXX: get rid of these options. new query cache is always on.
    if self.write_query_queue and not self.use_query_cache:
        raise Exception("write_query_queue requires use_query_cache")

    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
    self.origin = "http://" + origin_prefix + self.domain

    # domains allowed to be served over https / trusted for cookies
    self.secure_domains = set([urlparse(self.payment_domain).netloc])
    self.trusted_domains = set([self.domain])
    self.trusted_domains.update(self.authorized_cnames)
    if self.https_endpoint:
        https_url = urlparse(self.https_endpoint)
        self.secure_domains.add(https_url.netloc)
        self.trusted_domains.add(https_url.hostname)
    if getattr(self, 'oauth_domain', None):
        self.secure_domains.add(self.oauth_domain)

    # load the unique hashed names of files under static
    static_files = os.path.join(self.paths.get('static_files'), 'static')
    names_file_path = os.path.join(static_files, 'names.json')
    if os.path.exists(names_file_path):
        with open(names_file_path) as handle:
            self.static_names = json.load(handle)
    else:
        self.static_names = {}

    #setup the logger
    self.log = logging.getLogger('reddit')
    self.log.addHandler(logging.StreamHandler())
    if self.debug:
        self.log.setLevel(logging.DEBUG)
    else:
        self.log.setLevel(logging.INFO)

    # set log level for pycountry which is chatty
    logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

    if not self.media_domain:
        self.media_domain = self.domain
    if self.media_domain == self.domain:
        print ("Warning: g.media_domain == g.domain. " +
               "This may give untrusted content access to user cookies")

    # allow "k=v" command-line arguments to override attributes on g
    for arg in sys.argv:
        tokens = arg.split("=")
        if len(tokens) == 2:
            k, v = tokens
            self.log.debug("Overriding g.%s to %s" % (k, v))
            setattr(self, k, v)

    self.reddit_host = socket.gethostname()
    self.reddit_pid = os.getpid()

    if hasattr(signal, 'SIGUSR1'):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)

    self.startup_timer.intermediate("configuration")

    ################# ZOOKEEPER
    # for now, zookeeper will be an optional part of the stack.
    # if it's not configured, we will grab the expected config from the
    # [live_config] section of the ini file
    zk_hosts = self.config.get("zookeeper_connection_string")
    if zk_hosts:
        from r2.lib.zookeeper import (connect_to_zookeeper,
                                      LiveConfig, LiveList)
        zk_username = self.config["zookeeper_username"]
        zk_password = self.config["zookeeper_password"]
        self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                         zk_password))
        self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
        self.throttles = LiveList(self.zookeeper, "/throttles",
                                  map_fn=ipaddress.ip_network,
                                  reduce_fn=ipaddress.collapse_addresses)
    else:
        self.zookeeper = None
        parser = ConfigParser.RawConfigParser()
        parser.read([self.config["__file__"]])
        self.live_config = extract_live_config(parser, self.plugins)
        self.throttles = tuple()  # immutable since it's not real

    self.startup_timer.intermediate("zookeeper")
    # ......... remainder of this example omitted .........
示例5: setup
def setup(self):
    """Variant of Globals.setup: declare queues, validate required
    configuration, build trusted/secure domain sets, configure logging
    (including old-uwsgi and cssutils integration), and load the country
    list.

    NOTE(review): indentation was flattened in this excerpt; nesting below
    is reconstructed from the apparent control flow. The excerpt is
    truncated (see final comment).
    """
    self.queues = queues.declare_queues(self)

    ################# CONFIGURATION
    # AMQP is required
    if not self.amqp_host:
        raise ValueError("amqp_host not set in the .ini")
    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    origin_prefix = self.domain_prefix + "." if self.domain_prefix else ""
    self.origin = "http://" + origin_prefix + self.domain

    # domains allowed to be served over https / trusted for cookies
    self.secure_domains = set([urlparse(self.payment_domain).netloc])
    self.trusted_domains = set([self.domain])
    self.trusted_domains.update(self.authorized_cnames)
    if self.https_endpoint:
        https_url = urlparse(self.https_endpoint)
        self.secure_domains.add(https_url.netloc)
        self.trusted_domains.add(https_url.hostname)
    if getattr(self, "oauth_domain", None):
        self.secure_domains.add(self.oauth_domain)

    # load the unique hashed names of files under static
    static_files = os.path.join(self.paths.get("static_files"), "static")
    names_file_path = os.path.join(static_files, "names.json")
    if os.path.exists(names_file_path):
        with open(names_file_path) as handle:
            self.static_names = json.load(handle)
    else:
        self.static_names = {}

    # if we're a web app running on old uwsgi, force load the logging
    # config from the file since uwsgi didn't do it for us
    if not self.running_as_script and self.old_uwsgi_load_logging_config:
        logging.config.fileConfig(self.config["__file__"])

    # make python warnings go through the logging system
    logging.captureWarnings(capture=True)

    log = logging.getLogger("reddit")

    # when we're a script (paster run) just set up super simple logging
    if self.running_as_script:
        log.setLevel(logging.INFO)
        log.addHandler(logging.StreamHandler())

    # if in debug mode, override the logging level to DEBUG
    if self.debug:
        log.setLevel(logging.DEBUG)

    # attempt to figure out which pool we're in and add that to the
    # LogRecords.
    try:
        with open("/etc/ec2_asg", "r") as f:
            pool = f.read().strip()

        # clean up the pool name since we're putting stuff after "-"
        pool = pool.partition("-")[0]
    except IOError:
        pool = "reddit-app"
    self.log = logging.LoggerAdapter(log, {"pool": pool})

    # make cssutils use the real logging system
    csslog = logging.getLogger("cssutils")
    cssutils.log.setLog(csslog)

    # load the country list
    countries_file_path = os.path.join(static_files, "countries.json")
    try:
        with open(countries_file_path) as handle:
            self.countries = json.load(handle)
        self.log.debug("Using countries.json.")
    except IOError:
        self.log.warning("Couldn't find countries.json. Using pycountry.")
        self.countries = get_countries_and_codes()

    if not self.media_domain:
        self.media_domain = self.domain
    if self.media_domain == self.domain:
        print("Warning: g.media_domain == g.domain. " + "This may give untrusted content access to user cookies")

    # allow "k=v" command-line arguments to override attributes on g
    for arg in sys.argv:
        tokens = arg.split("=")
        if len(tokens) == 2:
            k, v = tokens
            self.log.debug("Overriding g.%s to %s" % (k, v))
            setattr(self, k, v)

    self.reddit_host = socket.gethostname()
    self.reddit_pid = os.getpid()

    if hasattr(signal, "SIGUSR1"):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)
    # ......... remainder of this example omitted .........
示例6: setup
def setup(self):
    """Variant of Globals.setup focused on infrastructure: queues,
    ZooKeeper live config, memcached/Cassandra connection pools, stats
    hooks, and the layered cache chains.

    NOTE(review): indentation was flattened in this excerpt; nesting below
    is reconstructed from the apparent control flow. The excerpt is
    truncated mid-statement (see final comment).
    """
    self.queues = queues.declare_queues(self)

    # heavy load mode is read only mode with a different infobar
    if self.heavy_load_mode:
        self.read_only_mode = True

    if hasattr(signal, 'SIGUSR1'):
        # not all platforms have user signals
        signal.signal(signal.SIGUSR1, thread_dump)

    # initialize caches. Any cache-chains built here must be added
    # to cache_chains (closed around by reset_caches) so that they
    # can properly reset their local components
    localcache_cls = (SelfEmptyingCache if self.running_as_script
                      else LocalCache)
    num_mc_clients = self.num_mc_clients

    self.cache_chains = {}

    # for now, zookeeper will be an optional part of the stack.
    # if it's not configured, we will grab the expected config from the
    # [live_config] section of the ini file
    zk_hosts = self.config.get("zookeeper_connection_string")
    if zk_hosts:
        from r2.lib.zookeeper import (connect_to_zookeeper,
                                      LiveConfig, LiveList)
        zk_username = self.config["zookeeper_username"]
        zk_password = self.config["zookeeper_password"]
        self.zookeeper = connect_to_zookeeper(zk_hosts, (zk_username,
                                                         zk_password))
        self.live_config = LiveConfig(self.zookeeper, LIVE_CONFIG_NODE)
        self.throttles = LiveList(self.zookeeper, "/throttles",
                                  map_fn=ipaddress.ip_network,
                                  reduce_fn=ipaddress.collapse_addresses)
    else:
        self.zookeeper = None
        parser = ConfigParser.RawConfigParser()
        parser.read([self.config["__file__"]])
        self.live_config = extract_live_config(parser, self.plugins)
        self.throttles = tuple()  # immutable since it's not real

    self.memcache = CMemcache(self.memcaches, num_clients = num_mc_clients)
    self.lock_cache = CMemcache(self.lockcaches, num_clients=num_mc_clients)

    self.stats = Stats(self.config.get('statsd_addr'),
                       self.config.get('statsd_sample_rate'))

    # register stats callbacks on SQLAlchemy cursor-execute events
    event.listens_for(engine.Engine, 'before_cursor_execute')(
        self.stats.pg_before_cursor_execute)
    event.listens_for(engine.Engine, 'after_cursor_execute')(
        self.stats.pg_after_cursor_execute)

    self.make_lock = make_lock_factory(self.lock_cache, self.stats)

    if not self.cassandra_seeds:
        raise ValueError("cassandra_seeds not set in the .ini")

    keyspace = "reddit"
    self.cassandra_pools = {
        "main":
            StatsCollectingConnectionPool(
                keyspace,
                stats=self.stats,
                logging_name="main",
                server_list=self.cassandra_seeds,
                pool_size=self.cassandra_pool_size,
                timeout=2,
                max_retries=3,
                prefill=False
            ),
    }

    # permacache: local cache -> optional memcached tier -> Cassandra
    perma_memcache = (CMemcache(self.permacache_memcaches, num_clients = num_mc_clients)
                      if self.permacache_memcaches
                      else None)
    self.permacache = CassandraCacheChain(localcache_cls(),
                                          CassandraCache('permacache',
                                                         self.cassandra_pools[self.cassandra_default_pool],
                                                         read_consistency_level = self.cassandra_rcl,
                                                         write_consistency_level = self.cassandra_wcl),
                                          memcache = perma_memcache,
                                          lock_factory = self.make_lock)

    self.cache_chains.update(permacache=self.permacache)

    # hardcache is done after the db info is loaded, and then the
    # chains are reset to use the appropriate initial entries
    if self.stalecaches:
        self.cache = StaleCacheChain(localcache_cls(),
                                     CMemcache(self.stalecaches, num_clients=num_mc_clients),
                                     self.memcache)
    else:
        self.cache = MemcacheChain((localcache_cls(), self.memcache))
    self.cache_chains.update(cache=self.cache)

    self.rendercache = MemcacheChain((localcache_cls(),
    # ......... remainder of this example omitted (statement truncated mid-expression) .........