This article collects typical usage examples of Python's os.environ.get function. If you are unsure how environ.get is called in practice, what its arguments mean, or what real-world code using it looks like, the curated examples below should help.
Fifteen code examples of environ.get are shown, ordered by popularity by default. Each snippet is a function-level excerpt from a larger project, so surrounding imports such as from os import environ (sometimes aliased to env) are assumed from the original modules.
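Before the project examples, here is a minimal sketch of the pattern they all build on: os.environ.get(name) returns the variable's value as a string, or None when it is unset, and os.environ.get(name, default) returns the given fallback instead. The variable names below are placeholders chosen only for illustration.

from os import environ

# Returns None when API_TOKEN is not set; no KeyError is raised.
token = environ.get('API_TOKEN')

# A second argument supplies a fallback for unset variables; values are
# always strings, so cast explicitly when a number is expected.
port = int(environ.get('PORT', '8000'))

if token is None:
    raise RuntimeError('API_TOKEN must be set in the environment.')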
Example 1: _send_slack_alert
def _send_slack_alert(self, message, service, color='green', sender='Cabot'):
    channel = '#' + env.get('SLACK_ALERT_CHANNEL')
    url = env.get('SLACK_WEBHOOK_URL')
    icon_url = env.get('SLACK_ICON_URL')
    # TODO: handle color
    resp = requests.post(url, data=json.dumps({
        'channel': channel,
        'username': sender[:15],
        'icon_url': icon_url,
        'attachments': [{
            'title': service.name,
            'text': message,
            'color': color,
            'fields': [{
                'title': 'status',
                'value': service.overall_status,
                'short': 'false'
            }, {
                'title': 'old status',
                'value': service.old_overall_status,
                'short': 'false'
            }]
        }]
    }))
Example 2: get_backend_api
def get_backend_api(test_case, cluster_id):
    """
    Get an appropriate BackendAPI for the specified dataset backend.

    Note this is a backdoor that is useful to be able to interact with cloud
    APIs in tests. For many dataset backends this does not make sense, but it
    provides a convenient means to interact with cloud backends such as EBS or
    cinder.

    :param test_case: The test case that is being run.
    :param cluster_id: The unique cluster_id, used for backend APIs that
        require this in order to be constructed.
    """
    backend_config_filename = environ.get(
        "FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG")
    if backend_config_filename is None:
        raise SkipTest(
            'This test requires the ability to construct an IBlockDeviceAPI '
            'in order to verify construction. Please set '
            'FLOCKER_ACCEPTANCE_TEST_VOLUME_BACKEND_CONFIG to a yaml filepath '
            'with the dataset configuration.')
    backend_name = environ.get("FLOCKER_ACCEPTANCE_VOLUME_BACKEND")
    if backend_name is None:
        raise SkipTest(
            "Set acceptance testing volume backend using the " +
            "FLOCKER_ACCEPTANCE_VOLUME_BACKEND environment variable.")
    backend_config_filepath = FilePath(backend_config_filename)
    full_backend_config = yaml.safe_load(
        backend_config_filepath.getContent())
    backend_config = full_backend_config.get(backend_name)
    if 'backend' in backend_config:
        backend_config.pop('backend')
    backend = get_backend(backend_name)
    return get_api(backend, pmap(backend_config), reactor, cluster_id)
Example 3: application
def application(config):
    app = Application("Scrapyd")
    http_port = int(environ.get('PORT', config.getint('http_port', 6800)))
    config.cp.set('scrapyd', 'database_url', environ.get('DATABASE_URL'))
    poller = Psycopg2QueuePoller(config)
    eggstorage = FilesystemEggStorage(config)
    scheduler = Psycopg2SpiderScheduler(config)
    environment = Environment(config)
    app.setComponent(IPoller, poller)
    app.setComponent(IEggStorage, eggstorage)
    app.setComponent(ISpiderScheduler, scheduler)
    app.setComponent(IEnvironment, environment)
    launcher = Launcher(config, app)
    timer = TimerService(5, poller.poll)
    webservice = TCPServer(http_port, server.Site(Root(config, app)))
    log.msg("Scrapyd web console available at http://localhost:%s/ (HEROKU)"
            % http_port)
    launcher.setServiceParent(app)
    timer.setServiceParent(app)
    webservice.setServiceParent(app)
    return app
Example 4: __init__
def __init__(self, api_key=environ.get('PIZ_GOOGLE_API_KEY'), cx=environ.get('PIZ_GOOGLE_SEARCH_CX')):
    if api_key is None or cx is None:
        raise UserMisconfigurationError('You must have both PIZ_GOOGLE_API_KEY and PIZ_GOOGLE_SEARCH_CX set as '
                                        'environment variables in your shell.')
    self.api_key = api_key
    self.cx = cx
    self.service = build('customsearch', 'v1', developerKey=api_key)
Example 5: test_returned_filesize
def test_returned_filesize():
    runner = CliRunner()
    result = runner.invoke(
        cli,
        ['search',
         environ.get('SENTINEL_USER'),
         environ.get('SENTINEL_PASSWORD'),
         'tests/map.geojson',
         '--url', 'https://scihub.copernicus.eu/dhus/',
         '-s', '20141205',
         '-e', '20141208',
         '-q', 'producttype=GRD']
    )
    expected = "1 scenes found with a total size of 0.50 GB"
    assert result.output.split("\n")[-2] == expected
    result = runner.invoke(
        cli,
        ['search',
         environ.get('SENTINEL_USER'),
         environ.get('SENTINEL_PASSWORD'),
         'tests/map.geojson',
         '--url', 'https://scihub.copernicus.eu/dhus/',
         '-s', '20140101',
         '-e', '20141231',
         '-q', 'producttype=GRD']
    )
    expected = "20 scenes found with a total size of 11.06 GB"
    assert result.output.split("\n")[-2] == expected
Example 6: send_alert
def send_alert(self, service, users, duty_officers):
    account_sid = env.get('TWILIO_ACCOUNT_SID')
    auth_token = env.get('TWILIO_AUTH_TOKEN')
    outgoing_number = env.get('TWILIO_OUTGOING_NUMBER')
    all_users = list(users) + list(duty_officers)
    client = TwilioRestClient(
        account_sid, auth_token)
    mobiles = TwilioUserData.objects.filter(user__user__in=all_users)
    mobiles = [m.prefixed_phone_number for m in mobiles if m.phone_number]
    c = Context({
        'service': service,
        'host': settings.WWW_HTTP_HOST,
        'scheme': settings.WWW_SCHEME,
    })
    message = Template(sms_template).render(c)
    for mobile in mobiles:
        try:
            client.sms.messages.create(
                to=mobile,
                from_=outgoing_number,
                body=message,
            )
        except Exception as e:
            logger.exception('Error sending twilio sms: %s' % e)
Example 7: users_added_callback
def users_added_callback(self, users):
    '''
    The server URL is dynamic so the same code works on different servers,
    for both user enrollment and iOS profile generation.
    '''
    loader = Loader("/opt/toppatch/mv/media/app/")
    server_url = environ.get('SERVER_CNAME')
    ses_conn = ses.connect_to_region('us-east-1',
                                     aws_access_key_id=environ.get('AWS_SES_ACCESS_KEY_ID'),
                                     aws_secret_access_key=environ.get(
                                         'AWS_SES_SECRET_ACCESS_KEY'))
    for user in users:
        link = str(server_url) + '/enroll/' + str(user.get('enrollment_id'))
        message = loader.load('user_enroll_mail.html').generate(
            company_name=user.get('company_name'),
            user_passwd=user.get('passcode'), activation_link=link)
        # message = 'Your verification \
        # link is : {0} and enrollment password is {1} . To ensure \
        # your device os please open this link in your device \
        # browser only. :)'.format(
        #     str(server_url) + '/enroll/' + str(user['enrollment_id']), user['passcode'])
        # message = message.replace(' ', '')
        try:
            ses_conn.send_email('[email protected]',
                                'MDM Enrollment verification', message,
                                [user['email']], format='html')
        except Exception as err:
            print(repr(err))
Example 8: agent_main
def agent_main(collector):
    to_file(sys.stdout)
    startLogging(sys.stdout)
    return react(
        run_agent, [
            environ.get(
                "FLOCKER_CONFIGURATION_PATH",
                "/etc/flocker",
            ).decode("ascii"),
            environ.get(
                "CATALOG_FIREHOSE_PROTOCOL",
                DEFAULT_FIREHOSE_PROTOCOL,
            ).decode("ascii"),
            environ.get(
                "CATALOG_FIREHOSE_HOSTNAME",
                DEFAULT_FIREHOSE_HOSTNAME,
            ).decode("ascii"),
            int(
                environ.get(
                    "CATALOG_FIREHOSE_PORT",
                    unicode(DEFAULT_FIREHOSE_PORT).encode("ascii"),
                ).decode("ascii")
            ),
            # Base64 encoded
            environ["CATALOG_FIREHOSE_SECRET"].decode("ascii"),
            collector,
        ],
    )
Example 9: setUpClass
def setUpClass(cls):
    cls.token = environ.get('KB_AUTH_TOKEN', None)
    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({'token': cls.token,
                    'provenance': [
                        {'service': 'ReadsUtils',
                         'method': 'please_never_use_it_in_production',
                         'method_params': []
                         }],
                    'authenticated': 1})
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    for nameval in config.items('ReadsUtils'):
        cls.cfg[nameval[0]] = nameval[1]
    cls.shockURL = cls.cfg['shock-url']
    cls.ws = Workspace(cls.cfg['workspace-url'], token=cls.token)
    cls.impl = ReadsUtils(cls.cfg)
    shutil.rmtree(cls.cfg['scratch'])
    os.mkdir(cls.cfg['scratch'])
    suffix = int(time.time() * 1000)
    wsName = "test_ReadsUtils_" + str(suffix)
    cls.ws_info = cls.ws.create_workspace({'workspace': wsName})
    cls.dfu = DataFileUtil(os.environ['SDK_CALLBACK_URL'], token=cls.token)
Example 10: load_env
def load_env(app):
    if 'DATABASE_URI' in environ: app.config['DATABASE_URI'] = environ.get('DATABASE_URI')
    if 'INSTA_ID' in environ: app.config['INSTA_ID'] = environ.get('INSTA_ID')
    if 'INSTA_SECRET' in environ: app.config['INSTA_SECRET'] = environ.get('INSTA_SECRET')
    if 'SECRET_KEY' in environ: app.config['SECRET_KEY'] = environ.get('SECRET_KEY')
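As a usage note for the helper above: it is presumably called once at startup so that environment variables override any defaults already placed in the app config. A minimal sketch, assuming a Flask-style application object (the default URI below is only a placeholder):

from flask import Flask

app = Flask(__name__)
app.config['DATABASE_URI'] = 'sqlite:///local.db'  # placeholder default for local runs

# Values present in the environment override the defaults set above.
load_env(app)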
Example 11: run
def run():
    """
    Run the server.
    """
    # Set up the logger.
    if not os.path.isdir(os.path.join(script_dir, 'logs')):
        os.makedirs(os.path.join(script_dir, 'logs'))
    # Format the logs.
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # Enable the logs to split files at midnight.
    handler = TimedRotatingFileHandler(
        os.path.join(script_dir, 'logs', 'TorSpider.log'),
        when='midnight', backupCount=7, interval=1)
    handler.setLevel(app.config['LOG_LEVEL'])
    handler.setFormatter(formatter)
    log = logging.getLogger('werkzeug')
    log.setLevel(app.config['LOG_LEVEL'])
    log.addHandler(handler)
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['APP_LOG_LEVEL'])
    # Set up the app server, port, and configuration.
    port = int(environ.get('PORT', app.config['LISTEN_PORT']))
    addr = environ.get('LISTEN_ADDR', app.config['LISTEN_ADDR'])
    if app.config['USETLS']:
        context = (app.config['CERT_FILE'], app.config['CERT_KEY_FILE'])
        app.run(host=addr, port=port, threaded=True, ssl_context=context)
    else:
        app.run(host=addr, port=port, threaded=True)
Example 12: memcacheify
def memcacheify(timeout=500):
    """Return a fully configured Django ``CACHES`` setting. We do this by
    analyzing all environment variables on Heroku, scanning for an available
    memcache addon, and then building the settings dict properly.

    If no memcache servers can be found, we'll revert to building a local
    memory cache.

    Returns a fully configured caches dict.
    """
    caches = {}
    if all((environ.get(e, "") for e in MEMCACHE_ENV_VARS)):
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": "localhost:11211", "TIMEOUT": timeout})
    elif all((environ.get(e, "") for e in MEMCACHIER_ENV_VARS)):
        servers = environ.get("MEMCACHIER_SERVERS").replace(",", ";")
        environ["MEMCACHE_SERVERS"] = servers
        environ["MEMCACHE_USERNAME"] = environ.get("MEMCACHIER_USERNAME")
        environ["MEMCACHE_PASSWORD"] = environ.get("MEMCACHIER_PASSWORD")
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": servers, "TIMEOUT": timeout})
    elif all((environ.get(e, "") for e in MEMCACHEDCLOUD_ENV_VARS)):
        servers = environ.get("MEMCACHEDCLOUD_SERVERS").replace(",", ";")
        environ["MEMCACHE_SERVERS"] = servers
        environ["MEMCACHE_USERNAME"] = environ.get("MEMCACHEDCLOUD_USERNAME")
        environ["MEMCACHE_PASSWORD"] = environ.get("MEMCACHEDCLOUD_PASSWORD")
        caches["default"] = CACHE_DEFAULTS
        caches["default"].update({"LOCATION": servers, "TIMEOUT": timeout})
    elif environ.get("MEMCACHEIFY_USE_LOCAL", False):
        caches["default"] = {"BACKEND": "django_pylibmc.memcached.PyLibMCCache"}
    else:
        caches["default"] = {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}
    return caches
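The docstring above already states the contract: the function returns a ready-made CACHES dict. A minimal sketch of the call site in a Django settings module, assuming the helper is importable as memcacheify (as in the django-heroku-memcacheify package):

# settings.py (sketch; the import path is an assumption)
from memcacheify import memcacheify

CACHES = memcacheify(timeout=300)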
Example 13: setUpClass
def setUpClass(cls):
    token = environ.get('KB_AUTH_TOKEN', None)
    cls.ctx = {'token': token, 'provenance': [{'service': 'data_api2',
               'method': 'please_never_use_it_in_production', 'method_params': []}],
               'authenticated': 1}
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    for nameval in config.items('data_api2'):
        cls.cfg[nameval[0]] = nameval[1]
    cls.wsURL = cls.cfg['workspace-url']
    cls.wsClient = workspaceService(cls.wsURL, token=token)
    cls.serviceImpl = data_api2(cls.cfg)
    cls.obj_name = "ReferenceGenomeAnnotations/kb|g.207118"
    cls.obj_name = "ReferenceGenomeAnnotations/kb|g.217864"
    cls.feature = 'kb|g.207118.CDS.3237'
    cls.feature = 'kb|g.217864.CDS.11485'
    cls.gene = 'kb|g.217864.locus.10619'
    cls.obj_name = "ReferenceGenomeAnnotations/kb|g.140057"
    cls.feature = 'kb|g.140057.CDS.2901'
    cls.gene = 'kb|g.140057.locus.2922'
    cls.mrna = 'kb|g.140057.mRNA.2840'
    cls.taxon = u'1779/523209/1'
    cls.assembly = '1837/56/1'
Example 14: run
def run(self, args):
    self.check_not_docsearch_app_id('run a config manually')
    self.exec_shell_command(["docker", "stop", "documentation-scrapper-dev"])
    self.exec_shell_command(["docker", "rm", "documentation-scrapper-dev"])
    f = open(args[0], 'r')
    config = f.read()
    run_command = [
        'docker',
        'run',
        '-e',
        'APPLICATION_ID=' + environ.get('APPLICATION_ID'),
        '-e',
        'API_KEY=' + environ.get('API_KEY'),
        '-e',
        "CONFIG=" + config,
        '-v',
        getcwd() + '/scraper/src:/root/src',
        '--name',
        'documentation-scrapper-dev',
        '-t',
        'algolia/documentation-scrapper-dev',
        '/root/run'
    ]
    return self.exec_shell_command(run_command)
Example 15: get_runtime_paths
def get_runtime_paths(self, what):
    """
    Determine what components of LD_LIBRARY_PATH are necessary to run `what`
    """
    from os import environ
    from os.path import dirname
    self.env.stash()
    self.env.LD_LIBRARY_PATH = environ.get("LD_LIBRARY_PATH", "")
    try:
        # Run ldd
        out, err = self.bld.cmd_and_log([self.env.LDD, what],
                                        output=waflib.Context.BOTH)
    finally:
        self.env.revert()
    # Parse ldd output to determine what paths are used by the dynamic linker
    maybe_paths = set()
    for line in out.split("\n"):
        parts = line.split()
        if not parts: continue
        if parts[1] == "=>": maybe_paths.add(dirname(parts[2]))
    return maybe_paths & set(environ.get("LD_LIBRARY_PATH", "").split(":"))