本文整理汇总了Python中locals函数的典型用法代码示例。如果您正苦于以下问题:Python locals函数的具体用法?Python locals怎么用?Python locals使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了locals函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: root
def root(module=None):
    """
    Set or get the "root module" for helipad.

    The root module's directory is used as the definition of where relative
    paths are based off of.

    If *module* is None, return the currently configured root module.
    Otherwise *module* may be a module object or a dotted-path string naming
    one; it is resolved and stored, and a reference to the ``helipad``
    package itself is returned so calls can be chained.

    Raises:
        ValueError: if *module* does not resolve to a module object.
    """
    import importlib
    global _ROOT_MODULE
    if module is None:
        return _ROOT_MODULE
    if isinstance(module, str):
        # importlib resolves the full dotted path directly, replacing the
        # old __import__ call plus manual attribute walk.
        module = importlib.import_module(module)
    if inspect.ismodule(module):
        _ROOT_MODULE = module
    else:
        raise ValueError("Invalid module: %s" % module)
    # Return a reference to this package (so that we can string together
    # method calls).
    return importlib.import_module('helipad')
示例2: check_server_disallowed
def check_server_disallowed(self):
    """
    Check if server domain name or IP is disallowed in settings.py.

    Validates the hostname against settings.DISALLOWED_DOMAIN_LIST,
    resolves it via DNS, then checks the resulting address against
    settings.DISALLOWED_SERVER_IP_LIST (entries may be bare IPs or CIDR
    "ip/bits" blocks; blank lines and '#' comments are skipped).

    Raises:
        ValidationError: if the hostname is disallowed, cannot be
            resolved, or its IP falls inside a disallowed block.

    NOTE(review): the error messages interpolate ``% locals()``, so the
    local names ``hostname``, ``domain`` and ``ip`` are load-bearing —
    do not rename them.
    """
    # Hostname component of the parsed URL — presumably index 2 of
    # self.netloc_parts holds the host; confirm against the parser.
    hostname = self.netloc_parts[2].lower()
    if (hasattr(settings, 'DISALLOWED_DOMAIN_LIST') and
        settings.DISALLOWED_DOMAIN_LIST):
        for domain in settings.DISALLOWED_DOMAIN_LIST:
            # Match the domain itself or any subdomain of it.
            if hostname == domain or hostname.endswith('.' + domain):
                raise ValidationError(unicode(
                    _("Domain name %(domain)s is disallowed.") % locals()))
    try:
        ip = socket.gethostbyname(hostname)
    except socket.error:
        raise ValidationError(unicode(
            _("Could not resolve IP address for %(hostname)s.") %
            locals()))
    if (not hasattr(settings, 'DISALLOWED_SERVER_IP_LIST') or
        not settings.DISALLOWED_SERVER_IP_LIST):
        return
    # Compare addresses as integers so netmask arithmetic works.
    server = long_ip(ip)
    for disallowed in settings.DISALLOWED_SERVER_IP_LIST:
        disallowed = disallowed.strip()
        # Skip blank and commented entries in the configured list.
        if disallowed == '' or disallowed.startswith('#'):
            continue
        mask = bit_mask(32)
        if '/' in disallowed:
            # CIDR notation: compare only the network portion.
            disallowed, bits = disallowed.split('/', 1)
            mask = slash_mask(int(bits))
        identifier = long_ip(disallowed) & mask
        masked = server & mask
        if masked == identifier:
            raise ValidationError(unicode(
                _("Server IP address %(ip)s is disallowed.") % locals()))
示例3: project_detail
def project_detail(request, pk, page=''):
    """
    Show a project's detail, bugs or testers page, or enlist the current
    user as a tester on the project.

    *page* is the URL suffix captured by the URLconf: None (or the default
    '') renders the detail page; '/bugs', '/testers' and '/enlist' select
    the other views; anything else is a 404.

    NOTE: locals() is handed to the templates, so the names ``project``,
    ``testers``, ``bugs`` (and ``user``/``tester`` below) form the
    template context — do not rename them.
    """
    try:
        project = Project.objects.get(pk=pk)
    except Project.DoesNotExist:
        raise Http404
    testers = project.testers.all()
    bugs = project.bugs.all()
    # BUG FIX: removed a leftover debug `print pk, page` and compare to
    # None with `is`, not `==`.
    if page is None:
        return render_to_response('project_detail.html', locals(),
                                  context_instance=RequestContext(request))
    elif page == '/bugs':
        return render_to_response('project_bugs.html', locals(),
                                  context_instance=RequestContext(request))
    elif page == '/testers':
        return render_to_response('project_testers.html', locals(),
                                  context_instance=RequestContext(request))
    elif page == '/enlist':
        user = request.user
        if not user.is_authenticated():
            raise PermissionDenied
        # The currently logged-in user must be a tester.
        if not hasattr(user, 'tester'):
            raise PermissionDenied
        tester = user.tester
        project.add_tester(tester)
        return HttpResponseRedirect('/projects/%i/testers' % project.pk)
    else:
        raise Http404
示例4: Get_Marshall
def Get_Marshall(GetScript, SetScript, TestScript, User, Group):
    """
    Marshal the result of Get() into (exit_code, dict) form.

    Each None argument is normalized to the empty string before calling
    Get(); every field returned by Get(), plus Result, is wrapped in
    protocol.MI_String and stored under its parameter name.
    """
    def _blank_if_none(value):
        # DSC passes missing properties as None; treat them as ''.
        return '' if value is None else value

    GetScript = _blank_if_none(GetScript)
    SetScript = _blank_if_none(SetScript)
    TestScript = _blank_if_none(TestScript)
    User = _blank_if_none(User)
    Group = _blank_if_none(Group)
    (retval, GetScript, SetScript, TestScript, User, Group,
     Result) = Get(GetScript, SetScript, TestScript, User, Group)
    retd = {
        'GetScript': protocol.MI_String(GetScript),
        'SetScript': protocol.MI_String(SetScript),
        'TestScript': protocol.MI_String(TestScript),
        'User': protocol.MI_String(User),
        'Group': protocol.MI_String(Group),
        'Result': protocol.MI_String(Result),
    }
    return retval, retd
示例5: _reset_database
def _reset_database(self, conn_string):
conn_pieces = urlparse.urlparse(conn_string)
if conn_string.startswith('sqlite'):
# We can just delete the SQLite database, which is
# the easiest and cleanest solution
db_path = conn_pieces.path.strip('/')
if db_path and os.path.exists(db_path):
os.unlink(db_path)
# No need to recreate the SQLite DB. SQLite will
# create it for us if it's not there...
elif conn_string.startswith('mysql'):
# We can execute the MySQL client to destroy and re-create
# the MYSQL database, which is easier and less error-prone
# than using SQLAlchemy to do this via MetaData...trust me.
database = conn_pieces.path.strip('/')
loc_pieces = conn_pieces.netloc.split('@')
host = loc_pieces[1]
auth_pieces = loc_pieces[0].split(':')
user = auth_pieces[0]
password = ""
if len(auth_pieces) > 1:
if auth_pieces[1].strip():
password = "-p%s" % auth_pieces[1]
sql = ("drop database if exists %(database)s; "
"create database %(database)s;") % locals()
cmd = ("mysql -u%(user)s %(password)s -h%(host)s "
"-e\"%(sql)s\"") % locals()
exitcode, out, err = execute(cmd)
self.assertEqual(0, exitcode)
示例6: migrate_instance_start
def migrate_instance_start(self, context, instance_uuid,
                           floating_addresses,
                           rxtx_factor=None, project_id=None,
                           source=None, dest=None):
    """
    Begin network migration for an instance: unbind its floating IPs from
    the source host so they can be rebound on the destination.

    Stale floating IPs (ones no longer associated with the instance) are
    skipped with a warning; each remaining address is removed from the l3
    driver and has its host cleared in the database.
    """
    # We only care if floating_addresses are provided and we're
    # switching hosts.
    if not floating_addresses or (source and source == dest):
        return
    # NOTE: log messages interpolate via locals(), so the names
    # instance_uuid and address are load-bearing.
    LOG.info(_("Starting migration network for instance"
               " %(instance_uuid)s"), locals())
    for address in floating_addresses:
        floating_ip = self.db.floating_ip_get_by_address(context,
                                                         address)
        if self._is_stale_floating_ip_address(context, floating_ip):
            # BUG FIX: the split literal previously rendered as
            # "Will notmigrate it " — missing space between fragments.
            LOG.warn(_("Floating ip address |%(address)s| no longer "
                       "belongs to instance %(instance_uuid)s. Will not "
                       "migrate it"), locals())
            continue
        interface = CONF.public_interface or floating_ip['interface']
        fixed_ip = self.db.fixed_ip_get(context,
                                        floating_ip['fixed_ip_id'],
                                        get_network=True)
        self.l3driver.remove_floating_ip(floating_ip['address'],
                                         fixed_ip['address'],
                                         interface,
                                         fixed_ip['network'])
        # NOTE(wenjianhn): Make sure this address will not be bound to the
        # public interface when nova-network restarts on the dest compute
        # node.
        self.db.floating_ip_update(context,
                                   floating_ip['address'],
                                   {'host': None})
示例7: autoinc_sql
def autoinc_sql(self, table, column):
    """
    Emulate an auto-incrementing primary key in Oracle.

    Returns a (sequence_sql, trigger_sql) pair: the first creates the
    backing sequence if it does not already exist, the second installs a
    trigger that fills *column* from the sequence on INSERT.
    """
    # Collect the quoted identifiers once; the SQL templates below look
    # them up by name.
    names = {
        'sq_name': self._get_sequence_name(table),
        'tr_name': self._get_trigger_name(table),
        'tbl_name': self.quote_name(table),
        'col_name': self.quote_name(column),
    }
    sequence_sql = """
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(*) INTO i FROM USER_CATALOG
WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
IF i = 0 THEN
EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
END IF;
END;
/""" % names
    trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
BEGIN
SELECT "%(sq_name)s".nextval
INTO :new.%(col_name)s FROM dual;
END;
/""" % names
    return sequence_sql, trigger_sql
示例8: __init__
def __init__(self, host, name, settings=None):
    """
    Build a service component for *host* named *name*.

    Applies every key in *settings* as an instance attribute, merges
    'extra' entries into existing list attributes, expands needed
    services/artefacts into URIs on self.needs, and derives the initial
    state from settings['state'].

    NOTE(review): needs entries are expanded with ``n % locals()``, so
    templates in needs_services/needs_artefacts may reference any local
    name of this constructor (e.g. host, name); renaming locals would
    change behavior.
    """
    Component.__init__(self, yadtshell.settings.SERVICE, host, name)
    settings = settings or {}
    self.needs_services = []
    self.needs_artefacts = []
    self.needs.add(host.uri)
    # Copy every settings key onto the instance; this may overwrite the
    # two empty lists just created (e.g. needs_services).
    for k in settings:
        setattr(self, k, settings[k])
    extras = settings.get('extra', [])
    for k in extras:
        if hasattr(self, k):
            # Extend an existing (list-valued) attribute in place.
            getattr(self, k).extend(extras[k])
        else:
            setattr(self, k, extras[k])
    for n in self.needs_services:
        if n.startswith(yadtshell.settings.SERVICE):
            # Entry is already a full service URI template.
            self.needs.add(n % locals())
        else:
            self.needs.add(yadtshell.uri.create(
                yadtshell.settings.SERVICE, host.host, n % locals()))
    for n in self.needs_artefacts:
        self.needs.add(yadtshell.uri.create(yadtshell.settings.ARTEFACT,
                                            host.host,
                                            n % locals() + "/" + yadtshell.settings.CURRENT))
    # Map the raw state string to a description, defaulting to UNKNOWN.
    self.state = yadtshell.settings.STATE_DESCRIPTIONS.get(
        settings.get('state'),
        yadtshell.settings.UNKNOWN)
    self.script = None
示例9: _run_scalpel_paired
def _run_scalpel_paired(align_bams, items, ref_file, assoc_files,
                        region=None, out_file=None):
    """Detect indels with Scalpel.

    This is used for paired tumor / normal samples. Falls back to
    single-sample calling when no normal BAM is present. Returns the
    annotated output VCF path.

    NOTE(review): every shell command below is built with
    ``.format(**locals())``, so the local variable names are part of the
    commands — do not rename them.
    """
    config = items[0]["config"]
    if out_file is None:
        out_file = "%s-paired-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
    if not utils.file_exists(out_file):
        with file_transaction(config, out_file) as tx_out_file:
            paired = get_paired_bams(align_bams, items)
            if not paired.normal_bam:
                # No normal sample available: use the single-sample caller.
                ann_file = _run_scalpel_caller(align_bams, items, ref_file,
                                               assoc_files, region, out_file)
                return ann_file
            vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
            perl_exports = utils.get_perl_exports(os.path.dirname(tx_out_file))
            tmp_path = "%s-scalpel-work" % utils.splitext_plus(out_file)[0]
            db_file = os.path.join(tmp_path, "main", "somatic.db")
            if not os.path.exists(db_file + ".dir"):
                # Stale work directory without a database: start fresh.
                if os.path.exists(tmp_path):
                    utils.remove_safe(tmp_path)
                opts = " ".join(_scalpel_options_from_config(items, config, out_file, region, tmp_path))
                opts += " --ref {}".format(ref_file)
                opts += " --dir %s" % tmp_path
                # calling
                cl = ("{perl_exports} && "
                      "scalpel-discovery --somatic {opts} --tumor {paired.tumor_bam} --normal {paired.normal_bam}")
                do.run(cl.format(**locals()), "Genotyping paired variants with Scalpel", {})
            # filtering to adjust input parameters
            bed_opts = " ".join(_scalpel_bed_file_opts(items, config, out_file, region, tmp_path))
            use_defaults = True
            if use_defaults:
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic.indel.vcf")
                # Uses default filters but can tweak min-alt-count-tumor and min-phred-fisher
                # to swap precision for sensitivity
            else:
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic-indel-filter.vcf.gz")
                with file_transaction(config, scalpel_tmp_file) as tx_indel_file:
                    cmd = ("{perl_exports} && "
                           "scalpel-export --somatic {bed_opts} --ref {ref_file} --db {db_file} "
                           "--min-alt-count-tumor 5 --min-phred-fisher 10 --min-vaf-tumor 0.1 "
                           "| bgzip -c > {tx_indel_file}")
                    do.run(cmd.format(**locals()), "Scalpel somatic indel filter", {})
            scalpel_tmp_file = bgzip_and_index(scalpel_tmp_file, config)
            scalpel_tmp_file_common = bgzip_and_index(os.path.join(tmp_path, "main/common.indel.vcf"), config)
            compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
            bcftools_cmd_chi2 = get_scalpel_bcftools_filter_expression("chi2", config)
            bcftools_cmd_common = get_scalpel_bcftools_filter_expression("reject", config)
            fix_ambig = vcfutils.fix_ambiguous_cl()
            # Concatenate chi2-filtered somatic calls with rejected common
            # indels, normalize ambiguous bases, sort and compress.
            cl2 = ("vcfcat <({bcftools_cmd_chi2} {scalpel_tmp_file}) "
                   "<({bcftools_cmd_common} {scalpel_tmp_file_common}) | "
                   " {fix_ambig} | {vcfstreamsort} {compress_cmd} > {tx_out_file}")
            do.run(cl2.format(**locals()), "Finalising Scalpel variants", {})
    ann_file = annotation.annotate_nongatk_vcf(out_file, align_bams,
                                               assoc_files.get("dbsnp"), ref_file,
                                               config)
    return ann_file
示例10: test_reusable_scope
def test_reusable_scope(self):
    """
    A ``let`` scope can be entered repeatedly: its bindings (a, b, c) are
    visible only inside ``with`` blocks, rebindings made inside the scope
    persist into the next ``with`` on the same scope object, and none of
    the scope's names leak into locals() or globals() outside it.

    NOTE(review): ``let``/``scope`` appear to manipulate the caller's
    local frame, so the literal names "a", "b", "c", "d" are significant.
    """
    scope = let(a="tacos", b="soup", c="cake")
    # d is an ordinary local, independent of the scope.
    d = "godzilla"
    with scope:
        self.assertEquals(a, "tacos")
        self.assertEquals(b, "soup")
        self.assertEquals(c, "cake")
        self.assertEquals(d, "godzilla")
        # Rebind two scope names and the plain local inside the scope.
        a = "fajita"
        b = "stew"
        d = "mothra"
    # Outside the with-block the scope names are gone again...
    self.assertFalse("a" in locals())
    self.assertFalse("b" in locals())
    self.assertFalse("c" in locals())
    # ...but the ordinary local keeps its new value.
    self.assertTrue("d" in locals())
    self.assertFalse("a" in globals())
    self.assertFalse("b" in globals())
    self.assertFalse("c" in globals())
    self.assertFalse("d" in globals())
    self.assertEquals(d, "mothra")
    with scope:
        # Rebindings from the first with-block were captured by the scope
        # and restored here; c keeps its original value.
        self.assertEquals(a, "fajita")
        self.assertEquals(b, "stew")
        self.assertEquals(c, "cake")
        self.assertEquals(d, "mothra")
示例11: add_param_writer_object
def add_param_writer_object(name, base_state, typ, var_type="", var_index=None, root_node=False):
    """
    Generate C++ source for the writer methods of an object-typed field.

    Emits a ``start_<name>`` method that opens a nested writer state and,
    for stub types, an additional callback-style ``<name>(Serializer&&)``
    method.

    NOTE: both templates are substituted with ``locals()``, so the local
    names (var_type1, set_varient_index, set_command, return_command,
    base_state, name, typ, var_type) are referenced from the template
    text — do not rename them.
    """
    var_type1 = "_" + var_type if var_type != "" else ""
    if isinstance(var_index, Number):
        var_index = "uint32_t(" + str(var_index) + ")"
    set_varient_index = "serialize(_out, " + var_index + ");\n" if var_index is not None else ""
    ret = Template(reindent(4, """
${base_state}__${name}$var_type1 start_${name}$var_type() && {
$set_varient_index
return { _out, std::move(_state) };
}
""")).substitute(locals())
    if not is_stub(typ) and is_local_type(typ):
        ret += add_param_writer_basic_type(name, base_state, typ, var_type, var_index, root_node)
    if is_stub(typ):
        # BUG FIX: the original used `var_type is not ""`, an identity test
        # that only works because CPython interns the empty-string literal
        # (and warns on 3.8+); use a value comparison instead.
        set_command = "_state.f.end(_out);" if var_type != "" else ""
        return_command = ("{ _out, std::move(_state._parent) }"
                          if var_type != "" and not root_node
                          else "{ _out, std::move(_state) }")
        ret += Template(reindent(4, """
template<typename Serializer>
after_${base_state}__${name} ${name}$var_type(Serializer&& f) && {
$set_varient_index
f(writer_of_$typ(_out));
$set_command
return $return_command;
}""")).substitute(locals())
    return ret
示例12: create_hatch
def create_hatch(self, hatch):
    """
    Return the name of a PostScript tiling pattern for *hatch*, defining
    it in the output stream on first use.

    Patterns are cached in self._hatches so each hatch style is emitted
    only once per document.
    """
    # Size of one pattern tile in points.
    sidelen = 72
    # BUG FIX: dict.has_key() was removed in Python 3; the `in` operator
    # behaves identically on Python 2 as well.
    if hatch in self._hatches:
        return self._hatches[hatch]
    name = 'H%d' % len(self._hatches)
    # NOTE: the templates below interpolate the locals `sidelen` and
    # `name` via % locals() — do not rename them.
    self._pswriter.write("""\
<< /PatternType 1
/PaintType 2
/TilingType 2
/BBox[0 0 %(sidelen)d %(sidelen)d]
/XStep %(sidelen)d
/YStep %(sidelen)d
/PaintProc {
pop
0 setlinewidth
""" % locals())
    self._pswriter.write(
        self._convert_path(Path.hatch(hatch), Affine2D().scale(72.0)))
    self._pswriter.write("""\
stroke
} bind
>>
matrix
makepattern
/%(name)s exch def
""" % locals())
    self._hatches[hatch] = name
    return name
示例13: convert_to_kallisto
def convert_to_kallisto(data):
    """
    Convert a single-cell sample's FASTQ input into Kallisto batch format
    via ``umis kallisto`` and return the path to the barcodes batch file.

    When a minimum barcode depth is configured, cellular barcodes below
    the cutoff are filtered using the previously computed histogram.

    NOTE(review): the command strings are built with
    ``.format(**locals())``, so the local names (umis, cb_options, fq1,
    tx_kallisto_dir, cb_histogram, cb_cutoff) are load-bearing.
    """
    files = dd.get_input_sequence_files(data)
    if len(files) == 2:
        fq1, fq2 = files
    else:
        # Single-end input: fq2 stays None (only fq1 is used below).
        fq1, fq2 = files[0], None
    samplename = dd.get_sample_name(data)
    work_dir = dd.get_work_dir(data)
    kallisto_dir = os.path.join(work_dir, "kallisto", samplename, "fastq")
    out_file = os.path.join(kallisto_dir, "barcodes.batch")
    umis = config_utils.get_program("umis", dd.get_config(data))
    if file_exists(out_file):
        # Already converted; reuse the existing batch file.
        return out_file
    if dd.get_minimum_barcode_depth(data):
        # Filter cellular barcodes below the configured depth cutoff.
        cb_histogram = os.path.join(work_dir, "umis", samplename, "cb-histogram.txt")
        cb_cutoff = dd.get_minimum_barcode_depth(data)
        cb_options = "--cb_histogram {cb_histogram} --cb_cutoff {cb_cutoff}"
        cb_options = cb_options.format(**locals())
    else:
        cb_options = ""
    # {tx_kallisto_dir} is filled in below, once the transaction directory
    # exists inside the with-block.
    cmd = ("{umis} kallisto {cb_options} --out_dir {tx_kallisto_dir} {fq1}")
    with file_transaction(data, kallisto_dir) as tx_kallisto_dir:
        safe_makedir(tx_kallisto_dir)
        message = ("Transforming %s to Kallisto singlecell format. "
                   % fq1)
        do.run(cmd.format(**locals()), message)
    return out_file
示例14: head
def head(self, **KWS):
    ## CHEETAH: generated from #def head at line 5, col 1.
    # NOTE(review): this method is Cheetah-generated code; do not edit it
    # by hand — change the template's "#def head" block instead. It emits
    # the Google Maps <script> markup for a tourist site's location,
    # interpolating $site.latitude/$site.longitude/$site.name from the
    # template search list.
    trans = KWS.get("trans")
    if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
        trans = self.transaction # is None unless self.awake() was called
    if not trans:
        trans = DummyTransaction()
        _dummyTrans = True
    else: _dummyTrans = False
    write = trans.response().write
    SL = self._CHEETAH__searchList
    _filter = self._CHEETAH__currentFilter
    ########################################
    ## START - generated method body
    write(u'''<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false&language=pt-br"></script>
<script type="text/javascript">
function initialize() {
var hotel = new google.maps.LatLng(''')
    _v = VFSL([locals()]+SL+[globals(), builtin],"site.latitude",True) # u'$site.latitude' on line 9, col 40
    if _v is not None: write(_filter(_v, rawExpr=u'$site.latitude')) # from line 9, col 40.
    write(u''', ''')
    _v = VFSL([locals()]+SL+[globals(), builtin],"site.longitude",True) # u'$site.longitude' on line 9, col 56
    if _v is not None: write(_filter(_v, rawExpr=u'$site.longitude')) # from line 9, col 56.
    write(u''');
var myOptions = {
zoom: 16,
center: hotel,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
var map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
var hotelMarker = new google.maps.Marker({
position: hotel,
map: map,
title:"''')
    _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 19, col 14
    if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 19, col 14.
    write(u'''"
\t});
\t
\tvar content = "S\xedtio Tur\xedstico: ''')
    _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 22, col 34
    if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 22, col 34.
    write(u'''<br>"
\tvar infoWindow = new google.maps.InfoWindow({content: content});
\tinfoWindow.setPosition(hotel);
infoWindow.open(map);
}
</script>
''')
    ########################################
    ## END - generated method body
    return _dummyTrans and trans.response().getvalue() or ""
示例15: handle
def handle(self, *args, **options):
    """
    Synchronize the Plugin table with the plugins package: offer to delete
    registered plugins that no longer import, then register any newly
    discovered plugin modules found in the plugins directory.
    """
    # Drop registered plugins whose class can no longer be imported,
    # asking the operator before each deletion.
    for plugin in Plugin.objects.all():
        try:
            plugin.get_class()
        except ImportError:
            while True:
                answer = raw_input('Error on importing {plugin.class_name}. Remove? [y/N]'.format(plugin=plugin))
                answer = answer.strip().lower()
                if answer in ("", "n"):
                    break
                if answer == "y":
                    plugin.delete()
                    break
    # Scan the plugins package directory for modules not yet registered.
    plugin_dir = os.path.dirname(plugins.__file__)
    candidate_paths = (os.path.join(plugin_dir, entry)
                       for entry in os.listdir(plugin_dir))
    detected_plugins = get_plugins(filter(is_module, candidate_paths))
    for found in detected_plugins:
        qualified = get_qualified_name(found)
        if Plugin.objects.filter(class_name=qualified).exists():
            continue
        log.info("Found new plugin: {p}".format(p=found))
        plugin = Plugin.objects.create(
            label=found.name(),
            class_name=qualified,
            plugin_type=get_plugin_type(found),
        )
        log.info("Created new plugin: {plugin.class_name}".format(plugin=plugin))