本文整理汇总了Python中os.path.basename方法的典型用法代码示例。如果您正苦于以下问题:Python path.basename方法的具体用法?Python path.basename怎么用?Python path.basename使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类os.path
的用法示例。
在下文中一共展示了path.basename方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: detect
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def detect(self, newfile):
    '''
    Called when a new file is generated under the monitoring directory.

    The file is queued (newest first) when no filename filters are
    configured, or when at least one configured regex matches the bare
    filename; otherwise it is skipped with a warning.

    :param newfile: Path to file created recently.
    '''
    self._logger.info(' -------- New File Detected! -------- ')
    filename = basename(newfile)
    # any() over a generator short-circuits at the first matching regex
    # instead of building a full list of match results.
    if not self._regexs or any(x.search(filename) for x in self._regexs):
        self._queue.insert(0, newfile)
        # Lazy %-style args: the message is only rendered if INFO is enabled.
        self._logger.info('File "%s" added to the queue.', newfile)
        return
    self._logger.warning('Filename "%s" is not supported! Skip file...', filename)
示例2: stop
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def stop(self):
    '''
    Signals the current thread to stop and waits until it terminates. This blocks
    the calling thread until it terminates -- either normally or through an unhandled
    exception. After the thread has joined, any files still pending in the
    queue are drained and discarded, so the watcher shuts down empty.

    :raises RuntimeError: If an attempt is made to join the current thread as that
                          would cause a deadlock. It is also an error to join() a
                          thread before it has been started and attempts to do so
                          raises the same exception.
    '''
    self._logger.info('Signal {0} thread to stop normally.'.format(str(self)))
    super(FileWatcher, self).stop()
    self._logger.info('Wait until the {0} thread terminates...'.format(str(self)))
    super(FileWatcher, self).join()
    # Drain whatever was queued but never processed; each drop is logged.
    while not self.is_empty:
        self._logger.debug('Drop "%s" from the queue.' % basename(self._queue.pop()))
    # NOTE(review): assert statements are stripped under `python -O`; if the
    # empty-queue invariant matters in production, raise explicitly instead.
    assert self.is_empty, 'Failed to clean the queue.'
示例3: __init__
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def __init__(self, spec_url):
    """Create a new URLLoader.

    Fetches the specification YAML from `spec_url`, records the URL, and
    re-indexes the spec's ``queries`` list into a ``files`` mapping keyed
    by each query's file name (with a ``name``/``download_url`` entry).

    Keyword arguments:
    spec_url -- URL where the specification YAML file is located.

    Raises Exception (carrying the response body) on any non-200 status.
    """
    headers = {'Accept': 'text/yaml'}
    resp = requests.get(spec_url, headers=headers)
    if resp.status_code == 200:
        # safe_load: plain yaml.load() without an explicit Loader can
        # construct arbitrary Python objects from the (remote, hence
        # untrusted) document and is deprecated in modern PyYAML.
        self.spec = yaml.safe_load(resp.text)
        self.spec['url'] = spec_url
        self.spec['files'] = {}
        for queryUrl in self.spec['queries']:
            queryNameExt = path.basename(queryUrl)
            queryName = path.splitext(queryNameExt)[0]  # Remove extension
            item = {
                'name': queryName,
                'download_url': queryUrl
            }
            self.spec['files'][queryNameExt] = item
        del self.spec['queries']
    else:
        raise Exception(resp.text)
示例4: syncToLocal
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def syncToLocal(self, p, spec):
    """Pull tarballs and link metadata for package `p` from the remote S3
    store into the local store directories described by `spec`.

    :param p: package name (used for logging only)
    :param spec: dict providing at least ``hash``, ``storePath``,
                 ``linksPath``, ``tarballHashDir`` and ``tarballLinkDir``
    """
    # NOTE(review): message says "remote store" but this method syncs
    # remote -> local; looks like a copy/paste from the upload path.
    debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
    # Shell pipeline: (1) mirror the tarball hash dir from the bucket;
    # (2) for each link object under linksPath, rebuild the local symlink
    # farm pointing at the fetched tarball. Failures are deliberately
    # tolerated via `|| true` / `2>/dev/null` (best-effort sync).
    cmd = format(
        "mkdir -p %(tarballHashDir)s\n"
        "s3cmd sync -s -v --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch s3://%(b)s/%(storePath)s/ %(tarballHashDir)s/ 2>/dev/null || true\n"
        "for x in `s3cmd ls -s --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch s3://%(b)s/%(linksPath)s/ 2>/dev/null | sed -e 's|.*s3://|s3://|'`; do"
        " mkdir -p '%(tarballLinkDir)s'; find '%(tarballLinkDir)s' -type l -delete;"
        " ln -sf `s3cmd get -s --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch $x - 2>/dev/null` %(tarballLinkDir)s/`basename $x` || true\n"
        "done",
        b=self.remoteStore,
        storePath=spec["storePath"],
        linksPath=spec["linksPath"],
        tarballHashDir=spec["tarballHashDir"],
        tarballLinkDir=spec["tarballLinkDir"])
    err = execute(cmd)
    dieOnError(err, "Unable to update from specified store.")
示例5: test_toolchain_standard_build_dir_remapped
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def test_toolchain_standard_build_dir_remapped(self):
    """
    A build_dir reached through a relative path (or a symlink) must be
    remapped by the toolchain to its real location.
    """
    decoy = mkdtemp(self)
    actual = mkdtemp(self)
    # Route to `actual` via a detour through decoy/.. so the given
    # build_dir differs textually from its realpath.
    spec = Spec()
    spec['build_dir'] = join(decoy, pardir, basename(actual))
    log_stream = StringIO()
    with pretty_logging(stream=log_stream) as captured:
        with self.assertRaises(NotImplementedError):
            self.toolchain(spec)
    self.assertIn("realpath of 'build_dir' resolved to", captured.getvalue())
    self.assertEqual(spec['build_dir'], actual)
示例6: test_transpiler_sourcemap
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def test_transpiler_sourcemap(self):
    """
    Transpiling with generate_source_map enabled must emit a `.map`
    file next to the target containing the expected sourcemap fields.
    """
    build_dir = mkdtemp(self)
    src_dir = mkdtemp(self)
    source = join(src_dir, 'source.js')
    target = 'target.js'
    with open(source, 'w') as stream:
        stream.write('var dummy = function() {\n};\n')
    spec = Spec(build_dir=build_dir, generate_source_map=True)
    self.toolchain.transpile_modname_source_target(
        spec, 'dummy', source, target)
    with open(join(build_dir, target + '.map')) as stream:
        sourcemap = json.load(stream)
    self.assertEqual(sourcemap['mappings'], 'AAAA;AACA;')
    self.assertEqual(len(sourcemap['sources']), 1)
    self.assertEqual(basename(sourcemap['sources'][0]), 'source.js')
    self.assertEqual(sourcemap['file'], target)
示例7: generate_metadata_entry
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def generate_metadata_entry(self, entry_point, toolchain, spec):
    """
    After the toolchain and spec have been executed, this may be
    called to generate the artifact export entry for persistence
    into the metadata file.

    Returns a one-entry dict keyed by the export target's basename,
    describing the toolchain bases, the toolchain binary (name and
    version, when a bin path is recorded in the spec) and the builder
    entry point.
    """
    bin_path = spec.get(TOOLCHAIN_BIN_PATH)
    if bin_path:
        # [bin_name, bin_version]
        toolchain_bin = [basename(bin_path), get_bin_version_str(bin_path)]
    else:
        toolchain_bin = []
    builder = '%s:%s' % (entry_point.module_name, '.'.join(entry_point.attrs))
    return {
        basename(spec['export_target']): {
            'toolchain_bases': trace_toolchain(toolchain),
            'toolchain_bin': toolchain_bin,
            'builder': builder,
        },
    }
示例8: _get_eclipse_output_path
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def _get_eclipse_output_path(p, linkedResources=None):
    """
    Gets the Eclipse path attribute value for the output of project `p`.

    When the output directory lies outside the project tree (its
    relative path starts with '..'), only its basename is returned and,
    if `linkedResources` is given, a linked resource for the absolute
    output dir is appended to it.
    """
    relative_output = p.output_dir(relative=True)
    if not relative_output.startswith('..'):
        # Output is inside the project: the relative path is usable as-is.
        return relative_output
    # Output escapes the project tree: expose it by name via a link.
    name = basename(relative_output)
    if linkedResources is not None:
        linkedResources.append(_eclipse_linked_resource(name, '2', p.output_dir()))
    return name
#: Highest Execution Environment defined by most recent Eclipse release.
#: https://wiki.eclipse.org/Execution_Environments
#: https://git.eclipse.org/c/jdt/eclipse.jdt.debug.git/plain/org.eclipse.jdt.launching/plugin.properties
示例9: _write_cached_testclasses
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def _write_cached_testclasses(cachesDir, jar, jdk, testclasses, excludedclasses):
    """
    Writes `testclasses` to a cache file specific to `jar`.

    :param str cachesDir: directory containing files with cached test lists
    :param JDKConfig jdk: the JDK for which the cached list of classes must be written
    :param list testclasses: a list of test class names
    :param list excludedclasses: a list of excluded class names
    """
    # Embed the JDK major version in the cache file names so caches for
    # different JDKs do not clash (e.g. foo.jar.jdk11.testclasses).
    jdkVersion = '.jdk' + str(jdk.javaCompliance)
    cache = join(cachesDir, basename(jar) + jdkVersion + '.testclasses')
    exclusions = join(cachesDir, basename(jar) + jdkVersion + '.excludedclasses')
    try:
        with open(cache, 'w') as fp:
            for classname in testclasses:
                print(classname, file=fp)
        with open(exclusions, 'w') as fp:
            if excludedclasses:
                mx.warn('Unsupported class files listed in ' + exclusions)
            for classname in excludedclasses:
                # NOTE(review): [1:] drops a leading marker character from
                # each excluded name -- presumably an exclusion prefix added
                # by the producer of this list; confirm the format.
                print(classname[1:], file=fp)
    except IOError as e:
        # Best-effort cache: a write failure is only worth a warning.
        mx.warn('Error writing to ' + cache + ': ' + str(e))
示例10: get_fn_recipe_links
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def get_fn_recipe_links():
    """Collect recipe links from every letter index page.

    Pages through each letter page returned by get_fn_letter_links()
    until get_all_recipes_fn() reports no more recipes. Requests are
    throttled by sleeping the mean of the last two response times.

    :return: dict mapping a running page counter to a list of recipe links
    """
    letter_links = get_fn_letter_links()
    recipe_links = {}
    page_tracker = 0
    for page in letter_links:
        recipe_set = True
        page_num = 1
        lag0 = 0
        while recipe_set:
            t0 = time.time()
            recipe_set = get_all_recipes_fn(path.basename(page), page_num)
            lag1 = time.time() - t0
            recipe_links[page_tracker] = []
            # Guard the extend: on the last page get_all_recipes_fn may
            # return a falsy sentinel (e.g. None/False), and extending a
            # list with None would raise TypeError before the loop exits.
            if recipe_set:
                recipe_links[page_tracker].extend(recipe_set)
            page_num += 1
            page_tracker += 1
            # Throttle: average of the current and previous request times.
            time.sleep(lag1 * .5 + lag0 * .5)
            lag0 = lag1
    return recipe_links
示例11: config_paths_from_log_dir
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def config_paths_from_log_dir(log_dir, base_dirs):
    """Recover the on-disk config paths encoded in a log directory name.

    A log dir basename has the form ``{date} {netconfig} {probconfig}
    [RESTORE@...]``; each config component encodes a relative path with
    ``@`` standing in for the path separator. Each component is resolved
    by globbing inside the corresponding entry of `base_dirs`.

    :param log_dir: log directory path or name to decode
    :param base_dirs: one base directory per expected config component
    :raises AssertionError: if the first component is not a log date, or
            there are more config components than base dirs
    :raises ValueError: if a component does not resolve to exactly one file
    :return: tuple of resolved config paths, one per (base_dir, component)
    """
    log_dir = path.basename(log_dir.strip(path.sep))
    # log_dir == {now} {netconfig} {probconfig} [RESTORE@some_dir@XXXX_YYYY], get [netconfig, probconfig]
    comps = log_dir.split(' ')
    assert is_log_date(comps[0]), 'Invalid log_dir: {}'.format(log_dir)
    comps = [c for c in comps[1:] if _RESTORE_PREFIX not in c]
    assert len(comps) <= len(base_dirs), 'Expected as many config components as base dirs: {}, {}'.format(
        comps, base_dirs)
    def get_real_path(base, prepped_p):
        # Undo the '@' encoding, then anchor the glob under `base`.
        p_glob = prepped_p.replace('@', path.sep)
        p_glob = path.join(base, p_glob)  # e.g., ae_configs/p_glob
        glob_matches = glob.glob(p_glob)
        # We always only replace one character with *, so filter for those.
        # I.e. lr1e-5 will become lr1e*5, which will match lr1e-5 but also lr1e-4.5
        glob_matches_of_same_len = [g for g in glob_matches if len(g) == len(p_glob)]
        if len(glob_matches_of_same_len) != 1:
            raise ValueError('Cannot find config on disk: {} (matches: {})'.format(p_glob, glob_matches_of_same_len))
        return glob_matches_of_same_len[0]
    return tuple(get_real_path(base_dir, comp) for base_dir, comp in zip(base_dirs, comps))
示例12: purge_checkpoints
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def purge_checkpoints(log_dir_root, target_dir, verbose):
    """Move most checkpoints of every run under `log_dir_root` into
    `target_dir`, keeping only three per run.

    For each run's checkpoint dir, an early checkpoint, the middle one
    and the newest are kept in place; all others are moved into a
    mirrored directory layout below `target_dir`. Runs with five or
    fewer checkpoints are skipped entirely.

    :param log_dir_root: root containing the run log dirs
    :param target_dir: destination root for the purged checkpoints
    :param verbose: when True, print each skipped run and moved file
    """
    vprint = print if verbose else no_op.NoOp
    ckpt_dir_glob = Saver.ckpt_dir_for_log_dir(path.join(log_dir_root, '*'))
    ckpt_dir_matches = sorted(glob.glob(ckpt_dir_glob))
    for ckpt_dir in ckpt_dir_matches:
        log_dir = Saver.log_dir_from_ckpt_dir(ckpt_dir)
        all_ckpts = Saver.all_ckpts_with_iterations(ckpt_dir)
        if len(all_ckpts) <= 5:
            vprint('Skipping {}'.format(log_dir))
            continue
        target_log_dir = path.join(target_dir, path.basename(log_dir))
        target_ckpt_dir = Saver.ckpt_dir_for_log_dir(target_log_dir)
        os.makedirs(target_ckpt_dir, exist_ok=True)
        # Keep an early, the middle and the latest checkpoint.
        # Entries appear to be (iteration, path) tuples -- see the unpack
        # below; assumed sorted by iteration (TODO confirm in Saver).
        ckpts_to_keep = {all_ckpts[2], all_ckpts[len(all_ckpts) // 2], all_ckpts[-1]}
        ckpts_to_move = set(all_ckpts) - ckpts_to_keep
        vprint('Moving to {}:'.format(target_ckpt_dir))
        for _, ckpt_to_move in ckpts_to_move:
            # ckpt_to_move is /path/to/dir/ckpt-7000, add a * to match ckpt-7000.data, .meta, .index
            for ckpt_file in glob.glob(ckpt_to_move + '*'):
                vprint('- {}'.format(ckpt_file))
                shutil.move(ckpt_file, target_ckpt_dir)
示例13: __on_apply
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def __on_apply(self, *__):
    """Decrypt the selected GPG-encrypted backup file and import it.

    Reads the paraphrase from the dialog's entry widget, decrypts
    ``self._filename`` into a uniquely-named file in the user cache
    dir, and imports the resulting JSON. On decryption failure a
    notification is shown instead.
    """
    from ...models import BackupJSON
    try:
        paraphrase = self.paraphrase_widget.entry.get_text()
        if not paraphrase:
            # A single space stands in for an empty entry -- presumably the
            # GPG wrapper requires a non-empty passphrase string (verify).
            paraphrase = " "
        # NamedTemporaryFile is used only to obtain a unique basename; the
        # actual output file is created in the user cache directory.
        output_file = path.join(GLib.get_user_cache_dir(),
                                path.basename(NamedTemporaryFile().name))
        status = GPG.get_default().decrypt_json(self._filename, paraphrase, output_file)
        if status.ok:
            BackupJSON.import_file(output_file)
            self.destroy()
        else:
            self.__send_notification(_("There was an error during the import of the encrypted file."))
    except AttributeError:
        # Raised when the decrypted payload is not the expected JSON shape.
        Logger.error("[GPG] Invalid JSON file.")
示例14: send_photo
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def send_photo(bot, update: Update, user: User, render):
    """Forward the highest-resolution photo from the message to the
    user's mail via the async send_file task, acknowledging in chat."""
    tg_file = update.message.photo[-1].get_file()
    photo = download(tg_file)
    caption = update.message.caption
    stripped = caption.strip() if caption is not None else ''
    # Derive the subject from the caption when one was provided.
    if stripped:
        subject = 'Photo: {}'.format(get_subject(stripped))
    else:
        subject = 'Photo note to self'
    update.message.reply_text(text=render('photo_is_sent'))
    tasks.send_file.delay(
        user_id=user.pk,
        file=photo,
        filename=basename(tg_file.file_path),
        subject=subject,
        text=stripped,
    )
示例15: upload_asset
# 需要导入模块: from os import path [as 别名]
# 或者: from os.path import basename [as 别名]
def upload_asset(self, path, label="", content_type=""):
    """
    :calls: `POST https://<upload_url>/repos/:owner/:repo/releases/:release_id/assets?name=foo.zip <https://developer.github.com/v3/repos/releases/#upload-a-release-asset>`_
    :rtype: :class:`github.GitReleaseAsset.GitReleaseAsset`
    :param path: local path of the file to upload; its basename becomes the asset name
    :param label: optional display label for the asset
    :param content_type: optional MIME type sent as the Content-Type header
    """
    # `unicode` implies this targets Python 2 (or a compat alias); the
    # assert-based argument validation matches the surrounding library style.
    assert isinstance(path, (str, unicode)), path
    assert isinstance(label, (str, unicode)), label
    post_parameters = {
        "name": basename(path),
        "label": label
    }
    headers = {}
    # Only send an explicit Content-Type when the caller supplied one.
    if len(content_type) > 0:
        headers["Content-Type"] = content_type
    # upload_url is an RFC 6570 template; split("{?") strips "{?name,label}".
    resp_headers, data = self._requester.requestBlobAndCheck(
        "POST",
        self.upload_url.split("{?")[0],
        parameters=post_parameters,
        headers=headers,
        input=path
    )
    return github.GitReleaseAsset.GitReleaseAsset(self._requester, resp_headers, data, completed=True)