This article collects typical usage examples of the os.path.isfile function in Python. If you are wondering exactly what isfile does, how to call it, or what it looks like in real code, the hand-picked examples below should help.
A total of 15 isfile code examples are shown below, sorted by popularity by default.
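Before the project-specific examples, here is a minimal, self-contained sketch of the basic call (the temporary file used here is purely illustrative): os.path.isfile(path) returns True only when path refers to an existing regular file (symbolic links are followed), and False for directories, broken links, or paths that do not exist.

import os
import os.path
import tempfile

# Create a temporary file so the check has something real to inspect.
with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as tmp:
    tmp.write(b"hello")
    tmp_path = tmp.name

print(os.path.isfile(tmp_path))                   # True: an existing regular file
print(os.path.isfile(os.path.dirname(tmp_path)))  # False: a directory, not a file
print(os.path.isfile(tmp_path + ".missing"))      # False: the path does not exist

os.remove(tmp_path)  # clean up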
Example 1: loadPlugin
def loadPlugin(package, filename, *args):
    modName = "%s.%s" % (__name__, path.basename(filename))
    # Search for full filename
    fullPath = path.join(_ROOT, package, filename)
    if not path.isfile(fullPath):
        fullPath = fullPath + ".py"
        if not path.isfile(fullPath):
            raise Exception("Unable to find module: %s" % fullPath)
    try:
        # Load file
        logging.debug("Loading module '%s' at: %s" % (modName, fullPath))
        module = imp.load_source(__name__, fullPath)
        if not module:
            raise Exception('Error loading module source')

        # Create instance using `create`
        logging.debug("Creating instance of module '%s'" % modName)
        inst = module.create(*args)

        # Validate
        if not inst:
            raise Exception("Create did not return a valid instance")
        if len(inst.__class__.__bases__) == 0:
            logging.warning("Class '%s' does not inherit from base class", modName)
        return inst
    except Exception as e:
        logging.error("Error loading module: %s" % e)
        raise Exception("Error loading module: %s" % e)
Example 2: initializeInitD
def initializeInitD(ownerName):
    if (os.path.isdir(initdDirName)):
        fn = join(RANGER_USERSYNC_HOME, initdProgramName)
        initdFn = join(initdDirName, initdProgramName)
        shutil.copy(fn, initdFn)
        if (ownerName != 'ranger'):
            f = open(initdFn, 'r')
            filedata = f.read()
            f.close()
            find_str = "LINUX_USER=ranger"
            replace_str = "LINUX_USER=" + ownerName
            newdata = filedata.replace(find_str, replace_str)
            f = open(initdFn, 'w')
            f.write(newdata)
            f.close()
        os.chmod(initdFn, 0550)
        rcDirList = ["/etc/rc2.d", "/etc/rc3.d", "/etc/rc.d/rc2.d", "/etc/rc.d/rc3.d"]
        for rcDir in rcDirList:
            if (os.path.isdir(rcDir)):
                for prefix in initPrefixList:
                    scriptFn = prefix + initdProgramName
                    scriptName = join(rcDir, scriptFn)
                    if isfile(scriptName) or os.path.islink(scriptName):
                        os.remove(scriptName)
                    os.symlink(initdFn, scriptName)
        userSyncScriptName = "ranger-usersync-services.sh"
        localScriptName = os.path.abspath(join(RANGER_USERSYNC_HOME, userSyncScriptName))
        ubinScriptName = join("/usr/bin", initdProgramName)
        if isfile(ubinScriptName) or os.path.islink(ubinScriptName):
            os.remove(ubinScriptName)
        os.symlink(localScriptName, ubinScriptName)
Example 3: check_local_config_schema
def check_local_config_schema(build):
    log.info("Verifying your configuration settings...")
    # leave this import here: might not be on sys.path in some situations
    import validictory

    local_conf_filename = build.tool_config.get('general.local_config')

    if local_conf_filename is not None:
        # explicit conf file defined
        if not path.isfile(local_conf_filename):
            raise ConfigurationError("{file} does not exist!".format(file=local_conf_filename))
    else:
        local_conf_filename = 'local_config.json'
        if not path.isfile(local_conf_filename):
            log.warning("Local configuration file '{file}' does not exist!".format(file=local_conf_filename))

    with open(local_conf_filename) as local_conf_file:
        local_conf = json.load(local_conf_file)

    from forge.remote import Remote
    from forge import build_config
    remote = Remote(build_config.load())
    local_conf_schema = remote._api_get('platform/{platform_version}/local_config_schema'.format(
        platform_version=build.config['platform_version']))

    try:
        validictory.validate(local_conf, local_conf_schema)
    except validictory.validator.UnexpectedPropertyError as e:
        log.warning('Unexpected setting: "{error}" in "{file}". This will be ignored.'.format(
            file=local_conf_filename,
            error=e)
        )

    log.info("Configuration settings check complete")
Example 4: delete_certificate
def delete_certificate(self, certificate_id, hackathon):
    """Delete certificate by azureKey.id and hackathon

    Delete the hackathon-azureKey relationship first. If the azureKey is not needed any more, delete the azureKey too

    :type certificate_id: int
    :param certificate_id: id of AzureKey

    :type hackathon: Hackathon
    :param hackathon: instance of Hackathon
    """
    # delete all hackathon-azureKey relationships first
    azure_key = AzureKey.objects(id=certificate_id).first()

    # if no relations left, delete the azureKey itself
    if azure_key in hackathon.azure_keys:
        try:
            if isfile(azure_key.cert_url):
                os.remove(azure_key.cert_url)
            else:
                self.storage.delete(azure_key.cert_url)

            if isfile(azure_key.pem_url):
                os.remove(azure_key.pem_url)
            else:
                self.storage.delete(azure_key.pem_url)
        except Exception as e:
            self.log.error(e)

        hackathon.azure_keys.remove(azure_key)
        hackathon.save()

    return ok(True)
Example 5: test_transfer_and_delete
def test_transfer_and_delete(self):
    """
    Make sure the TransferAndDelete task does what it says
    """
    storage = QueuedStorage(
        local='django.core.files.storage.FileSystemStorage',
        remote='django.core.files.storage.FileSystemStorage',
        local_options=dict(location=self.local_dir),
        remote_options=dict(location=self.remote_dir),
        task='queued_storage.tasks.TransferAndDelete')

    field = TestModel._meta.get_field('file')
    field.storage = storage

    obj = TestModel(file=File(self.test_file))
    obj.save()

    obj.file.storage.result.get()

    self.assertFalse(
        path.isfile(path.join(self.local_dir, obj.file.name)),
        "Local file is still available")
    self.assertTrue(
        path.isfile(path.join(self.remote_dir, obj.file.name)),
        "Remote file is not available.")
Example 6: test_create_structure
def test_create_structure(tmpfolder):
    struct = {"my_file": "Some content",
              "my_folder": {
                  "my_dir_file": "Some other content",
                  "empty_file": "",
                  "file_not_created": None
              },
              "empty_folder": {}}
    expected = {"my_file": "Some content",
                "my_folder": {
                    "my_dir_file": "Some other content",
                    "empty_file": ""
                },
                "empty_folder": {}}
    changed, _ = structure.create_structure(struct, {})

    assert changed == expected
    assert isdir("my_folder")
    assert isdir("empty_folder")
    assert isfile("my_folder/my_dir_file")
    assert isfile("my_folder/empty_file")
    assert not isfile("my_folder/file_not_created")
    assert isfile("my_file")
    assert open("my_file").read() == "Some content"
    assert open("my_folder/my_dir_file").read() == "Some other content"
    assert open("my_folder/empty_file").read() == ""
Example 7: get_recipes
def get_recipes(filter):
    with open('support.json') as fi:
        supported = json.load(fi)
    result = 'result.txt'
    if isfile(result):
        with open(result) as fi:
            for line in fi:
                line = line.split()
                if(line[1] == "OK"):
                    OK.append(line[0])
    for dn in sorted(os.listdir('..')):
        if(dn not in OK):
            recipe_dir = join('..', dn)
            meta_path = join(recipe_dir, 'meta.yaml')
            if not isfile(meta_path):
                continue
            with open(meta_path) as fi:
                data = fi.read()
            name = yaml.load(data)['package']['name']
            if name not in supported:
                continue
            sl = supported[name]
            if sl == 'all':
                ALLOWED.append(name)
                yield recipe_dir, name
            else:
                assert isinstance(sl, list)
                if filter in sl:
                    ALLOWED.append(name)
                    yield recipe_dir, name
Example 8: process
def process(filename, size=-1):
    file = out_dir + filename
    if path.isfile(file) and stat(file).st_size == size:
        print 'Skipping: ' + filename
        return

    print 'Processing: ' + filename

    handle = urlopen(base_url + filename)
    headers = handle.info()
    content_length = int(headers.getheader('Content-Length'))
    last_modified = mktime(strptime(headers.getheader('Last-Modified'), '%a, %d %b %Y %H:%M:%S %Z'))

    if rfind(filename, '/') > 0:
        dir = out_dir + filename[:rfind(filename, '/')]
    else:
        dir = out_dir

    if not path.isdir(dir):
        print 'Creating ' + dir
        makedirs(dir)

    if not path.isfile(file):
        download(filename, last_modified)
    else:
        file_stat = stat(file)
        if file_stat.st_mtime != last_modified or file_stat.st_size != content_length:
            download(filename, last_modified)
        else:
            print 'Skipping: ' + filename
Example 9: dump_db
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it"""
    require('environment', provided_by=[production, staging])

    if ((isfile(dumpfile) and confirm('\n%s already exists locally. Do you '
                                      'want to overwrite it?' % dumpfile, default=False))
            or not isfile(dumpfile)):

        remote_filename = '%s/%s' % (env['project_path'], dumpfile)

        if ((exists(remote_filename) and confirm('\n%s already exists. Do you '
                                                 'want to overwrite it?' % remote_filename, default=False))
                or not exists(remote_filename)):

            print('\nDumping DB...')

            with settings(hide('stderr')):
                sudo('mysqldump -u %s -p %s > %s' % (env['db_user'],
                                                     env['db_name'],
                                                     remote_filename))
                get(remote_filename, '.')
        else:
            print('\nAborting.')
    else:
        print('\nAborting.')
Example 10: status
def status(request, task_id):
    if request.method == 'GET':
        status_file_path = path.join(settings.MEDIA_ROOT, 'blast', 'task', task_id, 'status.json')
        status = {'status': 'unknown'}
        if path.isfile(status_file_path):
            with open(status_file_path, 'rb') as f:
                statusdata = json.load(f)
                if statusdata['status'] == 'pending' and settings.USE_CACHE:
                    tlist = cache.get('task_list_cache', [])
                    num_preceding = -1
                    if tlist:
                        for index, tuple in enumerate(tlist):
                            if task_id in tuple:
                                num_preceding = index
                                break
                    statusdata['num_preceding'] = num_preceding
                elif statusdata['status'] == 'running':
                    asn_path = path.join(settings.MEDIA_ROOT, 'blast', 'task', task_id, (task_id + '.asn'))
                    if path.isfile(asn_path):
                        with open(asn_path, 'r') as asn_f:
                            astr = asn_f.read()
                            processed_seq_count = astr.count('title "')
                            statusdata['processed'] = processed_seq_count
                    else:
                        statusdata['processed'] = 0
                return HttpResponse(json.dumps(statusdata))
        return HttpResponse(json.dumps(status))
    else:
        return HttpResponse('Invalid Post')
Example 11: is_fresh
def is_fresh(self, key, depends):
    if self.force:
        return
    mtime_cache = self.mtime_cache
    if self.prereq:
        output = join(self.base_dir, key)
        if not isfile(output):
            self.prereq_data.pop(key, None)
            return
        for dep in depends:
            if newer(dep, output, mtime_cache):
                self.prereq_data.pop(key, None)
                return
        if newer(self.config_path, output, mtime_cache):
            self.prereq_data.pop(key, None)
            return
        return 1
    paths = self.output_data.get(key)
    if not paths:
        return
    output_dir = self.output_dir
    for output in paths:
        output = join(output_dir, output)
        if not isfile(output):
            self.output_data.pop(key)
            return
    output = join(output_dir, list(paths).pop())
    for dep in depends:
        if newer(dep, output, mtime_cache):
            self.output_data.pop(key)
            return
    if newer(self.config_path, output, mtime_cache):
        self.output_data.pop(key)
        return
    return 1
Example 12: test_demo_unicode_filenames
def test_demo_unicode_filenames(self):
    """Bug 741660: Demo package containing filenames with non-ASCII
    characters works"""
    fout = StringIO()
    zf = zipfile.ZipFile(fout, "w")
    zf.writestr("demo.html", """<html></html""")
    zf.writestr("css/예제.css", "h1 { color: red }")
    zf.writestr("js/示例.js", 'alert("HELLO WORLD");')
    zf.close()

    s = Submission(
        title="Hello world", slug="hello-world", description="This is a hello world demo", creator=self.user
    )
    s.demo_package.save("play_demo.zip", ContentFile(fout.getvalue()))
    s.demo_package.close()
    s.clean()
    s.save()

    s.process_demo_package()

    path = s.demo_package.path.replace(".zip", "")

    ok_(isdir(path))
    ok_(isfile((u"%s/index.html" % path).encode("utf-8")))
    ok_(isfile((u"%s/css/예제.css" % path).encode("utf-8")))
    ok_(isfile((u"%s/js/示例.js" % path).encode("utf-8")))

    rmtree(path)
Example 13: test_demo_html_normalized
def test_demo_html_normalized(self):
    """
    Ensure a demo.html in zip file is normalized to index.html when
    unpacked
    """
    fout = StringIO()
    zf = zipfile.ZipFile(fout, "w")
    zf.writestr("demo.html", """<html></html""")
    zf.writestr("css/main.css", "h1 { color: red }")
    zf.writestr("js/main.js", 'alert("HELLO WORLD");')
    zf.close()

    s = Submission(
        title="Hello world", slug="hello-world", description="This is a hello world demo", creator=self.user
    )
    s.demo_package.save("play_demo.zip", ContentFile(fout.getvalue()))
    s.demo_package.close()
    s.clean()
    s.save()

    s.process_demo_package()

    path = s.demo_package.path.replace(".zip", "")

    ok_(isdir(path))
    ok_(isfile("%s/index.html" % path))
    ok_(isfile("%s/css/main.css" % path))
    ok_(isfile("%s/js/main.js" % path))

    rmtree(path)
Example 14: mainFunc
def mainFunc():
    parser = argparse.ArgumentParser(description='Run Elastix registration protocol for all images in the directory')
    parser.add_argument('--refDir', '-r', dest='refDir', required=True,
                        help='The directory containing the reference images.')
    parser.add_argument('--floatFile', '-f', dest='floatFile', required=True,
                        help='Path to the floating image.')
    parser.add_argument('--outDir', '-o', dest='outDir', required=False,
                        help='Path to store the output images/parameters (default: current dir)', default=os.getcwd())
    parser.add_argument('--atlas', '-a', dest='atlas', required=False,
                        help='Path to the atlas segmentation file which will be resampled with the CPP file from the registration.', default=None)
    args = parser.parse_args()

    refImgs = [join(args.refDir, File) for File in listdir(args.refDir)]
    refImgs = [img for img in refImgs if isfile(img) and img.endswith('.nii')]
    if not refImgs:
        print('Couldn\'t find any reference images')
        return
    if not path.isfile(args.floatFile):
        print('Couldn\'t find the float image')

    refImgs.sort(key=str.lower)
    refFloatPairs = [[refImg, args.floatFile] for refImg in refImgs]

    f3dParStr = paramListToShortString(f3d_params)
    aladinParStr = paramListToShortString(aladin_params)

    for rfPair in refFloatPairs:
        baseName = basename(rfPair[0])[:-4] + '_' + basename(rfPair[1])[:-4]
        currOutDir = join(args.outDir, baseName)
        mkdir(currOutDir)

        elastixLogPath = join(currOutDir, baseName + '_LOG.txt')
        elastixCommand = elastixExec + ' -f ' + rfPair[0] + ' -m ' + rfPair[1] + ' -p '.join(elastixParams) + ' -o ' + currOutDir
        elastixLog = ''
        try:
            elastixLog = ccall(elastixCommand, shell=True, stderr=STDOUT)
        except CalledProcessError as err:
            writeAndDie(err.output, elastixLogPath)
        with open(elastixLogPath, 'w') as f:
            f.write(elastixLog)

        transformParameterFiles = ['TransformParameters.0.txt', 'TransformParameters.1.txt']
        transformParameterFiles = [join(currOutDir, tpFile) for tpFile in transformParameterFiles]
        for tpFilePath in transformParameterFiles:
            with open(tpFilePath, 'r') as tpFile:
                tpCont = tpFile.read()
            tpCont = tpCont.replace('(FinalBSplineInterpolationOrder 3)', '(FinalBSplineInterpolationOrder 1)')
            with open(tpFilePath, 'w') as tpFile:
                tpFile.write(tpCont)

        if args.atlas is not None:
            atlasOutDir = join(currOutDir, 'atlas')
            mkdir(atlasOutDir)
            trfxCmd = trfxExec + ' -in ' + args.atlas + ' -out ' + atlasOutDir + ' -tp ' + transformParameterFiles[-1]
            try:
                resampleLog = ccall(trfxCmd, shell=True, stderr=STDOUT)
            except CalledProcessError as err:
                writeAndDie(err.output, join(atlasOutDir, 'ERR.txt'))
Example 15: get_npz
def get_npz(name):
    fname = 'npz_data/%s.npz' % name
    if self.use_saved_npz and path.isfile(fname):
        all_data = np.load(fname)
        # Each work contains many parts. Loop through each one.
        return [all_data[i] for i in all_data.files]

    music_file = self._get_path('data/', name + '.krn')
    if not path.isfile(music_file):
        music_file = music_file[:-3] + 'xml'
        if not path.isfile(music_file):
            raise Exception("Cannot find score for %s" % music_file[:-4])

    score = music21.converter.parse(music_file)

    all_arr = []
    for part in score.parts:
        arr = []
        for note in part.flat:
            if isinstance(note, music21.note.Note):
                elem = (note.ps, note.quarterLength)
            elif isinstance(note, music21.note.Rest):
                elem = (0.0, note.quarterLength)
            else:
                continue
            arr.append(elem)
        all_arr.append(np.array(arr))

    if self.save_data:
        np.savez(fname, *all_arr)
    return all_arr