This article collects typical usage examples of the os.path.getctime function in Python. If you have been wondering what exactly getctime does, how to call it, or what it looks like in real code, the hand-picked examples below should help.
The following shows 15 code examples of the getctime function, sorted by popularity by default.
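Before diving into the examples, here is a minimal, self-contained sketch of the function itself (the path below is illustrative only): os.path.getctime(path) returns a float timestamp, which is the creation time on Windows and the time of the last metadata change on most Unix systems, and it raises OSError if the path does not exist.

import time
from os.path import getctime

# Illustrative: inspect the running script itself; any existing path works.
target = __file__
ctime = getctime(target)  # float seconds since the epoch; OSError if the path is missing
print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ctime)))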
Example 1: _check_if_tg_required
def _check_if_tg_required(self, tg_symbol):
"""Checks if the rule associated with tg_symbol should be ran or not. It's true if:
- The target isn't an existing file (then it's a rule, or a file that needs to be created)
- One of the rule's dependencies isn't a file
- One of the rule's dependency is a file, and has a more recent timestamp
"""
# checking if it's a file
tg_filepath = join(self.makefile_folder, tg_symbol.name)
if not isfile(tg_filepath):
return True
# checking the dependency
for dependency in self.rule_table[tg_symbol].dependencies:
dep_filepath = join(self.makefile_folder, dependency.name)
        if isfile(dep_filepath):
            # the dependency has a more recent timestamp than the target
            if getctime(dep_filepath) > getctime(tg_filepath):  # checking the timestamp
return True
else:
# Assuming that the Makefile is valid and the required file "dependency" is a target of another rule
return True
return False
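Example 1 uses ctime comparisons to decide whether a make-style rule has to run. As a design note, build tools usually compare modification times instead, since on Unix getctime changes on any metadata update rather than reflecting creation. A minimal standalone sketch of the same staleness check based on getmtime (the target and dependency names in the usage comment are hypothetical):

from os.path import getmtime, isfile

def is_stale(target, dependencies):
    # A target that does not exist yet always needs to be (re)built.
    if not isfile(target):
        return True
    target_time = getmtime(target)
    # Rebuild if any existing dependency is newer than the target.
    return any(isfile(dep) and getmtime(dep) > target_time for dep in dependencies)

# Hypothetical usage:
# is_stale('main.o', ['main.c', 'main.h'])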
Example 2: execute
def execute(self, context):
# 1. Save scene, and set environment variables.
if bpy.data.filepath == '':
bpy.ops.wm.save_mainfile('INVOKE_AREA')
else:
bpy.ops.wm.save_as_mainfile('EXEC_AREA')
self.full_file_path = bpy.data.filepath
    self.file_name = split(self.full_file_path)[1][:-6]  # strip the '.blend' extension
self.file_directory = dirname(self.full_file_path)
# 2. Check if 5 minutes has passed since last save. If it has, save new version. Else, just save.
self.version_number = '-0001'
self.old_version_number = self.version_number
    while exists(join(self.file_directory, self.file_name + self.version_number + '.blend')):
self.old_version_number = self.version_number
self.version_number = int(self.version_number[1:])
self.version_number += 1
self.append_number = ''
for i in range(4 - len(str(self.version_number))):
self.append_number += '0'
self.append_number += str(self.version_number)
self.version_number = '-' + self.append_number
try:
self.previous_time = getctime(join(self.file_directory, self.file_name + self.old_version_number + '.blend'))
except FileNotFoundError:
self.previous_time = getctime(self.full_file_path)
if (time() - self.previous_time) >= 300: # Check if 5 minutes has passed (300 seconds).
self.new_file_name = self.file_name + self.version_number
copyfile(join(self.file_directory, self.file_name + '.blend'), join(self.file_directory, self.new_file_name + '.blend'))
return {'FINISHED'}
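Example 2 rebuilds the zero-padded version suffix with a manual padding loop. The same four-digit suffix can be produced with standard string formatting; a small equivalent sketch (the variable name is illustrative):

version = 1
suffix = '-{:04d}'.format(version + 1)     # '-0002'
# equivalently: '-' + str(version + 1).zfill(4)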
Example 3: createDict
def createDict(path, root=None):
    if root is None:  # avoid sharing a mutable default dict between calls
        root = {}
    pathList = listdir(path)
for i, item in enumerate(pathList):
file_path = path_join(path, item)
if item not in ignore_dir and exists(file_path):
if isdir(file_path):
if not root.get(item, False):
root[item] = {"type": "dir", "files": {}}
createDict(file_path, root[item]["files"])
else:
if not root.get(item, False):
log("new file " + file_path)
root[item] = {"type": "file",
"file_size": getsize(file_path),
"mtime": getmtime(file_path),
"ctime": getctime(file_path),
"md5": md5(file_path),
"sha256": sha256(file_path)}
else:
if root[item]["mtime"] != getmtime(file_path):
log("rehashing " + file_path)
root[item] = {"type": "file",
"file_size": getsize(file_path),
"mtime": getmtime(file_path),
"ctime": getctime(file_path),
"md5": md5(file_path),
"sha256": sha256(file_path)}
return root
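Example 3 relies on md5, sha256, log and ignore_dir helpers that are defined elsewhere in that project and not shown here. Assuming the hash helpers simply digest the file's contents, a minimal sketch of such a helper might look like this (the sha256 variant would be identical with hashlib.sha256):

import hashlib

def md5(file_path, chunk_size=65536):
    # Hash the file contents in chunks so large files need not fit in memory.
    h = hashlib.md5()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()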
Example 4: bgzip_and_tabix
def bgzip_and_tabix(fpath, reuse=False, tabix_parameters='', **kwargs):
    gzipped_fpath = fpath + '.gz'
tbi_fpath = gzipped_fpath + '.tbi'
if reuse and \
file_exists(gzipped_fpath) and (getctime(gzipped_fpath) >= getctime(fpath) if file_exists(fpath) else True) and \
file_exists(tbi_fpath) and getctime(tbi_fpath) >= getctime(gzipped_fpath):
info('Actual compressed file and index exist, reusing')
return gzipped_fpath
info('Compressing and tabixing file, writing ' + gzipped_fpath + '(.tbi)')
bgzip = which('bgzip')
tabix = which('tabix')
if not bgzip:
err('Cannot index file because bgzip is not found')
if not tabix:
err('Cannot index file because tabix is not found')
if not bgzip and not tabix:
return fpath
if isfile(gzipped_fpath):
os.remove(gzipped_fpath)
if isfile(tbi_fpath):
os.remove(tbi_fpath)
info('BGzipping ' + fpath)
cmdline = '{bgzip} {fpath}'.format(**locals())
call_process.run(cmdline)
info('Tabixing ' + gzipped_fpath)
cmdline = '{tabix} {tabix_parameters} {gzipped_fpath}'.format(**locals())
call_process.run(cmdline)
return gzipped_fpath
Example 5: is_modified
def is_modified(abs_path, is_file, max_age=48, feature_enabled=False,
                image_file_pattern=compile('^.*$')):
    """Check whether a file was created or modified between now and now minus
    the given max age in hours; returns (is_modified, last_change).
    Returns False if this feature is not configured."""
if not feature_enabled:
return False
oldest_epoch = time() - ( max_age * 60.0 * 60.0 )
is_modified = False
last_change = 0
# on files just check the file ..
if is_file:
if (path.getctime(abs_path) >= oldest_epoch or
path.getmtime(abs_path) >= oldest_epoch):
is_modified = True
last_change = max(path.getctime(abs_path), path.getmtime(abs_path))
    # on folders, find all image files and check those for changes
    # (if we only inspected the folder itself we would get spurious updates,
    # e.g. simply because the folder was touched).
else:
files = findfiles( abs_path, image_file_pattern, doprint=False)
for subfile in files:
if (path.getctime(subfile) >= oldest_epoch or
path.getmtime(subfile) >= oldest_epoch):
is_modified = True
                last_change = max(
                    last_change, path.getctime(subfile), path.getmtime(subfile))
return is_modified, last_change
Example 6: needs_update
def needs_update(arg, dirname, names):
    # visit() callback in the style of the Python 2 os.path.walk API
    last_built = path.getctime('%s.root' % dirname)
times = []
for name in names:
times.append(path.getctime(path.join(dirname,name)))
arg[0] = (last_built < max(times))
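Example 6 is written as a visit() callback for the Python 2 os.path.walk API, which no longer exists in Python 3. A rough equivalent using os.walk, assuming the same '<dirname>.root' naming convention, could be:

import os
from os import path

def needs_update_py3(dirname):
    # Compare the build product '<dirname>.root' against every file under dirname.
    last_built = path.getctime('%s.root' % dirname)
    times = [path.getctime(path.join(root, name))
             for root, _, names in os.walk(dirname)
             for name in names]
    return bool(times) and last_built < max(times)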
Example 7: filesort
def filesort(file1, file2):
""" sort by create time """
ctime1 = getctime(file1)
ctime2 = getctime(file2)
if ctime1 < ctime2:
return -1
elif ctime1 == ctime2:
return 0
else:
return 1
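Example 7 is a cmp-style comparator, which list.sort and sorted in Python 3 no longer accept directly. Sorting by creation time is simpler with a key function; a short sketch (the glob pattern is only an illustration):

import glob
from os.path import getctime

files = glob.glob('*.py')                     # any list of existing paths
files_by_ctime = sorted(files, key=getctime)
# A cmp-style comparator like filesort can still be used via functools.cmp_to_key.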
Example 8: infos_ogr
def infos_ogr(shapepath):
u""" Uses gdal/ogr functions to extract basic informations about shapefile
given as parameter and store into the corresponding dictionary. """
global dico_infos_couche, dico_champs, liste_chps
source = ogr.Open(shapepath, 0) # OGR driver
couche = source.GetLayer() # get the layer
objet = couche.GetFeature(0) # get the first object (index 0)
geom = objet.GetGeometryRef() # get the geometry
def_couche = couche.GetLayerDefn() # get the layer definitions
srs = couche.GetSpatialRef() # get spatial system reference
srs.AutoIdentifyEPSG() # try to determine the EPSG code
# Storing into the dictionary
dico_infos_couche[u'nom'] = path.basename(shapepath)
dico_infos_couche[u'titre'] = dico_infos_couche[u'nom'][:-4].replace('_', ' ').capitalize()
dico_infos_couche[u'nbr_objets'] = couche.GetFeatureCount()
dico_infos_couche[u'nbr_attributs'] = def_couche.GetFieldCount()
dico_infos_couche[u'proj'] = unicode(srs.GetAttrValue("PROJCS")).replace('_', ' ')
dico_infos_couche[u'EPSG'] = unicode(srs.GetAttrValue("AUTHORITY", 1))
'''dico_infos_couche[u'EPSG'] = u"Projection : " + \
unicode(srs.GetAttrValue("PROJCS")).replace('_', ' ') + \
u" - Code EPSG : " + \
unicode(srs.GetAttrValue("AUTHORITY", 1))'''
    # geometry type
if geom.GetGeometryName() == u'POINT':
dico_infos_couche[u'type_geom'] = u'Point'
elif u'LINESTRING' in geom.GetGeometryName():
dico_infos_couche[u'type_geom'] = u'Ligne'
elif u'POLYGON' in geom.GetGeometryName():
dico_infos_couche[u'type_geom'] = u'Polygone'
else:
dico_infos_couche[u'type_geom'] = geom.GetGeometryName()
# Spatial extent (bounding box)
dico_infos_couche[u'Xmin'] = round(couche.GetExtent()[0],2)
dico_infos_couche[u'Xmax'] = round(couche.GetExtent()[1],2)
dico_infos_couche[u'Ymin'] = round(couche.GetExtent()[2],2)
dico_infos_couche[u'Ymax'] = round(couche.GetExtent()[3],2)
# Fields
i = 0
while i < def_couche.GetFieldCount():
liste_chps.append(def_couche.GetFieldDefn(i).GetName())
dico_champs[def_couche.GetFieldDefn(i).GetName()] = def_couche.GetFieldDefn(i).GetTypeName(),\
def_couche.GetFieldDefn(i).GetWidth(),\
def_couche.GetFieldDefn(i).GetPrecision()
i = i+1
dico_infos_couche[u'date_actu'] = unicode(localtime(path.getmtime(shapepath))[2]) +\
u'/'+ unicode(localtime(path.getmtime(shapepath))[1]) +\
u'/'+ unicode(localtime(path.getmtime(shapepath))[0])
dico_infos_couche[u'date_creation'] = unicode(localtime(path.getctime(shapepath))[2]) +\
u'/'+ unicode(localtime(path.getctime(shapepath))[1]) +\
u'/'+ unicode(localtime(path.getctime(shapepath))[0])
# end of function
return dico_infos_couche, dico_champs, liste_chps
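The date fields at the end of Example 8 assemble a day/month/year string from individual localtime fields. The same result (with zero-padded day and month) can be obtained with strftime; a small equivalent sketch, using this script's own path for illustration:

from time import localtime, strftime
from os import path

shapepath = __file__  # any existing path works for illustration
date_creation = strftime('%d/%m/%Y', localtime(path.getctime(shapepath)))
print(date_creation)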
Example 9: generate_demand_time_df
def generate_demand_time_df():
cols = ["demand", "capacity", "day", "comp_time"]
data = pd.DataFrame(columns=cols)
counter = 0
for i, c, d in product(demands, [1, 4], range(1, 8)):
s, e = get_demand_comp_filenames(i, c, d)
diff = (path.getctime(e) - path.getctime(s)) / 2878
if diff > 500:
diff = 2.9
data.loc[counter] = [i, c, d - 1, diff]
counter += 1
return data
Example 10: upgrade_static_files
def upgrade_static_files(self):
"""This method allows for updating a selection of static files
with corresponding files residing in a hidden .pyntrest folder
in your main image folder. This comes in handy when you want to
update the CSS or favicon without touching the core implementation."""
pyn_config_folder = path.join(self.main_images_path, '.pyntrest')
# these files can be overridden
changeable_files = [
path.join('res', 'favicon.png'),
path.join('res', 'favicon-apple.png'),
path.join('css', 'pyntrest-main.css'),
path.join('index.html'),
path.join('bookify.html'),
]
for ch_file in changeable_files:
# the changeable file at its final destination
if 'index' in ch_file:
exis_file = path.join(TEMPLATE_DIRS[0], 'pyntrest', ch_file)
else:
exis_file = path.join(self.static_path, ch_file)
# the candidate file from the main images folder
cand_file = path.join(pyn_config_folder, ch_file)
if not file_exists(exis_file) and not file_exists(cand_file):
# no target file and no custom file --> copy from default
print ('Creating file \'{}\' from default.'.format(exis_file))
copyfile(exis_file + '.default', exis_file)
elif not file_exists(exis_file) and file_exists(cand_file):
# no target file but custom file --> copy from custom
print ('Creating file \'{}\' from version at \'{}\'.'
.format(exis_file, cand_file))
copyfile(cand_file, exis_file)
#print 'staticfile candidate = {}'.format(cand_file)
if not file_exists(cand_file):
continue # nothing to compare
# get modified / created dates
efile_ts = max( path.getctime(exis_file), path.getmtime(exis_file))
cfile_ts = max( path.getctime(cand_file), path.getmtime(cand_file))
if cfile_ts >= efile_ts:
print (
'Updating file \'{}\' with newer version at \'{}\' [{} >> {}].'
.format(ch_file, cand_file, efile_ts, cfile_ts))
copyfile(cand_file, exis_file)
else:
pass
Example 11: generate_hour_time_df
def generate_hour_time_df():
cols = ["hour", "day", "comp_time"]
data = pd.DataFrame(columns=cols)
counter = 0
for i, d in product(["same", "t12", "t19"], range(1, 8)):
s, e = get_hour_comp_filenames(i, d)
diff = (path.getctime(e) - path.getctime(s)) / 2878
if diff > 500:
diff = 2.9
data.loc[counter] = [i, d - 1, diff]
counter += 1
return data
Example 12: generate_time_df
def generate_time_df():
cols = ["vehicles", "capacity", "waiting_time", "day", "comp_time"]
data = pd.DataFrame(columns=cols)
counter = 0
for v, c, wt, d in product(vehicles, caps, waiting_times, range(1, 8)):
s, e = get_comp_filenames(v, c, wt, 0, d)
diff = (path.getctime(e) - path.getctime(s)) / 2878
if diff > 500:
diff = 2.9
data.loc[counter] = [v, c, wt, d - 1, diff]
counter += 1
return data
Example 13: generate_interval_time_df
def generate_interval_time_df():
cols = ["interval", "day", "comp_time"]
data = pd.DataFrame(columns=cols)
counter = 0
for i, d in product(intervals, range(1, 8)):
s, e = get_interval_comp_filenames(i, d)
total_secs = (24 * 60 * 60) / i
diff = (path.getctime(e) - path.getctime(s)) / total_secs
if diff > 500:
diff = 2.9
data.loc[counter] = [i, d - 1, diff]
counter += 1
return data
Example 14: _lvm_pickle
def _lvm_pickle(filename):
""" Reads pickle file (for local use)
:param filename: filename of lvm file
:return lvm_data: dict with lvm data
"""
p_file = '{}.pkl'.format(filename)
lvm_data = False
    # if the pickle file exists and the pickle is up-to-date, just load it.
    if path.exists(p_file) and path.getctime(p_file) > path.getctime(filename):
        with open(p_file, 'rb') as f:
            lvm_data = pickle.load(f)
return lvm_data
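Example 14 only covers the read path of the cache: the pickle next to the .lvm file is loaded only while its ctime is newer than that of the .lvm file, so a stale cache is silently ignored. A hypothetical companion that writes the cache (not part of the example above) could look like:

import pickle

def _lvm_save_pickle(filename, lvm_data):
    # Write the cache next to the source file; a read like _lvm_pickle will use it
    # as long as its ctime stays newer than the ctime of the .lvm file itself.
    with open('{}.pkl'.format(filename), 'wb') as f:
        pickle.dump(lvm_data, f, protocol=pickle.HIGHEST_PROTOCOL)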
Example 15: add_existing_album
def add_existing_album(self, user, oldalbum, oldpath):
newalbum = path.join(ImageUtils.get_root(), 'content', user, oldalbum)
if path.exists(newalbum):
self.debug('album already exists: %s' % newalbum)
return
(post, comment, imgid) = self.get_post_comment_id(oldalbum)
url = 'http://imgur.com/a/%s' % imgid
try:
album_id = self.add_album(newalbum, user, url, post, comment)
except Exception as e:
self.debug('add_existing_album: failed: %s' % str(e))
return
for image in listdir(oldpath):
self.debug('add_existing_album: image=%s' % path.join(oldpath, image))
fakeimage = post
        if comment is not None:
fakeimage = '%s-%s' % (fakeimage, comment)
fakeimage = '%s_%s' % (fakeimage, image.split('_')[-1])
self.add_existing_image(user, fakeimage, path.join(oldpath, image), subdir=oldalbum, album_id=album_id)
# Add post
p = Post()
p.id = post
p.author = user
    if comment is None: p.url = url
p.created = path.getctime(oldpath)
p.subreddit = ''
p.title = ''
try:
self.add_post(p, legacy=1)
except Exception as e:
#self.debug('add_existing_image: %s' % str(e))
pass
# Add comment
    if comment is not None:
        c = Comment()
        c.id = comment
        c.post_id = post
        c.author = user
        c.body = url  # the outer check already guarantees comment is not None
        c.created = path.getctime(oldpath)
try:
self.add_comment(c, legacy=1)
except Exception as e:
#self.debug('add_existing_image: %s' % str(e))
pass