This article collects typical usage examples of the lstat function from Python's portage.os module. If you have been wondering how to use lstat, what it is for, or what real-world calls look like, the selected examples below may help.
The 15 code examples of lstat shown below are sorted by popularity by default.
Example 1: first_existing
def first_existing(path):
"""
Returns the first existing path element, traversing from the given
path to the root directory. A path is considered to exist if lstat
either succeeds or raises an error other than ENOENT or ESTALE.
This can be particularly useful to check if there is permission to
create a particular file or directory, without actually creating
anything.
@param path: a filesystem path
@type path: str
@rtype: str
@return: the element that exists
"""
existing = False
for path in iter_parents(path):
try:
os.lstat(path)
existing = True
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
existing = True
if existing:
return path
return os.sep
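
A small usage sketch, not taken from the source above: assuming first_existing is importable from portage.util (the import location is an assumption), it can be paired with os.access to test whether a file could be created at a path without touching the filesystem:

import os
from portage.util import first_existing  # assumed import location

def can_create(path):
    # Find the nearest ancestor of `path` that already exists, then
    # check for write permission there; nothing is created on disk.
    existing = first_existing(path)
    return os.access(existing, os.W_OK)

print(can_create('/var/tmp/portage/some/new/dir'))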
Example 2: _get_all_modules
def _get_all_modules(self):
"""scans the emaint modules dir for loadable modules
@rtype: dictionary of module_plugins
"""
module_dir = self._module_path
importables = []
names = os.listdir(module_dir)
for entry in names:
# skip any __init__ or __pycache__ files or directories
if entry.startswith('__'):
continue
try:
# probe for the package's __init__.py to ensure this is a real module;
# lstat raises and the entry is skipped if the file is missing
os.lstat(os.path.join(module_dir, entry, '__init__.py'))
importables.append(entry)
except EnvironmentError:
pass
kids = {}
for entry in importables:
new_module = Module(entry, self._namepath)
for module_name in new_module.kids:
kid = new_module.kids[module_name]
kid['parent'] = new_module
kids[kid['name']] = kid
self.parents.append(entry)
return kids
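
The core of the example is the probe for a package's __init__.py. A minimal, standalone sketch of the same idea using only the standard library (the loadable_packages name is made up for illustration):

import os

def loadable_packages(module_dir):
    # An entry counts as a loadable module package only when lstat()
    # succeeds on its __init__.py; missing or plain entries are skipped.
    found = []
    for entry in os.listdir(module_dir):
        if entry.startswith('__'):  # skip __init__.py / __pycache__
            continue
        try:
            os.lstat(os.path.join(module_dir, entry, '__init__.py'))
        except OSError:
            continue
        found.append(entry)
    return found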
Example 3: rcs_archive
def rcs_archive(archive, curconf, newconf, mrgconf):
"""Archive existing config in rcs (on trunk). Then, if mrgconf is
specified and an old branch version exists, merge the user's changes
and the distributed changes and put the result into mrgconf. Lastly,
if newconf was specified, leave it in the archive dir with a .dist.new
suffix along with the last 1.1.1 branch version with a .dist suffix."""
try:
os.makedirs(os.path.dirname(archive))
except OSError:
pass
try:
curconf_st = os.lstat(curconf)
except OSError:
curconf_st = None
if curconf_st is not None and \
(stat.S_ISREG(curconf_st.st_mode) or
stat.S_ISLNK(curconf_st.st_mode)):
_archive_copy(curconf_st, curconf, archive)
if os.path.lexists(archive + ',v'):
os.system(RCS_LOCK + ' ' + archive)
os.system(RCS_PUT + ' ' + archive)
ret = 0
mystat = None
if newconf:
try:
mystat = os.lstat(newconf)
except OSError:
pass
if mystat is not None and \
(stat.S_ISREG(mystat.st_mode) or
stat.S_ISLNK(mystat.st_mode)):
os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
has_branch = os.path.lexists(archive)
if has_branch:
os.rename(archive, archive + '.dist')
_archive_copy(mystat, newconf, archive)
if has_branch:
if mrgconf and os.path.isfile(archive) and \
os.path.isfile(mrgconf):
# This puts the results of the merge into mrgconf.
ret = os.system(RCS_MERGE % (archive, mrgconf))
os.chmod(mrgconf, mystat.st_mode)
os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
os.rename(archive, archive + '.dist.new')
return ret
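
Both this example and the next guard the copy with the same pattern: lstat the path and accept it only if it is a regular file or a symlink. A minimal standalone helper illustrating that guard (the lstat_if_file name is invented here):

import os
import stat

def lstat_if_file(path):
    # Return the lstat result only when the path is a regular file or
    # a symlink; return None if it is missing or of another type.
    try:
        st = os.lstat(path)
    except OSError:
        return None
    if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
        return st
    return None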
Example 4: file_archive
def file_archive(archive, curconf, newconf, mrgconf):
"""Archive existing config to the archive-dir, bumping old versions
out of the way into .# versions (log-rotate style). Then, if mrgconf
was specified and there is a .dist version, merge the user's changes
and the distributed changes and put the result into mrgconf. Lastly,
if newconf was specified, archive it as a .dist.new version (which
gets moved to the .dist version at the end of the processing)."""
_file_archive_ensure_dir(os.path.dirname(archive))
# Archive the current config file if it isn't already saved
if (os.path.lexists(archive) and
len(diffstatusoutput_mixed(
"diff -aq '%s' '%s'", curconf, archive)[1]) != 0):
_file_archive_rotate(archive)
try:
curconf_st = os.lstat(curconf)
except OSError:
curconf_st = None
if curconf_st is not None and \
(stat.S_ISREG(curconf_st.st_mode) or
stat.S_ISLNK(curconf_st.st_mode)):
_archive_copy(curconf_st, curconf, archive)
mystat = None
if newconf:
try:
mystat = os.lstat(newconf)
except OSError:
pass
if mystat is not None and \
(stat.S_ISREG(mystat.st_mode) or
stat.S_ISLNK(mystat.st_mode)):
# Save off new config file in the archive dir with .dist.new suffix
newconf_archive = archive + '.dist.new'
if os.path.isdir(newconf_archive
) and not os.path.islink(newconf_archive):
_file_archive_rotate(newconf_archive)
_archive_copy(mystat, newconf, newconf_archive)
ret = 0
if mrgconf and os.path.isfile(curconf) and \
os.path.isfile(newconf) and \
os.path.isfile(archive + '.dist'):
# This puts the results of the merge into mrgconf.
ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
os.chmod(mrgconf, mystat.st_mode)
os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
return ret
Example 5: testCompileModules
def testCompileModules(self):
for parent, dirs, files in itertools.chain(
os.walk(PORTAGE_BIN_PATH),
os.walk(PORTAGE_PYM_PATH)):
parent = _unicode_decode(parent,
encoding=_encodings['fs'], errors='strict')
for x in files:
x = _unicode_decode(x,
encoding=_encodings['fs'], errors='strict')
if x[-4:] in ('.pyc', '.pyo'):
continue
x = os.path.join(parent, x)
st = os.lstat(x)
if not stat.S_ISREG(st.st_mode):
continue
do_compile = False
if x[-3:] == '.py':
do_compile = True
else:
# Check for python shebang
with open(_unicode_encode(x,
encoding=_encodings['fs'], errors='strict'), 'rb') as f:
line = _unicode_decode(f.readline(),
encoding=_encodings['content'], errors='replace')
if line[:2] == '#!' and 'python' in line:
do_compile = True
if do_compile:
with open(_unicode_encode(x,
encoding=_encodings['fs'], errors='strict'), 'rb') as f:
compile(f.read(), x, 'exec')
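
A standalone sketch of the file-selection logic above: walk a tree, keep only regular files (checked with lstat), and pick up scripts either by the .py extension or by a python shebang. The find_python_scripts name is made up and the compile step is omitted:

import os
import stat

def find_python_scripts(root):
    # Keep only regular files (lstat skips symlinks, fifos, ...), then
    # select .py files or files whose first line is a python shebang.
    scripts = []
    for parent, dirs, files in os.walk(root):
        for name in files:
            path = os.path.join(parent, name)
            try:
                st = os.lstat(path)
            except OSError:
                continue
            if not stat.S_ISREG(st.st_mode):
                continue
            if path.endswith('.py'):
                scripts.append(path)
                continue
            with open(path, 'rb') as f:
                line = f.readline()
            if line.startswith(b'#!') and b'python' in line:
                scripts.append(path)
    return scripts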
Example 6: testBashSyntax
def testBashSyntax(self):
for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
parent = _unicode_decode(parent,
encoding=_encodings['fs'], errors='strict')
for x in files:
x = _unicode_decode(x,
encoding=_encodings['fs'], errors='strict')
ext = x.split('.')[-1]
if ext in ('py', 'pyc', 'pyo'):
continue
x = os.path.join(parent, x)
st = os.lstat(x)
if not stat.S_ISREG(st.st_mode):
continue
# Check for bash shebang
f = open(_unicode_encode(x,
encoding=_encodings['fs'], errors='strict'), 'rb')
line = _unicode_decode(f.readline(),
encoding=_encodings['content'], errors='replace')
f.close()
if line[:2] == '#!' and \
'bash' in line:
cmd = "%s -n %s" % (_shell_quote(BASH_BINARY), _shell_quote(x))
status, output = subprocess_getstatusoutput(cmd)
self.assertEqual(os.WIFEXITED(status) and \
os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
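
A minimal sketch of the same syntax check using the standard subprocess module instead of portage's subprocess_getstatusoutput helper; "bash -n" parses a script without running it (the bash_syntax_ok name and the default bash path are assumptions):

import subprocess

def bash_syntax_ok(script_path, bash='/bin/bash'):
    # "bash -n" parses the script without executing it; a zero exit
    # status means the syntax is acceptable.
    proc = subprocess.run([bash, '-n', script_path],
                          capture_output=True, text=True)
    return proc.returncode == 0, proc.stderr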
Example 7: __iter__
def __iter__(self):
"""generator for walking the dir struct"""
dirs = [(0, self.location)]
len_base = len(self.location)
while dirs:
depth, dir_path = dirs.pop()
try:
dir_list = os.listdir(dir_path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
continue
for l in dir_list:
p = os.path.join(dir_path, l)
try:
st = os.lstat(p)
except OSError:
# Cache entry disappeared.
continue
if stat.S_ISDIR(st.st_mode):
# Only recurse 1 deep, in order to avoid iteration over
# entries from another nested cache instance. This can
# happen if the user nests an overlay inside
# /usr/portage/local as in bug #302764.
if depth < 1:
dirs.append((depth+1, p))
continue
try:
yield _pkg_str(p[len_base+1:])
except InvalidData:
continue
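
A standalone sketch of the bounded recursion used above: descend at most one directory level, classify each entry with lstat, and skip entries that disappear mid-walk (the walk_one_level name is invented for illustration):

import os
import stat

def walk_one_level(top):
    # Descend at most one directory level below `top`; entries that
    # vanish between listdir() and lstat() are silently skipped.
    stack = [(0, top)]
    while stack:
        depth, dir_path = stack.pop()
        try:
            entries = os.listdir(dir_path)
        except OSError:
            continue
        for name in entries:
            path = os.path.join(dir_path, name)
            try:
                st = os.lstat(path)
            except OSError:
                continue
            if stat.S_ISDIR(st.st_mode):
                if depth < 1:
                    stack.append((depth + 1, path))
                continue
            yield path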
Example 8: _pkgindex_entry
def _pkgindex_entry(self, cpv):
"""
Performs checksums, and gets size and mtime via lstat.
Raises InvalidDependString if necessary.
@rtype: dict
@return: a dict containing the entry for the given cpv.
"""
pkg_path = self.getname(cpv)
d = dict(cpv._metadata.items())
d.update(perform_multiple_checksums(
pkg_path, hashes=self._pkgindex_hashes))
d["CPV"] = cpv
st = os.lstat(pkg_path)
d["_mtime_"] = _unicode(st[stat.ST_MTIME])
d["SIZE"] = _unicode(st.st_size)
rel_path = pkg_path[len(self.pkgdir)+1:]
# record location if it's non-default
if rel_path != cpv + ".tbz2":
d["PATH"] = rel_path
return d
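
A minimal sketch of how the size and mtime fields are obtained: one lstat call supplies both values, which the example stores as strings under the SIZE and _mtime_ keys (the size_and_mtime helper is made up here):

import os
import stat

def size_and_mtime(pkg_path):
    # One lstat call yields both values; the example stores them as
    # strings in the package index entry.
    st = os.lstat(pkg_path)
    return {"SIZE": str(st.st_size),
            "_mtime_": str(st[stat.ST_MTIME])}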
Example 9: _prepare_fake_distdir
def _prepare_fake_distdir(settings, alist):
orig_distdir = settings["DISTDIR"]
edpath = os.path.join(settings["PORTAGE_BUILDDIR"], "distdir")
portage.util.ensure_dirs(edpath, gid=portage_gid, mode=0o755)
# Remove any unexpected files or directories.
for x in os.listdir(edpath):
symlink_path = os.path.join(edpath, x)
st = os.lstat(symlink_path)
if x in alist and stat.S_ISLNK(st.st_mode):
continue
if stat.S_ISDIR(st.st_mode):
shutil.rmtree(symlink_path)
else:
os.unlink(symlink_path)
# Check for existing symlinks and recreate if necessary.
for x in alist:
symlink_path = os.path.join(edpath, x)
target = os.path.join(orig_distdir, x)
try:
link_target = os.readlink(symlink_path)
except OSError:
os.symlink(target, symlink_path)
else:
if link_target != target:
os.unlink(symlink_path)
os.symlink(target, symlink_path)
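
A standalone sketch of the recreate-if-wrong symlink pattern from the second loop: readlink raises OSError when the link is missing, and a link that points elsewhere is removed and recreated (the ensure_symlink name is invented):

import os

def ensure_symlink(target, symlink_path):
    # readlink() raises OSError when the link does not exist yet; a
    # link that points somewhere else is removed and recreated.
    try:
        current = os.readlink(symlink_path)
    except OSError:
        os.symlink(target, symlink_path)
        return
    if current != target:
        os.unlink(symlink_path)
        os.symlink(target, symlink_path)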
Example 10: updateprotect
def updateprotect(self):
"""Update internal state for isprotected() calls. Nonexistent paths
are ignored."""
os = _os_merge
self.protect = []
self._dirs = set()
for x in self.protect_list:
ppath = normalize_path(
os.path.join(self.myroot, x.lstrip(os.path.sep)))
try:
if stat.S_ISDIR(os.stat(ppath).st_mode):
self._dirs.add(ppath)
self.protect.append(ppath)
except OSError:
# If it doesn't exist, there's no need to protect it.
pass
self.protectmask = []
for x in self.mask_list:
ppath = normalize_path(
os.path.join(self.myroot, x.lstrip(os.path.sep)))
try:
"""Use lstat so that anything, even a broken symlink can be
protected."""
if stat.S_ISDIR(os.lstat(ppath).st_mode):
self._dirs.add(ppath)
self.protectmask.append(ppath)
"""Now use stat in case this is a symlink to a directory."""
if stat.S_ISDIR(os.stat(ppath).st_mode):
self._dirs.add(ppath)
except OSError:
# If it doesn't exist, there's no need to mask it.
pass
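
The mask loop deliberately checks lstat first and stat second. A minimal sketch showing why: lstat never follows the link, so even a broken symlink can be inspected, while stat catches a symlink that resolves to a directory (the is_protectable_dir name is made up):

import os
import stat

def is_protectable_dir(path):
    # lstat() never follows the link, so even a broken symlink can be
    # examined; stat() then catches a symlink that resolves to a
    # directory. Missing paths simply return False.
    try:
        if stat.S_ISDIR(os.lstat(path).st_mode):
            return True
        return stat.S_ISDIR(os.stat(path).st_mode)
    except OSError:
        return False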
Example 11: _file_archive_ensure_dir
def _file_archive_ensure_dir(parent_dir):
"""
Ensure that the parent directory for an archive exists.
If a file exists where a directory is needed, then rename
it (see bug 256376).
@param parent_dir: path of parent directory
@type parent_dir: str
"""
for parent in iter_parents(parent_dir):
# Use lstat because a symlink to a directory might point
# to a directory outside of the config archive, making
# it an unsuitable parent.
try:
parent_st = os.lstat(parent)
except OSError:
pass
else:
if not stat.S_ISDIR(parent_st.st_mode):
_file_archive_rotate(parent)
break
try:
os.makedirs(parent_dir)
except OSError:
pass
Example 12: _need_update
def _need_update(self, cpv, data):
if "MD5" not in data:
return True
size = data.get("SIZE")
if size is None:
return True
mtime = data.get("_mtime_")
if mtime is None:
return True
pkg_path = self._bintree.getname(cpv)
try:
s = os.lstat(pkg_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
# We can't update the index for this one because
# it disappeared.
return False
try:
if long(mtime) != s[stat.ST_MTIME]:
return True
if long(size) != long(s.st_size):
return True
except ValueError:
return True
return False
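
A standalone sketch of the staleness test: a package file that has disappeared is reported as unchanged (it cannot be re-indexed), otherwise the cached size and mtime are compared against a fresh lstat (the metadata_changed name is invented):

import os
import stat

def metadata_changed(pkg_path, cached_size, cached_mtime):
    # A file that has disappeared is reported as unchanged, because
    # there is nothing left to re-index; unparsable cached values are
    # treated as changed, mirroring the example above.
    try:
        st = os.lstat(pkg_path)
    except OSError:
        return False
    try:
        return (int(cached_size) != st.st_size or
                int(cached_mtime) != st[stat.ST_MTIME])
    except (TypeError, ValueError):
        return True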
Example 13: testCompileModules
def testCompileModules(self):
for parent, dirs, files in itertools.chain(
os.walk(PORTAGE_BIN_PATH),
os.walk(PORTAGE_PYM_PATH)):
parent = _unicode_decode(parent,
encoding=_encodings['fs'], errors='strict')
for x in files:
x = _unicode_decode(x,
encoding=_encodings['fs'], errors='strict')
if x[-4:] in ('.pyc', '.pyo'):
continue
x = os.path.join(parent, x)
st = os.lstat(x)
if not stat.S_ISREG(st.st_mode):
continue
do_compile = False
cfile = x
if x[-3:] == '.py':
do_compile = True
else:
# Check for python shebang
f = open(_unicode_encode(x,
encoding=_encodings['fs'], errors='strict'), 'rb')
line = _unicode_decode(f.readline(),
encoding=_encodings['content'], errors='replace')
f.close()
if line[:2] == '#!' and \
'python' in line:
do_compile = True
cfile += '.py'
if do_compile:
cfile += (__debug__ and 'c' or 'o')
py_compile.compile(x, cfile=cfile, doraise=True)
Example 14: _init_ipc_fifos
def _init_ipc_fifos(self):
input_fifo = os.path.join(
self.settings['PORTAGE_BUILDDIR'], '.ipc_in')
output_fifo = os.path.join(
self.settings['PORTAGE_BUILDDIR'], '.ipc_out')
for p in (input_fifo, output_fifo):
st = None
try:
st = os.lstat(p)
except OSError:
os.mkfifo(p)
else:
if not stat.S_ISFIFO(st.st_mode):
st = None
try:
os.unlink(p)
except OSError:
pass
os.mkfifo(p)
apply_secpass_permissions(p,
uid=os.getuid(),
gid=portage.data.portage_gid,
mode=0o770, stat_cached=st)
return (input_fifo, output_fifo)
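
A minimal sketch of the create-or-replace FIFO pattern above, leaving out the apply_secpass_permissions step (the ensure_fifo name is made up for illustration):

import os
import stat

def ensure_fifo(path, mode=0o770):
    # Create a FIFO at `path`; an existing non-FIFO entry is removed
    # first. Ownership and permission handling from the example is
    # omitted.
    try:
        st = os.lstat(path)
    except OSError:
        os.mkfifo(path, mode)
        return
    if not stat.S_ISFIFO(st.st_mode):
        try:
            os.unlink(path)
        except OSError:
            pass
        os.mkfifo(path, mode)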
Example 15: cacheddir
def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymlinks=True):
mypath = normalize_path(my_original_path)
try:
pathstat = os.stat(mypath)
if not stat.S_ISDIR(pathstat.st_mode):
raise DirectoryNotFound(mypath)
except EnvironmentError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mypath)
del e
return [], []
except PortageException:
return [], []
else:
try:
fpaths = os.listdir(mypath)
except EnvironmentError as e:
if e.errno != errno.EACCES:
raise
del e
raise PermissionDenied(mypath)
ftype = []
for x in fpaths:
try:
if followSymlinks:
pathstat = os.stat(mypath+"/"+x)
else:
pathstat = os.lstat(mypath+"/"+x)
if stat.S_ISREG(pathstat[stat.ST_MODE]):
ftype.append(0)
elif stat.S_ISDIR(pathstat[stat.ST_MODE]):
ftype.append(1)
elif stat.S_ISLNK(pathstat[stat.ST_MODE]):
ftype.append(2)
else:
ftype.append(3)
except (IOError, OSError):
ftype.append(3)
if ignorelist or ignorecvs:
ret_list = []
ret_ftype = []
for file_path, file_type in zip(fpaths, ftype):
if file_path in ignorelist:
pass
elif ignorecvs:
if file_path[:2] != ".#" and \
not (file_type == 1 and file_path in VCS_DIRS):
ret_list.append(file_path)
ret_ftype.append(file_type)
else:
ret_list = fpaths
ret_ftype = ftype
return ret_list, ret_ftype
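
A standalone sketch of the type codes this function produces: 0 for regular files, 1 for directories, 2 for symlinks (only visible when followSymlinks is False, since lstat does not follow links) and 3 for anything else or on error (the classify name is invented):

import os
import stat

def classify(dir_path, follow_symlinks=False):
    # Type codes as in cacheddir(): 0 = regular file, 1 = directory,
    # 2 = symlink, 3 = anything else or stat failure.
    names, types = [], []
    for name in os.listdir(dir_path):
        path = os.path.join(dir_path, name)
        names.append(name)
        try:
            st = os.stat(path) if follow_symlinks else os.lstat(path)
        except OSError:
            types.append(3)
            continue
        if stat.S_ISREG(st.st_mode):
            types.append(0)
        elif stat.S_ISDIR(st.st_mode):
            types.append(1)
        elif stat.S_ISLNK(st.st_mode):
            types.append(2)
        else:
            types.append(3)
    return names, types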