本文整理汇总了Python中portage.localization._函数的典型用法代码示例。如果您正苦于以下问题:Python _函数的具体用法?Python _怎么用?Python _使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了_函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: read_config
def read_config(mandatory_opts):
    """Load /etc/dispatch-conf.conf and return its options as a dict.

    Exits the process when the file cannot be read, or when the configured
    archive directory exists but is not a directory.
    """
    loader = KeyValuePairFileLoader('/etc/dispatch-conf.conf', None)
    opts, errors = loader.load()
    if not opts:
        print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
        sys.exit(1)

    # KeyValuePairFileLoader keeps surrounding quotes; strip one matching
    # leading/trailing quote pair here.
    for key, value in opts.items():
        if value[:1] in "\"'" and value[:1] == value[-1:]:
            opts[key] = value[1:-1]

    for key in mandatory_opts:
        if key in opts:
            continue
        if key == "merge":
            # Fall back to a sane default merge command.
            opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
        else:
            print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)

    if not os.path.exists(opts['archive-dir']):
        os.mkdir(opts['archive-dir'])
        # Use restrictive permissions by default, in order to protect
        # against vulnerabilities (like bug #315603 involving rcs).
        os.chmod(opts['archive-dir'], 0o700)
    elif not os.path.isdir(opts['archive-dir']):
        print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
        sys.exit(1)

    return opts
示例2: read_config
def read_config(mandatory_opts):
    """Parse /etc/dispatch-conf.conf into an options dict.

    Exits on a missing/unreadable config file or a non-directory
    archive-dir; creates archive-dir when it does not exist yet.
    """
    loader = portage.env.loaders.KeyValuePairFileLoader(
        '/etc/dispatch-conf.conf', None)
    opts, errors = loader.load()
    if not opts:
        print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
        sys.exit(1)

    # Strip one pair of matching surrounding quotes, since
    # KeyValuePairFileLoader does not do that itself.
    quote_chars = "\"'"
    for k in list(opts):
        v = opts[k]
        if v[:1] in quote_chars and v[:1] == v[-1:]:
            opts[k] = v[1:-1]

    for key in mandatory_opts:
        if key not in opts:
            if key == "merge":
                opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
            else:
                print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)

    if not os.path.exists(opts['archive-dir']):
        os.mkdir(opts['archive-dir'])
    elif not os.path.isdir(opts['archive-dir']):
        print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
        sys.exit(1)

    return opts
示例3: collect_ebuild_messages
def collect_ebuild_messages(path):
    """Collect elog messages generated by the bash logging function stored
    at 'path'.

    @param path: directory containing one log file per ebuild phase
    @rtype: dict
    @return: mapping of phase name -> list of (msgtype, [message lines])
        tuples; log files are unlinked after being read to avoid
        reprocessing.
    """
    try:
        mylogfiles = os.listdir(path)
    except OSError:
        mylogfiles = None
    # shortcut for packages without any messages
    if not mylogfiles:
        return {}
    # exploit listdir() file order so we process log entries in chronological order
    mylogfiles.reverse()
    logentries = {}
    for msgfunction in mylogfiles:
        filename = os.path.join(path, msgfunction)
        if msgfunction not in EBUILD_PHASES:
            writemsg(_("!!! can't process invalid log file: %s\n") % filename,
                noiselevel=-1)
            continue
        if msgfunction not in logentries:
            logentries[msgfunction] = []
        lastmsgtype = None
        msgcontent = []
        # BUG FIX: use a context manager so the file is closed even when
        # iteration or decoding raises; previously f.close() was skipped
        # on any exception.
        with io.open(_unicode_encode(filename,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['repo.content'],
            errors='replace') as f:
            for l in f:
                l = l.rstrip('\n')
                if not l:
                    continue
                try:
                    # Each line is "<msgtype> <message>".
                    msgtype, msg = l.split(" ", 1)
                except ValueError:
                    writemsg(_("!!! malformed entry in "
                        "log file: '%s'\n") % filename, noiselevel=-1)
                    continue
                if lastmsgtype is None:
                    lastmsgtype = msgtype
                if msgtype == lastmsgtype:
                    # Same message type: extend the current run.
                    msgcontent.append(msg)
                else:
                    # Type changed: flush the previous run and start a new one.
                    if msgcontent:
                        logentries[msgfunction].append((lastmsgtype, msgcontent))
                    msgcontent = [msg]
                    lastmsgtype = msgtype
        if msgcontent:
            logentries[msgfunction].append((lastmsgtype, msgcontent))
    # clean logfiles to avoid repetitions
    for f in mylogfiles:
        try:
            os.unlink(os.path.join(path, f))
        except OSError:
            pass
    return logentries
示例4: fetch_check
def fetch_check(self, mypkg, useflags=None, mysettings=None, all=False):
    """Check whether the distfiles for mypkg verify against the Manifest.

    @return: True when every distfile has a digest and verifies,
        False otherwise.
    """
    # NOTE: "all" shadows the builtin, but it is part of the public
    # signature and cannot be renamed.
    if all:
        useflags = None
    elif useflags is None and mysettings:
        useflags = mysettings["USE"].split()
    myfiles = self.getFetchMap(mypkg, useflags=useflags)
    myebuild = self.findname(mypkg)
    if myebuild is None:
        raise AssertionError("ebuild not found for '%s'" % mypkg)
    mf = Manifest(os.path.dirname(myebuild), self.settings["DISTDIR"])
    mysums = mf.getDigests()

    failures = {}
    for distfile in myfiles:
        if not mysums or distfile not in mysums:
            ok, reason = False, _("digest missing")
        else:
            try:
                ok, reason = portage.checksum.verify_all(
                    os.path.join(self.settings["DISTDIR"], distfile),
                    mysums[distfile])
            except FileNotFound as e:
                ok, reason = False, _("File Not Found: '%s'") % (e,)
        if not ok:
            failures[distfile] = reason
    return not failures
示例5: verify_all
def verify_all(filename, mydict, calc_prelink=0, strict=0):
    """
    Verify all checksums against a file.
    @param filename: File to run the checksums against
    @type filename: String
    @param mydict: Expected digests for the file; must contain a "size"
        key plus one or more hash-name keys
    @type mydict: Dict
    @param calc_prelink: Whether or not to reverse prelink before running the checksum
    @type calc_prelink: Integer
    @param strict: Enable/Disable strict checking (which stops exactly at a checksum failure and throws an exception)
    @type strict: Integer
    @rtype: Tuple
    @return: Result of the checks and possible message:
        1) If size fails, False, and a tuple containing a message, the given size, and the actual size
        2) If there is an os error, False, and a tuple containing the system error followed by 2 nulls
        3) If a checksum fails, False and a tuple containing a message, the given hash, and the actual hash
        4) If all checks succeed, return True and a fake reason
    """
    # Dict relates to single file only.
    # returns: (passed,reason)
    file_is_ok = True
    reason = "Reason unknown"
    try:
        mysize = os.stat(filename)[stat.ST_SIZE]
        if mydict["size"] != mysize:
            return False, (_("Filesize does not match recorded size"), mysize, mydict["size"])
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise portage.exception.FileNotFound(filename)
        return False, (str(e), None, None)
    # Only hash types we actually know how to compute can be verified.
    verifiable_hash_types = set(mydict).intersection(hashfunc_map)
    verifiable_hash_types.discard("size")
    if not verifiable_hash_types:
        expected = set(hashfunc_map)
        expected.discard("size")
        expected = " ".join(sorted(expected))
        got = set(mydict)
        got.discard("size")
        got = " ".join(sorted(got))
        return False, (_("Insufficient data for checksum verification"), got, expected)
    for x in sorted(mydict):
        if x == "size":
            continue
        elif x in hashfunc_map:
            myhash = perform_checksum(filename, x, calc_prelink=calc_prelink)[0]
            if mydict[x] != myhash:
                if strict:
                    # BUG FIX: the format string previously used the
                    # shell-style "$(file)s" instead of "%(file)s", so the
                    # filename was never substituted into the message.
                    raise portage.exception.DigestException(
                        ("Failed to verify '%(file)s' on "
                        "checksum type '%(type)s'") % {"file": filename, "type": x}
                    )
                else:
                    file_is_ok = False
                    reason = (("Failed on %s verification" % x), myhash, mydict[x])
                    break
    return file_is_ok, reason
示例6: lineParser
def lineParser(self, line, line_num, data, errors):
    """Parse one "key=value" line into data, recording problems in errors.

    Comment lines (starting with '#') and blank lines are ignored.
    Malformed or invalid entries append a diagnostic message to
    errors[self.fname] and leave data untouched.
    """
    def report(message):
        # All diagnostics are grouped per source file name.
        errors.setdefault(self.fname, []).append(message)

    stripped = line.strip()
    # Skip comments and empty lines.
    if stripped.startswith("#") or not stripped:
        return
    parts = stripped.split("=", 1)
    if len(parts) < 2:
        report(_("Malformed data at line: %s, data %s") % (line_num + 1, stripped))
        return
    key = parts[0].strip()
    value = parts[1].strip()
    if not key:
        report(_("Malformed key at line: %s, key %s") % (line_num + 1, key))
        return
    if not self._validate(key):
        report(_("Key validation failed at line: %s, data %s") % (line_num + 1, key))
        return
    if not self._valueValidate(value):
        report(_("Value validation failed at line: %s, data %s") % (line_num + 1, value))
        return
    data[key] = value
示例7: load_unpack_dependencies_configuration
def load_unpack_dependencies_configuration(repositories):
    """Load profiles/unpack_dependencies/<EAPI> files for all repositories.

    @param repositories: repository configuration providing
        repos_with_profiles()
    @rtype: dict
    @return: {repo_name: {eapi: {suffix: depend_string}}}, where each
        repository also inherits (and may override) entries from its
        masters.
    """
    repo_dict = {}
    for repo in repositories.repos_with_profiles():
        for eapi in _supported_eapis:
            if eapi_has_automatic_unpack_dependencies(eapi):
                file_name = os.path.join(repo.location, "profiles", "unpack_dependencies", eapi)
                lines = grabfile(file_name, recursive=True)
                for line in lines:
                    elements = line.split()
                    suffix = elements[0].lower()
                    if len(elements) == 1:
                        writemsg(_("--- Missing unpack dependencies for '%s' suffix in '%s'\n") % (suffix, file_name))
                    depend = " ".join(elements[1:])
                    try:
                        # Validate the dependency string for this EAPI.
                        use_reduce(depend, eapi=eapi)
                    except InvalidDependString as e:
                        # BUG FIX: the '%' formatting was previously applied
                        # inside the _() call, which defeats translation
                        # lookup; format the translated template instead.
                        writemsg(_("--- Invalid unpack dependencies for '%s' suffix in '%s': '%s'\n") % (suffix, file_name, e))
                    else:
                        repo_dict.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend

    # Flatten masters: each repo inherits unpack dependencies from its
    # masters, with its own entries applied last so they take precedence.
    ret = {}
    for repo in repositories.repos_with_profiles():
        for repo_name in [x.name for x in repo.masters] + [repo.name]:
            for eapi in repo_dict.get(repo_name, {}):
                for suffix, depend in repo_dict.get(repo_name, {}).get(eapi, {}).items():
                    ret.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend
    return ret
示例8: _parse_file_to_dict
def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True, eapi_filter=None):
    """Read a package.use-style file into {cp: {atom: useflags}}.

    USE flags that fail validation for the file's EAPI are dropped with
    a warning.  When juststrings is True the per-atom flag lists are
    joined into single strings; otherwise they are tuples.
    """
    ret = {}
    file_dict = grabdict_package(file_name, recursive=recursive, verify_eapi=True)
    eapi = read_corresponding_eapi_file(file_name)
    if eapi_filter is not None and not eapi_filter(eapi):
        # File is ignored entirely for unsupported EAPIs; only warn when
        # it actually contained data.
        if file_dict:
            writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
                (eapi, os.path.basename(file_name), file_name),
                noiselevel=-1)
        return ret
    useflag_re = _get_useflag_re(eapi)
    location_dict = {}
    for atom, prefixed_flags in file_dict.items():
        valid_flags = []
        for prefixed_useflag in prefixed_flags:
            # Strip a leading "-" before validating the flag name itself.
            if prefixed_useflag[:1] == "-":
                useflag = prefixed_useflag[1:]
            else:
                useflag = prefixed_useflag
            if useflag_re.match(useflag) is None:
                writemsg(_("--- Invalid USE flag for '%s' in '%s': '%s'\n") %
                    (atom, file_name, prefixed_useflag), noiselevel=-1)
            else:
                valid_flags.append(prefixed_useflag)
        location_dict.setdefault(atom, []).extend(valid_flags)
    for atom, flags in location_dict.items():
        ret.setdefault(atom.cp, {})[atom] = " ".join(flags) if juststrings else tuple(flags)
    return ret
示例9: _parse_repository_usealiases
def _parse_repository_usealiases(self, repositories):
    """Build {repo_name: {real_flag: [aliases]}} from profiles/use.aliases.

    Invalid real flags, invalid aliases, and aliases already assigned to
    a different real flag are skipped with a warning.
    """
    ret = {}
    for repo in repositories.repos_with_profiles():
        file_name = os.path.join(repo.location, "profiles", "use.aliases")
        eapi = read_corresponding_eapi_file(file_name, default=repo.eapi)
        useflag_re = _get_useflag_re(eapi)
        file_dict = {}
        for real_flag, aliases in grabdict(file_name, recursive=True).items():
            if useflag_re.match(real_flag) is None:
                writemsg(_("--- Invalid real USE flag in '%s': '%s'\n") % (file_name, real_flag), noiselevel=-1)
                continue
            for alias in aliases:
                if useflag_re.match(alias) is None:
                    writemsg(_("--- Invalid USE flag alias for '%s' real USE flag in '%s': '%s'\n") %
                        (real_flag, file_name, alias), noiselevel=-1)
                elif any(alias in v for k, v in file_dict.items() if k != real_flag):
                    # The same alias may not map to two different real flags.
                    writemsg(_("--- Duplicated USE flag alias in '%s': '%s'\n") %
                        (file_name, alias), noiselevel=-1)
                else:
                    file_dict.setdefault(real_flag, []).append(alias)
        ret[repo.name] = file_dict
    return ret
示例10: multiBuilder
def multiBuilder(self, options, settings, trees):
    """Build a dict of StaticFileSet objects, one per file found under a
    directory of set files.

    @param options: set-config options; honors "directory" (default:
        PORTAGE_CONFIGROOT/USER_CONFIG_PATH/sets), "name_pattern" (must
        contain a $name or ${name} placeholder) and "greedy".
    @param settings: config object used for PORTAGE_CONFIGROOT lookup.
    @param trees: trees dict; "porttree" is used for repository path
        expansion and "vartree" supplies the dbapi for each set.
    @rtype: dict
    @return: mapping of expanded set name -> StaticFileSet
    @raises SetConfigError: for a bad name_pattern, an unknown repository
        name in the directory path, or an undecodable directory path.
    """
    rValue = {}
    directory = options.get("directory",
        os.path.join(settings["PORTAGE_CONFIGROOT"],
        USER_CONFIG_PATH, "sets"))
    name_pattern = options.get("name_pattern", "${name}")
    if not "$name" in name_pattern and not "${name}" in name_pattern:
        raise SetConfigError(_("name_pattern doesn't include ${name} placeholder"))
    greedy = get_boolean(options, "greedy", False)
    # look for repository path variables
    match = self._repopath_match.match(directory)
    if match:
        try:
            # Substitute the named repository's on-disk location into the path.
            directory = self._repopath_sub.sub(trees["porttree"].dbapi.treemap[match.groupdict()["reponame"]], directory)
        except KeyError:
            raise SetConfigError(_("Could not find repository '%s'") % match.groupdict()["reponame"])
    try:
        directory = _unicode_decode(directory,
            encoding=_encodings['fs'], errors='strict')
        # Now verify that we can also encode it.
        _unicode_encode(directory,
            encoding=_encodings['fs'], errors='strict')
    except UnicodeError:
        # Re-decode with replacement characters purely so the error
        # message below can display a printable version of the path.
        directory = _unicode_decode(directory,
            encoding=_encodings['fs'], errors='replace')
        raise SetConfigError(
            _("Directory path contains invalid character(s) for encoding '%s': '%s'") \
            % (_encodings['fs'], directory))
    if os.path.isdir(directory):
        directory = normalize_path(directory)
        for parent, dirs, files in os.walk(directory):
            try:
                parent = _unicode_decode(parent,
                    encoding=_encodings['fs'], errors='strict')
            except UnicodeDecodeError:
                # Skip subtrees whose names cannot be decoded strictly.
                continue
            # Prune hidden directories in place so os.walk skips them.
            for d in dirs[:]:
                if d[:1] == '.':
                    dirs.remove(d)
            for filename in files:
                try:
                    filename = _unicode_decode(filename,
                        encoding=_encodings['fs'], errors='strict')
                except UnicodeDecodeError:
                    continue
                # Skip hidden files and set metadata files.
                if filename[:1] == '.':
                    continue
                if filename.endswith(".metadata"):
                    continue
                # Path of the set file relative to the base directory
                # (drop the base directory plus the path separator).
                filename = os.path.join(parent,
                    filename)[1 + len(directory):]
                myname = name_pattern.replace("$name", filename)
                myname = myname.replace("${name}", filename)
                rValue[myname] = StaticFileSet(
                    os.path.join(directory, filename),
                    greedy=greedy, dbapi=trees["vartree"].dbapi)
    return rValue
示例11: _finalize
def _finalize():
    """Print all queued elog messages and clear the module-level queue."""
    global _items
    out = EOutput()
    # Map each message type to the EOutput method that renders it;
    # this is invariant, so build it once.
    renderers = {"INFO": out.einfo,
        "WARN": out.ewarn,
        "ERROR": out.eerror,
        "LOG": out.einfo,
        "QA": out.ewarn}
    for root, key, logentries, logfile in _items:
        print()
        if root == "/":
            out.einfo(_("Messages for package %s:") %
                colorize("INFORM", key))
        else:
            out.einfo(_("Messages for package %(pkg)s merged to %(root)s:") %
                {"pkg": colorize("INFORM", key), "root": root})
        if logfile is not None:
            out.einfo(_("Log file: %s") % colorize("INFORM", logfile))
        print()
        # Emit messages in canonical phase order.
        for phase in EBUILD_PHASES:
            if phase not in logentries:
                continue
            for msgtype, msgcontent in logentries[phase]:
                if isinstance(msgcontent, basestring):
                    msgcontent = [msgcontent]
                for line in msgcontent:
                    renderers[msgtype](line.strip("\n"))
    _items = []
    return
示例12: _expand_parent_colon
def _expand_parent_colon(self, parentsFile, parentPath,
repo_loc, repositories):
colon = parentPath.find(":")
if colon == -1:
return parentPath
if colon == 0:
if repo_loc is None:
raise ParseError(
_("Parent '%s' not found: '%s'") % \
(parentPath, parentsFile))
else:
parentPath = normalize_path(os.path.join(
repo_loc, 'profiles', parentPath[colon+1:]))
else:
p_repo_name = parentPath[:colon]
try:
p_repo_loc = repositories.get_location_for_name(p_repo_name)
except KeyError:
raise ParseError(
_("Parent '%s' not found: '%s'") % \
(parentPath, parentsFile))
else:
parentPath = normalize_path(os.path.join(
p_repo_loc, 'profiles', parentPath[colon+1:]))
return parentPath
示例13: _run
def _run(self):
    """Regenerate the Manifest for self.cp and report the outcome.

    @return: 1 on any failure, self.MODIFIED when the Manifest content
        changed, os.EX_OK when it was already up to date.
    """
    manifest = self.repo_config.load_manifest(
        os.path.join(self.repo_config.location, self.cp),
        self.distdir, fetchlist_dict=self.fetchlist_dict)
    try:
        manifest.create(assumeDistHashesAlways=True)
    except FileNotFound as e:
        portage.writemsg(_("!!! File %s doesn't exist, can't update "
            "Manifest\n") % e, noiselevel=-1)
        return 1
    except PortagePackageException as e:
        portage.writemsg(("!!! %s\n") % (e,), noiselevel=-1)
        return 1
    try:
        modified = manifest.write(sign=False)
    except PermissionDenied as e:
        portage.writemsg("!!! %s: %s\n" % (_("Permission Denied"), e,),
            noiselevel=-1)
        return 1
    return self.MODIFIED if modified else os.EX_OK
示例14: hardlock_cleanup
def hardlock_cleanup(path, remove_all_locks=False):
    """Clean up stale ".*.hardlock-<host>-<pid>" link files under path.

    Scans path for hardlock files, groups them by locked filename and
    host, and removes lock files belonging to this host (or to every
    host when remove_all_locks is True).

    @param path: directory to sweep for hardlock files
    @param remove_all_locks: also remove locks held by other hosts
    @rtype: list
    @return: human-readable messages describing what was found/unlinked
    """
    myhost = os.uname()[1]
    mydl = os.listdir(path)
    results = []
    mycount = 0
    # mylist maps locked-filename -> host -> [pids] for every hardlock
    # file found in the directory.
    mylist = {}
    for x in mydl:
        if os.path.isfile(path + "/" + x):
            parts = x.split(".hardlock-")
            if len(parts) == 2:
                # Strip the leading "." of the hidden lock file name.
                filename = parts[0][1:]
                hostpid = parts[1].split("-")
                # The hostname itself may contain "-", so rejoin all but
                # the final (pid) component.
                host = "-".join(hostpid[:-1])
                pid = hostpid[-1]
                if filename not in mylist:
                    mylist[filename] = {}
                if host not in mylist[filename]:
                    mylist[filename][host] = []
                mylist[filename][host].append(pid)
                mycount += 1
    results.append(_("Found %(count)s locks") % {"count": mycount})
    for x in mylist:
        # Only touch locks we own, unless remove_all_locks forces a sweep.
        if myhost in mylist[x] or remove_all_locks:
            mylockname = hardlock_name(path + "/" + x)
            if hardlink_is_mine(mylockname, path + "/" + x) or \
                not os.path.exists(path + "/" + x) or \
                remove_all_locks:
                for y in mylist[x]:
                    for z in mylist[x][y]:
                        filename = path + "/." + x + ".hardlock-" + y + "-" + z
                        if filename == mylockname:
                            # Our own link is removed separately below.
                            continue
                        try:
                            # We're sweeping through, unlinking everyone's locks.
                            os.unlink(filename)
                            results.append(_("Unlinked: ") + filename)
                        except OSError:
                            pass
                try:
                    os.unlink(path + "/" + x)
                    results.append(_("Unlinked: ") + path + "/" + x)
                    os.unlink(mylockname)
                    results.append(_("Unlinked: ") + mylockname)
                except OSError:
                    pass
            else:
                # The lock is not ours and we are not forcing: only
                # remove our own transient link file.
                try:
                    os.unlink(mylockname)
                    results.append(_("Unlinked: ") + mylockname)
                except OSError:
                    pass
    return results
示例15: _addProfile
def _addProfile(self, currentPath):
    """Recursively register currentPath and its parent profiles.

    Parents listed in the profile's "parent" file are appended (via
    recursion) before the profile itself, so self.profiles ends up in
    evaluation order.  Raises ParseError for an unsupported EAPI, an
    empty parent file, or a missing parent directory.
    """
    parentsFile = os.path.join(currentPath, "parent")
    eapi_file = os.path.join(currentPath, "eapi")
    try:
        eapi = codecs.open(_unicode_encode(eapi_file,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['content'], errors='replace'
            ).readline().strip()
    except IOError:
        # No readable eapi file: nothing to validate here.
        pass
    else:
        if not eapi_is_supported(eapi):
            raise ParseError(_(
                "Profile contains unsupported "
                "EAPI '%s': '%s'") % \
                (eapi, os.path.realpath(eapi_file),))
    if os.path.exists(parentsFile):
        parents = grabfile(parentsFile)
        if not parents:
            raise ParseError(
                _("Empty parent file: '%s'") % parentsFile)
        for parentPath in parents:
            parentPath = normalize_path(os.path.join(
                currentPath, parentPath))
            if not os.path.exists(parentPath):
                raise ParseError(
                    _("Parent '%s' not found: '%s'") % \
                    (parentPath, parentsFile))
            self._addProfile(parentPath)
    self.profiles.append(currentPath)