This article collects typical usage examples of the Python pkgpanda.PackageId class. If you are wondering what PackageId is for or how to use it, the selected examples below should help.
The following shows 7 code examples of the PackageId class, ordered by popularity by default.
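Most of the examples below exercise a small set of PackageId operations: parsing an id string into name and version, building an id with from_parts, and the is_id / validate_name checks. Here is a minimal standalone sketch of that surface, assuming the usual pkgpanda "name--version" id format; the concrete ids and names are made up for illustration.

from pkgpanda import PackageId
from pkgpanda.exceptions import ValidationError

# Parse a full package id of the assumed form "<name>--<version>".
pkg_id = PackageId("mesos--1.2.3")
print(pkg_id.name, pkg_id.version)  # mesos 1.2.3

# Build an id from its parts; str() is expected to give back "name--version".
print(str(PackageId.from_parts("mesos", "1.2.3")))  # mesos--1.2.3

# is_id distinguishes a full package id from a bare package name.
print(PackageId.is_id("mesos--1.2.3"))  # True
print(PackageId.is_id("mesos"))         # False

# validate_name raises ValidationError for names that aren't allowed.
try:
    PackageId.validate_name("not a valid name!")
except ValidationError as ex:
    print("invalid name:", ex)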
Example 1: _get_package_list
def _get_package_list(treeinfo_dict, key, excludes=None):
    """Return a list of package name strings from treeinfo_dict by key.

    If key isn't present in treeinfo_dict, an empty list is returned.
    """
    excludes = excludes or list()
    package_list = treeinfo_dict.get(key, list())

    # Validate package list.
    if not isinstance(package_list, list):
        raise BuildError("{} must be either null (meaning don't use) or a list of package names.".format(key))
    for package_name in package_list:
        if not isinstance(package_name, str):
            raise BuildError("{} must be a list of strings. Found a {} with the value: {}".format(
                key, type(package_name), package_name))

        try:
            PackageId.validate_name(package_name)
        except ValidationError as ex:
            raise BuildError("Invalid package name in {}: {}".format(key, package_name)) from ex

        if package_name in excludes:
            raise BuildError("Package found in both exclude and {}: {}".format(key, package_name))

    return package_list
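A quick illustration of how this helper behaves (the treeinfo_dict contents and the 'core_package_list' key are hypothetical, chosen only for the example):

treeinfo_dict = {'core_package_list': ['mesos', 'java']}

# Returns the validated list of package names.
assert _get_package_list(treeinfo_dict, 'core_package_list') == ['mesos', 'java']

# A missing key yields an empty list rather than an error.
assert _get_package_list(treeinfo_dict, 'missing_key') == []

# With excludes given, a name appearing in both places raises BuildError:
# _get_package_list(treeinfo_dict, 'core_package_list', excludes=['java'])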
Example 2: visit
def visit(pkg_tuple):
    # Visit the node for the first (and only time). Finding a node again
    # means a cycle and should be detected at caller.
    assert isinstance(pkg_tuple, tuple)
    assert pkg_tuple not in visited
    visited.add(pkg_tuple)

    # Ensure all dependencies are built. Sorted for stability
    for require in sorted(package_store.packages[pkg_tuple]['requires']):
        require_tuple = expand_require(require)
        if require_tuple in built:
            continue
        if require_tuple in visited:
            raise BuildError("Circular dependency. Circular link {0} -> {1}".format(pkg_tuple, require_tuple))

        if PackageId.is_id(require_tuple[0]):
            raise BuildError("Depending on a specific package id is not supported. Package {} "
                             "depends on {}".format(pkg_tuple, require_tuple))

        if require_tuple not in package_store.packages:
            raise BuildError("Package {0} require {1} not buildable from tree.".format(pkg_tuple, require_tuple))

        visit(require_tuple)

    build_order.append(pkg_tuple)
    built.add(pkg_tuple)
Example 3: visit
def visit(pkg_tuple):
    """Add a package and its requires to the build order.

    Raises AssertionError if pkg_tuple is in the set of visited packages.

    If the package has any requires, they're recursively visited and added
    to the build order depth-first. Then the package itself is added.
    """
    assert isinstance(pkg_tuple, tuple)
    # Visit the node for the first (and only) time.
    assert pkg_tuple not in visited
    visited.add(pkg_tuple)

    # Ensure all dependencies are built. Sorted for stability
    for require in sorted(package_store.packages[pkg_tuple]['requires']):
        require_tuple = expand_require(require)

        # If the dependency has already been built, we can move on.
        if require_tuple in built:
            continue
        # If the dependency has not been built but has been visited, then
        # there's a cycle in the dependency graph.
        if require_tuple in visited:
            raise BuildError("Circular dependency. Circular link {0} -> {1}".format(pkg_tuple, require_tuple))

        if PackageId.is_id(require_tuple[0]):
            raise BuildError("Depending on a specific package id is not supported. Package {} "
                             "depends on {}".format(pkg_tuple, require_tuple))

        if require_tuple not in package_store.packages:
            raise BuildError("Package {0} require {1} not buildable from tree.".format(pkg_tuple, require_tuple))

        # Add the dependency (after its dependencies, if any) to the build
        # order.
        visit(require_tuple)

    build_order.append(pkg_tuple)
    built.add(pkg_tuple)
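Both visit() variants rely on state from their enclosing scope (visited, built, build_order, package_store). A simplified sketch of how a caller might drive them to produce a dependency-first build order (not the original pkgpanda driver code):

visited = set()
built = set()
build_order = []

# package_store.packages maps (name, variant) tuples to buildinfo dicts that
# carry a 'requires' list, so visiting every package not yet built yields a
# build order in which dependencies always precede their dependents.
for pkg_tuple in sorted(package_store.packages):
    if pkg_tuple not in built:
        visit(pkg_tuple)

print("Build order:", build_order)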
Example 4: _do_bootstrap
def _do_bootstrap(install, repository):
    # These files should be set by the environment which initially builds
    # the host (cloud-init).
    repository_url = if_exists(load_string, install.get_config_filename("setup-flags/repository-url"))

    def fetcher(id, target):
        if repository_url is None:
            raise ValidationError("ERROR: Non-local package {} but no repository url given.".format(id))
        return requests_fetcher(repository_url, id, target, os.getcwd())

    setup_pkg_dir = install.get_config_filename("setup-packages")
    if os.path.exists(setup_pkg_dir):
        raise ValidationError(
            "setup-packages is no longer supported. It's functionality has been replaced with late "
            "binding packages. Found setup packages dir: {}".format(setup_pkg_dir))

    setup_packages_to_activate = []

    # If the host has late config values, build the late config package from them.
    late_config = if_exists(load_yaml, install.get_config_filename("setup-flags/late-config.yaml"))
    if late_config:
        pkg_id_str = late_config['late_bound_package_id']
        late_values = late_config['bound_values']
        print("Binding late config to late package {}".format(pkg_id_str))
        print("Bound values: {}".format(late_values))

        if not PackageId.is_id(pkg_id_str):
            raise ValidationError("Invalid late package id: {}".format(pkg_id_str))
        pkg_id = PackageId(pkg_id_str)
        if pkg_id.version != "setup":
            raise ValidationError("Late package must have the version setup. Bad package: {}".format(pkg_id_str))

        # Collect the late config package.
        with tempfile.NamedTemporaryFile() as f:
            download(
                f.name,
                repository_url + '/packages/{0}/{1}.dcos_config'.format(pkg_id.name, pkg_id_str),
                os.getcwd(),
                rm_on_error=False,
            )
            late_package = load_yaml(f.name)

        # Resolve the late package using the bound late config values.
        final_late_package = resolve_late_package(late_package, late_values)

        # Render the package onto the filesystem and add it to the package
        # repository.
        with tempfile.NamedTemporaryFile() as f:
            do_gen_package(final_late_package, f.name)
            repository.add(lambda _, target: extract_tarball(f.name, target), pkg_id_str)

        setup_packages_to_activate.append(pkg_id_str)

    # If active.json is set on the host, use that as the set of packages to
    # activate. Otherwise just use the set of currently active packages (those
    # active in the bootstrap tarball)
    to_activate = None
    active_path = install.get_config_filename("setup-flags/active.json")
    if os.path.exists(active_path):
        print("Loaded active packages from", active_path)
        to_activate = load_json(active_path)

        # Ensure all packages are local
        print("Ensuring all packages in active set {} are local".format(",".join(to_activate)))
        for package in to_activate:
            repository.add(fetcher, package)
    else:
        print("Calculated active packages from bootstrap tarball")
        to_activate = list(install.get_active())

        package_list_filename = install.get_config_filename("setup-flags/cluster-package-list")
        print("Checking for cluster packages in:", package_list_filename)
        package_list_id = if_exists(load_string, package_list_filename)
        if package_list_id:
            print("Cluster package list:", package_list_id)
            cluster_packages = _get_package_list(package_list_id, repository_url)
            print("Loading cluster-packages: {}".format(cluster_packages))

            for package_id_str in cluster_packages:
                # Validate the package ids
                pkg_id = PackageId(package_id_str)
                # Fetch the packages if not local
                if not repository.has_package(package_id_str):
                    repository.add(fetcher, package_id_str)
                # Add the package to the set to activate
                setup_packages_to_activate.append(package_id_str)
        else:
            print("No cluster-packages specified")

    # Calculate the full set of final packages (Explicit activations + setup packages).
    # De-duplicate using a set.
    to_activate = list(set(to_activate + setup_packages_to_activate))

    print("Activating packages")
    install.activate(repository.load_packages(to_activate))
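For reference, the setup-flags/late-config.yaml file consumed above only needs the two keys this function reads. Loaded with load_yaml, it would come back as a dict shaped roughly like this (the package name and bound value are illustrative, not taken from a real cluster):

late_config = {
    # Must parse as a PackageId whose version is literally "setup".
    'late_bound_package_id': 'dcos-config--setup',
    # Values substituted into the downloaded .dcos_config template by
    # resolve_late_package() before the package is rendered and added.
    'bound_values': {
        'exhibitor_zk_hosts': '10.0.0.1:2181',
    },
}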
Example 5: build
#......... part of the code omitted here .........

            active_package_ids.add(pkg_id_str)

            # Mount the package into the docker container.
            cmd.volumes[pkg_path] = "/opt/mesosphere/packages/{}:ro".format(pkg_id_str)
            os.makedirs(os.path.join(install_dir, "packages/{}".format(pkg_id_str)))

            # Add the dependencies of the package to the set which will be
            # activated.
            # TODO(cmaloney): All these 'transitive' dependencies shouldn't
            # be available to the package being built, only what depends on
            # them directly.
            to_check += pkg_requires
        except ValidationError as ex:
            raise BuildError("validating package needed as dependency {0}: {1}".format(requires_name, ex)) from ex
        except PackageError as ex:
            raise BuildError("loading package needed as dependency {0}: {1}".format(requires_name, ex)) from ex

    # Add requires to the package id, calculate the final package id.
    # NOTE: active_packages isn't fully constructed here since we lazily load
    # packages not already in the repository.
    builder.update('requires', list(active_package_ids))

    version_extra = None
    if builder.has('version_extra'):
        version_extra = builder.take('version_extra')

    build_ids = builder.get_build_ids()
    version_base = hash_checkout(build_ids)
    version = None
    if builder.has('version_extra'):
        version = "{0}-{1}".format(version_extra, version_base)
    else:
        version = version_base

    pkg_id = PackageId.from_parts(name, version)

    # Everything must have been extracted by now. If it wasn't, then we just
    # had a hard error that it was set but not used, as well as didn't include
    # it in the calculation of the PackageId.
    builder = None

    # Save the build_ids. Useful for verifying exactly what went into the
    # package build hash.
    final_buildinfo['build_ids'] = build_ids
    final_buildinfo['package_version'] = version

    # Save the package name and variant. The variant is used when installing
    # packages to validate dependencies.
    final_buildinfo['name'] = name
    final_buildinfo['variant'] = variant

    # If the package is already built, don't do anything.
    pkg_path = package_store.get_package_cache_folder(name) + '/{}.tar.xz'.format(pkg_id)

    # Done if it exists locally
    if exists(pkg_path):
        print("Package up to date. Not re-building.")
        # TODO(cmaloney): Updating / filling last_build should be moved out of
        # the build function.
        write_string(package_store.get_last_build_filename(name, variant), str(pkg_id))
        return pkg_path

    # Try downloading.
    dl_path = package_store.try_fetch_by_id(pkg_id)
    if dl_path:
Example 6: _do_bootstrap
def _do_bootstrap(install, repository):
    # These files should be set by the environment which initially builds
    # the host (cloud-init).
    repository_url = if_exists(load_string, install.get_config_filename("setup-flags/repository-url"))

    # TODO(cmaloney): If there is 1+ master, grab the active config from a master.
    # If the config can't be grabbed from any of them, fail.
    def fetcher(id, target):
        if repository_url is None:
            raise ValidationError("ERROR: Non-local package {} but no repository url given.".format(id))
        return requests_fetcher(repository_url, id, target, os.getcwd())

    # Copy host/cluster-specific packages written to the filesystem manually
    # from the setup-packages folder into the repository. Do not overwrite or
    # merge existing packages, hard fail instead.
    setup_packages_to_activate = []
    setup_pkg_dir = install.get_config_filename("setup-packages")
    copy_fetcher = partial(_copy_fetcher, setup_pkg_dir)
    if os.path.exists(setup_pkg_dir):
        for pkg_id_str in os.listdir(setup_pkg_dir):
            print("Installing setup package: {}".format(pkg_id_str))
            if not PackageId.is_id(pkg_id_str):
                raise ValidationError("Invalid package id in setup package: {}".format(pkg_id_str))
            pkg_id = PackageId(pkg_id_str)
            if pkg_id.version != "setup":
                raise ValidationError(
                    "Setup packages (those in `{0}`) must have the version setup. "
                    "Bad package: {1}".format(setup_pkg_dir, pkg_id_str))

            # Make sure there is no existing package
            if repository.has_package(pkg_id_str):
                print("WARNING: Ignoring already installed package {}".format(pkg_id_str))

            repository.add(copy_fetcher, pkg_id_str)
            setup_packages_to_activate.append(pkg_id_str)

    # If active.json is set on the host, use that as the set of packages to
    # activate. Otherwise just use the set of currently active packages (those
    # active in the bootstrap tarball)
    to_activate = None
    active_path = install.get_config_filename("setup-flags/active.json")
    if os.path.exists(active_path):
        print("Loaded active packages from", active_path)
        to_activate = load_json(active_path)

        # Ensure all packages are local
        print("Ensuring all packages in active set {} are local".format(",".join(to_activate)))
        for package in to_activate:
            repository.add(fetcher, package)
    else:
        print("Calculated active packages from bootstrap tarball")
        to_activate = list(install.get_active())

        # Fetch and activate all requested additional packages to accompany the bootstrap packages.
        cluster_packages_filename = install.get_config_filename("setup-flags/cluster-packages.json")
        cluster_packages = if_exists(load_json, cluster_packages_filename)
        print("Checking for cluster packages in:", cluster_packages_filename)
        if cluster_packages:
            if not isinstance(cluster_packages, list):
                print('ERROR: {} should contain a JSON list of packages. Got a {}'.format(cluster_packages_filename,
                                                                                          type(cluster_packages)))
            print("Loading cluster-packages: {}".format(cluster_packages))

            for package_id_str in cluster_packages:
                # Validate the package ids
                pkg_id = PackageId(package_id_str)
                # Fetch the packages if not local
                if not repository.has_package(package_id_str):
                    repository.add(fetcher, package_id_str)
                # Add the package to the set to activate
                setup_packages_to_activate.append(package_id_str)
        else:
            print("No cluster-packages specified")

    # Calculate the full set of final packages (Explicit activations + setup packages).
    # De-duplicate using a set.
    to_activate = list(set(to_activate + setup_packages_to_activate))

    print("Activating packages")
    install.activate(repository.load_packages(to_activate))
Example 7: build
#......... part of the code omitted here .........

                raise BuildError("The build tarball {} refered to by the last_build file of the "
                                 "dependency {} variant {} doesn't exist. Rebuild the dependency.".format(
                                     pkg_tar,
                                     requires_name,
                                     requires_variant))

            active_package_ids.add(pkg_id_str)

            # Mount the package into the docker container.
            cmd.volumes[pkg_path] = "/opt/mesosphere/packages/{}:ro".format(pkg_id_str)
            os.makedirs(os.path.join(install_dir, "packages/{}".format(pkg_id_str)))

            # Add the dependencies of the package to the set which will be
            # activated.
            # TODO(cmaloney): All these 'transitive' dependencies shouldn't
            # be available to the package being built, only what depends on
            # them directly.
            to_check += pkg_requires
        except ValidationError as ex:
            raise BuildError("validating package needed as dependency {0}: {1}".format(requires_name, ex)) from ex
        except PackageError as ex:
            raise BuildError("loading package needed as dependency {0}: {1}".format(requires_name, ex)) from ex

    # Add requires to the package id, calculate the final package id.
    # NOTE: active_packages isn't fully constructed here since we lazily load
    # packages not already in the repository.
    build_ids['requires'] = list(active_package_ids)
    version_base = hash_checkout(build_ids)
    version = None
    if "version_extra" in buildinfo:
        version = "{0}-{1}".format(buildinfo["version_extra"], version_base)
    else:
        version = version_base

    pkg_id = PackageId.from_parts(name, version)

    # Save the build_ids. Useful for verifying exactly what went into the
    # package build hash.
    buildinfo['build_ids'] = build_ids
    buildinfo['package_version'] = version

    # Save the package name and variant. The variant is used when installing
    # packages to validate dependencies.
    buildinfo['name'] = name
    buildinfo['variant'] = variant

    # If the package is already built, don't do anything.
    pkg_path = pkg_abs("{}.tar.xz".format(pkg_id))

    # Done if it exists locally
    if exists(pkg_path):
        print("Package up to date. Not re-building.")
        # TODO(cmaloney): Updating / filling last_build should be moved out of
        # the build function.
        check_call(["mkdir", "-p", pkg_abs("cache")])
        write_string(pkg_abs(last_build_filename(variant)), str(pkg_id))
        return pkg_path

    # Try downloading.
    if repository_url:
        tmp_filename = pkg_path + '.tmp'
        try:
            # Normalize to no trailing slash for repository_url
            repository_url = repository_url.rstrip('/')
            url = repository_url + '/packages/{0}/{1}.tar.xz'.format(pkg_id.name, str(pkg_id))