本文整理汇总了Python中conda_build.metadata.MetaData类的典型用法代码示例。如果您正苦于以下问题:Python MetaData类的具体用法?Python MetaData怎么用?Python MetaData使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了MetaData类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __call__
def __call__(self, args):
    """Create a feedstock (directory + git repo) from a source conda recipe.

    args: argparse namespace with ``recipe_directory``, ``feedstock_directory``
          (a format string taking ``{package}``) and ``no_git_repo``.
    Raises IOError when the recipe directory does not exist or was not given.
    """
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError("The source recipe directory should be the directory of the "
                      "conda-recipe you want to build a feedstock for. Got {}".format(
                          args.recipe_directory))

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        # Previously this fell through with meta = None and crashed below on
        # meta.name() with an opaque AttributeError; fail fast instead.
        raise IOError("A source recipe directory is required to build a feedstock.")

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))
    msg = 'Initial commit of the {} feedstock.'.format(meta.name())

    try:
        generate_feedstock_content(feedstock_directory, args.recipe_directory, meta)
        if not args.no_git_repo:
            create_git_repo(feedstock_directory, msg)
            print("\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
                  "and afterwards call 'conda smithy register-github'")
    except RuntimeError as e:
        print(e)
示例2: version_compare
def version_compare(package, versions):
    """Compare the local recipe version of *package* with the versions known
    to PyPI and print any newer releases.

    package:  PyPI package name; the recipe is expected in ./<package-lower>.
    versions: list of version strings from PyPI, newest first. An empty list
              returns immediately (caller handles case-sensitivity retry).
    Exits the process via sys.exit() in every non-trivial path.
    """
    if not versions:
        # PyPI is case sensitive, this will pass control
        # to a method in main() to take care of that.
        return

    nv = normalized_version
    norm_versions = [nv(ver) for ver in versions]

    recipe_dir = abspath(package.lower())
    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)
    m = MetaData(recipe_dir)
    local_version = nv(m.version())
    print("Local recipe for %s has version %s" % (package, local_version))
    # local_version is normalized, so membership must be tested against the
    # normalized list (the original compared it against the raw strings, which
    # could miss a match and also make the .index() below raise ValueError).
    if local_version not in norm_versions:
        sys.exit("Error: %s %s is not available on PyPI."
                 % (package, local_version))
    else:
        # Comparing normalized versions, displaying non normalized ones
        new_versions = versions[:norm_versions.index(local_version)]
        if len(new_versions) > 0:
            print("Following new versions of %s are available" % (package))
            for ver in new_versions:
                print(ver)
        else:
            print("No new version for %s is available" % (package))
        sys.exit()
示例3: collapse_subpackage_nodes
def collapse_subpackage_nodes(graph):
    """Collapse all subpackage nodes into their parent recipe node
    We get one node per output, but a given recipe can have multiple outputs. It's important
    for dependency ordering in the graph that the outputs exist independently, but once those
    dependencies are established, we need to collapse subpackages down to a single job for the
    top-level recipe."""
    # group nodes by their recipe path first, then within those groups by their variant
    node_groups = {}
    for node in graph.nodes():
        if 'meta' in graph.node[node]:
            meta = graph.node[node]['meta']
            # An output node may not have its own meta path; fall back to the
            # parent recipe path recorded under meta['extra'].
            meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path']
            master = False
            # The node whose package name matches the top-level recipe name is
            # the "master" for its (recipe path, variant) group.
            master_meta = MetaData(meta_path, config=meta.config)
            if master_meta.name() == meta.name():
                master = True
            group = node_groups.get(meta_path, {})
            # Within one recipe, nodes are sub-grouped by build variant.
            subgroup = group.get(HashableDict(meta.config.variant), {})
            if master:
                if 'master' in subgroup:
                    raise ValueError("tried to set more than one node in a group as master")
                subgroup['master'] = node
            else:
                sps = subgroup.get('subpackages', [])
                sps.append(node)
                subgroup['subpackages'] = sps
            group[HashableDict(meta.config.variant)] = subgroup
            node_groups[meta_path] = group

    for recipe_path, group in node_groups.items():
        for variant, subgroup in group.items():
            # if no node is the top-level recipe (only outputs, no top-level output), need to obtain
            # package/name from recipe given by common recipe path.
            subpackages = subgroup.get('subpackages')
            if 'master' not in subgroup:
                # Synthesize the master node from the recipe at the shared path,
                # borrowing config and worker from the first subpackage.
                sp0 = graph.node[subpackages[0]]
                master_meta = MetaData(recipe_path, config=sp0['meta'].config)
                worker = sp0['worker']
                master_key = package_key(master_meta, worker['label'])
                graph.add_node(master_key, meta=master_meta, worker=worker)
                master = graph.node[master_key]
            else:
                master = subgroup['master']
                master_key = package_key(graph.node[master]['meta'],
                                         graph.node[master]['worker']['label'])
            # fold in dependencies for all of the other subpackages within a group. This is just
            # the intersection of the edges between all nodes. Store this on the "master" node.
            if subpackages:
                remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages]
                for edge in remap_edges:
                    # make sure not to add references to yourself
                    if edge[0] != master_key:
                        graph.add_edge(edge[0], master_key)
                    graph.remove_edge(*edge)
                # remove nodes that have been folded into master nodes
                for subnode in subpackages:
                    graph.remove_node(subnode)
示例4: execute
def execute(args, parser):
    """Build (or check/test/provide sources for) each recipe named in args.recipe.

    Recipes may be directories or tarballs (.tar/.tar.gz/.tgz/.tar.bz2), which
    are unpacked to a temp dir and cleaned up afterwards. Flags on ``args``
    (check / output / test / source / notest) select the action per recipe.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()
    # Serialize builds against the conda build root.
    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    # Unpack a recipe tarball into a temp dir that we must clean up.
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            # With --check and several recipes, print which one is being checked.
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                # Only a full successful build is a candidate for upload.
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
示例5: __call__
def __call__(self, args):
    """Create a feedstock directory from a source recipe and commit it to a
    fresh git repository.

    NOTE(review): when ``args.recipe_directory`` is falsy, ``meta`` stays None
    and ``meta.name()`` below raises AttributeError — confirm whether invoking
    without a recipe directory is ever supported.
    """
    # check some error conditions
    if args.recipe_directory and not os.path.isdir(args.recipe_directory):
        raise IOError(
            "The source recipe directory should be the directory of the "
            "conda-recipe you want to build a feedstock for. Got {}".format(
                args.recipe_directory
            )
        )

    # Get some information about the source recipe.
    if args.recipe_directory:
        meta = MetaData(args.recipe_directory)
    else:
        meta = None

    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name())
    )
    msg = "Initial feedstock commit with conda-smithy {}.".format(
        __version__
    )

    os.makedirs(feedstock_directory)
    subprocess.check_call(["git", "init"], cwd=feedstock_directory)
    generate_feedstock_content(feedstock_directory, args.recipe_directory)
    subprocess.check_call(
        ["git", "commit", "-m", msg], cwd=feedstock_directory
    )

    print(
        "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
        "and afterwards call 'conda smithy register-github'"
    )
示例6: read_recipe_name_version_build
def read_recipe_name_version_build(meta_yaml_path):
    """
    Read the given metadata file and return (package_name, version, build_number)

    meta_yaml_path: May be a path to a meta.yaml file or its parent recipe directory.
    """
    # Provide these default values, otherwise conda-build will
    # choke on jinja templates that reference them.
    # This will be fixed when they finally merge conda-build PR#662 and PR#666
    if "CONDA_NPY" not in os.environ:
        os.environ["CONDA_NPY"] = '19'
    if "CONDA_PY" not in os.environ:
        os.environ["CONDA_PY"] = '27'
    os.environ["GIT_FULL_HASH"] = "9999999"

    # Accept either the recipe directory itself or a path to its meta.yaml.
    if os.path.isdir(meta_yaml_path):
        recipe_dir = meta_yaml_path
    else:
        recipe_dir = os.path.split(meta_yaml_path)[0]

    try:
        metadata = MetaData(recipe_dir)
        return (metadata.name(), metadata.version(), metadata.build_number())
    except SystemExit as ex:
        # MetaData calls sys.exit() on parse errors; convert it to a regular
        # exception so callers can handle it. NOTE(review): the original
        # traceback context is lost here; on Python-3-only code this would be
        # better as ``raise Exception(*ex.args) from ex``.
        raise Exception(*ex.args)
示例7: setup
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir
    and generate a README.rst file.
    """
    print('Generating package READMEs...')
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                # Only keep subfolder names that parse as versions.
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        # Newest version first.
        versions.sort(key=LooseVersion, reverse=True)
        # Read the meta.yaml file
        try:
            metadata = MetaData(op.join(RECIPE_DIR, folder))
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        except SystemExit:
            # conda-build exits if the folder itself holds no recipe; retry
            # with the newest versioned subfolder instead.
            if versions:
                metadata = MetaData(op.join(RECIPE_DIR, folder, versions[0]))
            else:
                # ignore non-recipe folders
                continue
        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                       op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }
        readme = README_TEMPLATE.format(**template_options)
        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')
        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))
示例8: test_meta_sorting_version_strip
def test_meta_sorting_version_strip():
    """A version constraint ('b > 1.2') must be stripped to the bare package
    name when computing dependency order: b has to be built before a."""
    meta_a = MetaData.fromdict({
        'package': {'name': 'a'},
        'requirements': {'build': ['b > 1.2']},
    })
    meta_b = MetaData.fromdict({
        'package': {'name': 'b'},
    })
    ordered = sort_dependency_order([meta_a, meta_b])
    assert_equal([meta.name() for meta in ordered], ['b', 'a'])
示例9: main
def main():
    """Download every additional source listed under the recipe's
    extra/sources section, each into its own sub-directory of the main
    conda-build work dir."""
    recipe_dir = os.environ["RECIPE_DIR"]
    src_dir = os.environ["SRC_DIR"]
    base_work_dir = source.WORK_DIR

    recipe_meta = MetaData(recipe_dir)
    for src_name, src_spec in recipe_meta.get_section('extra')['sources'].items():
        # Point conda-build's source machinery at a fresh per-source directory.
        source.WORK_DIR = base_work_dir + '/' + src_name
        os.makedirs(source.WORK_DIR)
        # Fetch this source (download/clone/extract) into the new work dir.
        source.provide(recipe_dir, src_spec)
示例10: get_deps
def get_deps(recipe, build=True):
    """
    Generator of dependencies for a single recipe, which can be specified as
    a path or as a parsed MetaData.
    Only names (not versions) of dependencies are yielded. Use `build=True` to
    yield build dependencies, otherwise yield run dependencies.
    """
    metadata = MetaData(recipe) if isinstance(recipe, str) else recipe
    section = "build" if build else "run"
    for spec in metadata.get_value("requirements/{}".format(section), []):
        # A spec is "name [version [build]]"; only the leading name is wanted.
        yield spec.split()[0]
示例11: render_recipe
def render_recipe(recipe_path, config, no_download_source=False):
    """Render the recipe at *recipe_path* (a directory, tarball, or .yaml file)
    into a MetaData object.

    Returns (metadata, need_download, need_reparse_in_env), or None when the
    path is not a recognizable recipe. Raises ValueError when the recipe needs
    its source downloaded but no_download_source is set.
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            # Recipe tarball: unpack to a temp dir and remove it when done.
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            # A meta.yaml path: use its containing directory as the recipe.
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    if config.set_build_id:
        # updates a unique build id if not already computed
        config.compute_build_id(os.path.basename(recipe_dir))
    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                                                 no_download_source=no_download_source,
                                                                 config=config)
    if need_download and no_download_source:
        raise ValueError("no_download_source specified, but can't fully render recipe without"
                         " downloading source. Please fix the recipe, or don't use "
                         "no_download_source.")
    config.noarch = bool(m.get_value('build/noarch'))

    if need_cleanup:
        rm_rf(recipe_dir)

    return m, need_download, need_reparse_in_env
示例12: __call__
def __call__(self, args):
    """Generate feedstock content from a conda recipe directory and,
    unless suppressed, create a git repository for it."""
    recipe_dir = args.recipe_directory
    if not os.path.isdir(recipe_dir):
        raise IOError("The recipe directory should be the directory of the conda-recipe. Got {}".format(recipe_dir))

    meta = MetaData(recipe_dir)
    # The feedstock directory template may reference {package.name}.
    feedstock_directory = args.feedstock_directory.format(
        package=argparse.Namespace(name=meta.name()))

    generate_feedstock_content(feedstock_directory, recipe_dir)
    if not args.no_git_repo:
        create_git_repo(feedstock_directory, meta)
示例13: testing_metadata
def testing_metadata(request, testing_config):
    """Fixture: build a minimal, fully-populated MetaData named after the
    requesting test function, using the first default variant."""
    meta_dict = defaultdict(dict)
    meta_dict['package'] = {'name': request.function.__name__,
                            'version': '1.0'}
    meta_dict['build'] = {'number': '1', 'entry_points': []}
    meta_dict['requirements'] = {'build': ['python'], 'run': ['python']}
    meta_dict['test'] = {'commands': ['echo "A-OK"', 'exit 0']}
    meta_dict['about'] = {'home': "sweet home",
                          'license': "contract in blood",
                          'summary': "a test package"}
    testing_config.variant = get_default_variants()[0]
    return MetaData.fromdict(meta_dict, config=testing_config)
示例14: main
def main():
    """Upload a built conda distribution to binstar, or report its status.

    The BINSTAR_TOKEN environment variable controls whether an actual upload
    happens; without it the script only reports whether the distribution
    already exists.
    """
    token = os.environ.get('BINSTAR_TOKEN')

    description = ('Upload or check consistency of a built version of a '
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_binstar(argparse.Namespace(token=token, site=None))
    meta = MetaData(recipe_dir)
    # Recipes skipped for this configuration produce no package to upload.
    if meta.skip():
        print("No upload to take place - this configuration was skipped in build/skip.")
        return
    exists = built_distribution_already_exists(cli, meta, owner)
    if token:
        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
        if not exists:
            # Fresh package: upload it to the requested channel.
            upload(cli, meta, owner, channel)
            print('Uploaded {}'.format(bldpkg_path(meta)))
        elif not on_channel:
            # Already on binstar, but not yet on this channel: just link it.
            print('Adding distribution {} to {}\'s {} channel'
                  ''.format(bldpkg_path(meta), owner, channel))
            add_distribution_to_channel(cli, meta, owner, channel)
        else:
            print('Distribution {} already \nexists on {}\'s {} channel.'
                  ''.format(bldpkg_path(meta), owner, channel))
    else:
        # Dry-run mode: report status only.
        print("No BINSTAR_TOKEN present, so no upload is taking place. "
              "The distribution just built {} already available on {}'s "
              "{} channel.".format('is' if exists else 'is not',
                                   owner, channel))
示例15: main
def main():
    """Download, verify, extract and patch the MSYS2 binaries described in
    the recipe's ``extra: msys2-binaries`` section, then move files into
    their final locations under $PREFIX.
    """
    recipe_dir = os.environ["RECIPE_DIR"]
    conda_platform = 'win-32' if os.environ["ARCH"] == '32' else 'win-64'
    prefix = os.environ['PREFIX']

    metadata = MetaData(recipe_dir)
    # All per-platform settings live under one mapping; look it up once
    # instead of repeating the get_section(...) chain for every field.
    binaries = metadata.get_section('extra')['msys2-binaries'][conda_platform]
    msys2_tar_xz_url = binaries['url']
    msys2_md5 = binaries['md5']
    mv_srcs_list = binaries['mv-srcs']
    mv_dsts_list = binaries['mv-dsts']

    msys2_tar_xz = get_tar_xz(msys2_tar_xz_url, msys2_md5)
    tar = tarfile.open(msys2_tar_xz, 'r|xz')
    tar.extractall(path=prefix)

    # 'patches' is optional; a missing key simply means no patches.
    # (The original bare ``except:`` also swallowed KeyboardInterrupt etc.)
    patches = binaries.get('patches', [])
    for patchname in patches:
        patchset = patch.fromfile(join(getenv('RECIPE_DIR'), patchname))
        patchset.apply(1, root=prefix)

    # shutil is a bit funny (like mv) with regards to how it treats
    # the destination depending on whether it is an existing directory or not
    # (i.e. moving into that versus moving as that).
    # Therefore, the rules employed are:
    # 1. If mv_dst ends with a '/' it is a directory that you want mv_src
    #    moved into.
    # 2. If mv_src has a wildcard, mv_dst is a directory that you want mv_src
    #    moved into.
    # In these cases we makedirs(mv_dst) and then call move(mv_src, mv_dst)
    # .. otherwise we makedirs(dirname(mv_dst)) and call move(mv_src, mv_dst)
    # .. however, if no mv_srcs exist we don't makedirs at all.
    for mv_src, mv_dst in zip(mv_srcs_list, mv_dsts_list):
        mv_dst_definitely_dir = False
        mv_srcs = glob(join(prefix, normpath(mv_src)))
        if '*' in mv_src or mv_dst.endswith('/') or len(mv_srcs) > 1:
            mv_dst_definitely_dir = True
        if len(mv_srcs):
            mv_dst = join(prefix, normpath(mv_dst))
            mv_dst_mkdir = mv_dst
            if not mv_dst_definitely_dir:
                mv_dst_mkdir = dirname(mv_dst_mkdir)
            try:
                makedirs(mv_dst_mkdir)
            except OSError:
                # Destination directory already exists — that's fine.
                pass
            for mv_src in mv_srcs:
                move(mv_src, mv_dst)
    tar.close()