This article collects typical usage examples of the Python function pulp_rpm.plugins.importers.yum.repomd.packages.package_list_generator. If you are wondering what package_list_generator does, how to call it, or what real-world code that uses it looks like, the curated examples below should help.
The following shows 15 code examples of package_list_generator, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
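All of the examples share the same calling pattern: package_list_generator takes an open file handle for a repo metadata file, the XML tag that marks each unit, and an optional processing function that turns each element into a model object (when the processing function is omitted, as in Example 3, the generator yields raw ElementTree elements). Here is a minimal sketch of that pattern, assuming pulp_rpm is installed and a local primary.xml file exists; the file path and the print loop are illustrative only, and the import locations follow the module paths used in the snippets below.

from pulp_rpm.plugins.importers.yum.repomd import packages, primary

# Stream package models out of primary.xml without loading the whole file
# into memory; 'primary.xml' is a placeholder path.
with open('primary.xml') as file_handle:
    package_models = packages.package_list_generator(file_handle,
                                                     primary.PACKAGE_TAG,
                                                     primary.process_package_element)
    for model in package_models:
        print model.unit_key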
Example 1: save_fileless_units
def save_fileless_units(self, file_handle, tag, process_func, mutable_type=False,
                        additive_type=False):
    """
    Generic method for saving units parsed from a repo metadata file where
    the units do not have files to store on disk. For example, groups.

    :param file_handle: open file-like object containing metadata
    :type file_handle: file
    :param tag: XML tag that identifies each unit
    :type tag: basestring
    :param process_func: function that processes each unit and returns
                         a dict representing that unit's attribute names
                         and values. The function must take one parameter,
                         which is an ElementTree instance
    :type process_func: function
    :param mutable_type: iff True, each unit will be saved regardless of
                         whether it already exists in the repo. this is
                         useful for units like group and category which
                         don't have a version, but could change
    :type mutable_type: bool
    :param additive_type: iff True, units will be updated instead of
                          replaced. For example, if you wanted to save an
                          errata and concatenate its package list with an
                          existing errata, you'd set this. Note that mutable_type
                          and additive_type are mutually exclusive.
    :type additive_type: bool
    """
    if mutable_type and additive_type:
        raise PulpCodedException(message="The mutable_type and additive_type arguments for "
                                         "this method are mutually exclusive.")

    # iterate through the file and determine what we want to have
    package_info_generator = packages.package_list_generator(file_handle,
                                                             tag,
                                                             process_func)
    # if units aren't mutable, we don't need to attempt saving units that
    # we already have
    if not mutable_type and not additive_type:
        wanted = (model.as_named_tuple for model in package_info_generator)

        # given what we want, filter out what we already have
        to_save = existing.check_repo(wanted, self.sync_conduit.get_units)

        # rewind, iterate again through the file, and save what we need
        file_handle.seek(0)
        all_packages = packages.package_list_generator(file_handle,
                                                       tag,
                                                       process_func)
        package_info_generator = (model for model in all_packages if
                                  model.as_named_tuple in to_save)

    for model in package_info_generator:
        unit = self.sync_conduit.init_unit(model.TYPE, model.unit_key, model.metadata, None)
        if additive_type:
            existing_unit = self.sync_conduit.find_unit_by_unit_key(model.TYPE, model.unit_key)
            if existing_unit:
                unit = self._concatenate_units(existing_unit, unit)
        self.sync_conduit.save_unit(unit)
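Note the two-pass read in the non-mutable branch: the generator is first consumed only to build the set of wanted named tuples, then file_handle.seek(0) rewinds the metadata file so a second pass instantiates and saves just the units in to_save. This keeps memory usage flat for large metadata files, at the cost of parsing the XML twice.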
Example 2: download
def download(self, metadata_files, rpms_to_download, drpms_to_download):
    """
    Actually download the requested RPMs and DRPMs. This method iterates over
    the appropriate metadata file and downloads those items which are present
    in the corresponding set. It also checks for the RPMs and DRPMs which exist
    in other repositories before downloading them. If they are already downloaded,
    we skip the download and just associate them to the given repository.

    :param metadata_files: populated instance of MetadataFiles
    :type metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param rpms_to_download: set of RPM.NAMEDTUPLEs
    :type rpms_to_download: set
    :param drpms_to_download: set of DRPM.NAMEDTUPLEs
    :type drpms_to_download: set

    :rtype: pulp.plugins.model.SyncReport
    """
    # TODO: probably should make this more generic
    event_listener = ContentListener(self.sync_conduit, self.progress_status, self.call_config,
                                     metadata_files)

    primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        package_model_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        units_to_download = self._filtered_unit_generator(package_model_generator,
                                                          rpms_to_download)

        download_wrapper = alternate.Packages(self.sync_feed, self.nectar_config,
                                              units_to_download, self.tmp_dir, event_listener)
        # allow the downloader to be accessed by the cancel method if necessary
        self.downloader = download_wrapper.downloader

        _logger.info(_('Downloading %(num)s RPMs.') % {'num': len(rpms_to_download)})
        download_wrapper.download_packages()
        self.downloader = None
    finally:
        primary_file_handle.close()

    # download DRPMs
    presto_file_handle = metadata_files.get_metadata_file_handle(presto.METADATA_FILE_NAME)
    if presto_file_handle:
        try:
            package_model_generator = packages.package_list_generator(
                presto_file_handle, presto.PACKAGE_TAG, presto.process_package_element)
            units_to_download = self._filtered_unit_generator(package_model_generator,
                                                              drpms_to_download)

            download_wrapper = packages.Packages(self.sync_feed, self.nectar_config,
                                                 units_to_download, self.tmp_dir,
                                                 event_listener)
            # allow the downloader to be accessed by the cancel method if necessary
            self.downloader = download_wrapper.downloader

            _logger.info(_('Downloading %(num)s DRPMs.') % {'num': len(drpms_to_download)})
            download_wrapper.download_packages()
            self.downloader = None
        finally:
            presto_file_handle.close()

    report = self.sync_conduit.build_success_report({}, {})
    return report
Example 3: generate_dbs
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read
    access to each unit's data.
    """
    for filename, tag, process_func in (
            (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG, filelists.process_package_element),
            (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        xml_file_handle = self.get_metadata_file_handle(filename)
        try:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, "%s.db" % filename)
            # always a New file, and open with Fast writing mode.
            db_file_handle = gdbm.open(db_filename, "nf")
            try:
                for element in generator:
                    utils.strip_ns(element)
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
            finally:
                db_file_handle.close()
        finally:
            xml_file_handle.close()
        self.dbs[filename] = db_filename
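The db files written here can be read back later with plain gdbm. A minimal read-back sketch, assuming the key scheme produced above; the path below is a placeholder (generate_dbs names each file "<metadata file name>.db" under dst_dir):

import gdbm

# Open read-only and walk every stored entry; each value is the stripped,
# re-serialized package XML that generate_dbs wrote.
db = gdbm.open('/path/to/dst_dir/filelists.db', 'r')  # placeholder path
try:
    key = db.firstkey()
    while key is not None:
        raw_xml = db[key]
        key = db.nextkey(key)
finally:
    db.close()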
Example 4: test_rhel6_real_data
def test_rhel6_real_data(self):
    with open(os.path.join(os.path.dirname(__file__),
                           '../data/RHBA-2010-0836.erratum.xml')) as f:
        errata = packages.package_list_generator(f,
                                                 updateinfo.PACKAGE_TAG,
                                                 updateinfo.process_package_element)

        errata = list(errata)

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.metadata.get('rights'), 'Copyright 2010 Red Hat Inc')
    self.assertTrue(erratum.metadata.get('summary') is not None)
    self.assertEqual(erratum.id, 'RHBA-2010:0836')
    self.assertEqual(erratum.metadata.get('type'), 'bugfix')
    self.assertEqual(erratum.metadata.get('updated'), '2010-11-10 00:00:00')
    self.assertEqual(erratum.metadata.get('reboot_suggested'), False)
    self.assertEqual(erratum.metadata.get('severity'), '')

    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM.from_package_info(rpm)
        self.assertEqual(model.checksumtype, 'sha256')
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith('NetworkManager'))
        self.assertEqual(model.version, '0.8.1')
        self.assertEqual(model.release, '5.el6_0.1')
Example 5: generate_dbs
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read
    access to each unit's data.

    :raises PulpCodedException: if there is some inconsistency in metadata
    """
    package_count = {}
    for filename, tag, process_func in (
            (filelists.METADATA_FILE_NAME,
             filelists.PACKAGE_TAG, filelists.process_package_element),
            (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        with contextlib.closing(self.get_metadata_file_handle(filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    element.attrib['pkgid'] = models.RpmBase.PKGID_TEMPLATE
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
                package_count[filename] = len(db_file_handle)
        self.dbs[filename] = db_filename

    if package_count[filelists.METADATA_FILE_NAME] != package_count[other.METADATA_FILE_NAME]:
        reason = ('metadata is specified for different set of packages in filelists.xml'
                  ' and in other.xml')
        raise PulpCodedException(error_code=error_codes.RPM1015, reason=reason)
    self.rpm_count = package_count[filelists.METADATA_FILE_NAME]
Example 6: main
def main():
    try:
        update_info_file_path = sys.argv[1]
        output_directory = sys.argv[2]
    except IndexError:
        print "Usage: %s <update info file path> <output directory>" % sys.argv[0]
        return os.EX_NOINPUT

    update_info_file_handle = open(update_info_file_path, "r")
    package_list_generator = packages.package_list_generator(
        update_info_file_handle, "update", updateinfo.process_package_element
    )

    with metadata.UpdateinfoXMLFileContext(output_directory) as update_info_file_context:
        try:
            for erratum_unit in package_list_generator:
                # pprint(erratum_unit.metadata)
                update_info_file_context.add_unit_metadata(erratum_unit)
        except Exception:
            traceback.print_exc(file=sys.stderr)
            return os.EX_SOFTWARE

    return os.EX_OK
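The excerpt stops at main(); how it is invoked is not shown. A conventional entry-point guard like the following (an assumption, not part of the original script) would turn the os.EX_* return values into the process exit status:

if __name__ == '__main__':
    sys.exit(main())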
Example 7: _get_and_save_file_units
def _get_and_save_file_units(filename, processing_function, tag, conduit, repo):
    """
    Given a comps.xml file, this method decides which groups/categories to get and saves
    the parsed units.

    :param filename: open file-like object containing metadata
    :type filename: file
    :param processing_function: method to use for generating the units
    :type processing_function: function
    :param tag: XML tag that identifies each unit
    :type tag: str
    :param conduit: provides access to relevant Pulp functionality
    :type conduit: pulp.plugins.conduits.upload.UploadConduit
    :param repo: The repository to import the package into
    :type repo: pulp.server.db.model.Repository
    """
    repo_id = repo.repo_id
    process_func = functools.partial(processing_function, repo_id)

    package_info_generator = packages.package_list_generator(filename, tag, process_func)
    for model in package_info_generator:
        try:
            model.save()
        except NotUniqueError:
            model = model.__class__.objects.filter(**model.unit_key).first()

        repo_controller.associate_single_unit(repo, model)
Example 8: test_updateinfo_unit_metadata
def test_updateinfo_unit_metadata(self):
    path = os.path.join(self.metadata_file_dir,
                        REPO_DATA_DIR_NAME,
                        UPDATE_INFO_XML_FILE_NAME)
    handle = open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r')
    generator = packages.package_list_generator(handle, 'update',
                                                updateinfo.process_package_element)
    erratum_unit = next(generator)

    # just checking
    self.assertEqual(erratum_unit.unit_key['id'], 'RHEA-2010:9999')

    context = UpdateinfoXMLFileContext(self.metadata_file_dir)
    context._open_metadata_file_handle()
    context.add_unit_metadata(erratum_unit)
    context._close_metadata_file_handle()

    self.assertNotEqual(os.path.getsize(path), 0)
    updateinfo_handle = gzip.open(path, 'r')
    content = updateinfo_handle.read()
    updateinfo_handle.close()

    self.assertEqual(content.count('from="[email protected]"'), 1)
    self.assertEqual(content.count('status="final"'), 1)
    self.assertEqual(content.count('type="enhancements"'), 1)
    self.assertEqual(content.count('version="1"'), 1)
    self.assertEqual(content.count('<id>RHEA-2010:9999</id>'), 1)
    self.assertEqual(content.count('<collection short="F13PTP">'), 1)
    self.assertEqual(content.count('<package'), 2)
    self.assertEqual(content.count('<sum type="md5">f3c197a29d9b66c5b65c5d62b25db5b4</sum>'), 1)
Example 9: _decide_drpms_to_download
def _decide_drpms_to_download(self, metadata_files):
    """
    Decide which DRPMs should be downloaded based on the repo metadata and on
    the importer config.

    :param metadata_files: instance of MetadataFiles
    :type metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(DRPM.NAMEDTUPLEs), number of DRPMs, total size in bytes)
    :rtype: tuple
    """
    if models.DRPM.TYPE in self.call_config.get(constants.CONFIG_SKIP, []):
        _LOGGER.debug('skipping DRPM sync')
        return set(), 0, 0

    presto_file_handle = metadata_files.get_metadata_file_handle(presto.METADATA_FILE_NAME)
    if presto_file_handle:
        try:
            package_info_generator = packages.package_list_generator(presto_file_handle,
                                                                     presto.PACKAGE_TAG,
                                                                     presto.process_package_element)
            wanted = self._identify_wanted_versions(package_info_generator)
            to_download = existing.check_repo(wanted.iterkeys(), self.sync_conduit.get_units)
            count = len(to_download)
            size = 0
            for unit in to_download:
                size += wanted[unit]
        finally:
            presto_file_handle.close()
    else:
        to_download = set()
        count = 0
        size = 0

    return to_download, count, size
Example 10: _decide_rpms_to_download
def _decide_rpms_to_download(self, metadata_files):
    """
    Decide which RPMs should be downloaded based on the repo metadata and on
    the importer config.

    :param metadata_files: instance of MetadataFiles
    :type metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(RPM.NAMEDTUPLEs), number of RPMs, total size in bytes)
    :rtype: tuple
    """
    if ids.TYPE_ID_RPM in self.config.get(constants.CONFIG_SKIP, []):
        _logger.debug('skipping RPM sync')
        return set(), 0, 0

    primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        # scan through all the metadata to decide which packages to download
        package_info_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        wanted = self._identify_wanted_versions(package_info_generator)
        # check for the units that are not in the repo, but exist on the server
        # and associate them to the repo
        to_download = existing.check_all_and_associate(
            wanted.iterkeys(), self.conduit, self.download_deferred)
        count = len(to_download)
        size = 0
        for unit in to_download:
            size += wanted[unit]
        return to_download, count, size
    finally:
        primary_file_handle.close()
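Compared with the DRPM variant in Example 9, which only filters the wanted set through existing.check_repo, this version calls existing.check_all_and_associate, so units that already exist elsewhere on the server are associated with the repo right away and dropped from the download set.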
Example 11: get_remote_units
def get_remote_units(file_function, tag, process_func):
    """
    return a set of units (as named tuples) that are in the remote repository

    :param file_function: Method that returns a file handle for the units file on disk.
    :type file_function: function
    :param tag: name of the XML tag that identifies each object
                in the XML file
    :type tag: basestring
    :param process_func: function that takes one argument, of type
                         xml.etree.ElementTree.Element, or the cElementTree
                         equivalent, and returns a dictionary containing
                         metadata about the unit
    :type process_func: function

    :return: set of named tuples representing units
    :rtype: set
    """
    remote_named_tuples = set()
    file_handle = file_function()
    if file_handle is None:
        return set()
    try:
        package_info_generator = packages.package_list_generator(file_handle,
                                                                 tag,
                                                                 process_func)
        for model in package_info_generator:
            named_tuple = model.as_named_tuple
            remote_named_tuples.add(named_tuple)
    finally:
        file_handle.close()
    return remote_named_tuples
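A hedged wiring example, reusing the MetadataFiles handle getter and the primary module constants that appear in other snippets on this page; metadata_files is assumed to be an already-populated MetadataFiles instance:

import functools

# functools.partial defers opening the file until get_remote_units asks for it;
# if the repo has no primary metadata, the helper simply returns an empty set.
file_function = functools.partial(metadata_files.get_metadata_file_handle,
                                  primary.METADATA_FILE_NAME)
remote_rpms = get_remote_units(file_function, primary.PACKAGE_TAG,
                               primary.process_package_element)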
Example 12: test_rhel6_real_data
def test_rhel6_real_data(self):
    with open(os.path.join(os.path.dirname(__file__), "../data/RHBA-2010-0836.erratum.xml")) as f:
        errata = packages.package_list_generator(f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element)

        errata = list(errata)

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.metadata.get("rights"), "Copyright 2010 Red Hat Inc")
    self.assertTrue(erratum.metadata.get("summary") is not None)
    self.assertEqual(erratum.id, "RHBA-2010:0836")
    self.assertEqual(erratum.metadata.get("type"), "bugfix")
    self.assertEqual(erratum.metadata.get("updated"), "2010-11-10 00:00:00")
    self.assertEqual(erratum.metadata.get("reboot_suggested"), False)
    self.assertEqual(erratum.metadata.get("severity"), "")

    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM.from_package_info(rpm)
        self.assertEqual(model.checksumtype, "sha256")
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith("NetworkManager"))
        self.assertEqual(model.version, "0.8.1")
        self.assertEqual(model.release, "5.el6_0.1")
Example 13: generate_dbs
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read
    access to each unit's data.
    """
    for filename, tag, process_func in (
            (filelists.METADATA_FILE_NAME,
             filelists.PACKAGE_TAG, filelists.process_package_element),
            (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        with contextlib.closing(self.get_metadata_file_handle(filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    element.attrib['pkgid'] = models.RpmBase.PKGID_TEMPLATE
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
        self.dbs[filename] = db_filename
Example 14: test_centos6_real_data
def test_centos6_real_data(self):
    groups = packages.package_list_generator(StringIO(CENTOS6_COMPS_XML),
                                             group.GROUP_TAG,
                                             self.process_group)

    groups = list(groups)
    self.assertEqual(len(groups), 2)

    for model in groups:
        self.assertTrue(isinstance(model, models.PackageGroup))
        self.assertEqual(model.repo_id, "repo1")
Example 15: download_drpms
def download_drpms(self, metadata_files, drpms_to_download, url):
    """
    Actually download the requested DRPMs. This method iterates over
    the appropriate metadata file and downloads those items which are present
    in the corresponding set. It also checks for the DRPMs which exist
    in other repositories before downloading them. If they are already downloaded,
    we skip the download and just associate them to the given repository.

    There can be multiple deltainfo files depending on the distribution,
    so we have to go through all of them to get all the DRPMs.

    :param metadata_files: populated instance of MetadataFiles
    :type metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param drpms_to_download: set of DRPM.NAMEDTUPLEs
    :type drpms_to_download: set
    :param url: current URL we should sync
    :type url: str
    """
    event_listener = DRPMListener(self, metadata_files)

    for presto_file_name in presto.METADATA_FILE_NAMES:
        presto_file_handle = metadata_files.get_metadata_file_handle(presto_file_name)
        if presto_file_handle:
            try:
                package_model_generator = packages.package_list_generator(
                    presto_file_handle,
                    presto.PACKAGE_TAG,
                    presto.process_package_element)
                units_to_download = self._filtered_unit_generator(package_model_generator,
                                                                  drpms_to_download)

                # Wrapped in a generator that adds entries to
                # the deferred (Lazy) catalog.
                units_to_download = self.catalog_generator(url, units_to_download)

                if self.download_deferred:
                    for unit in units_to_download:
                        unit.downloaded = False
                        self.add_drpm_unit(metadata_files, unit)
                    continue

                download_wrapper = packages.Packages(
                    url,
                    self.nectar_config,
                    units_to_download,
                    self.tmp_dir,
                    event_listener,
                    self._url_modify)

                # allow the downloader to be accessed by the cancel method if necessary
                self.downloader = download_wrapper.downloader

                _logger.info(_('Downloading %(num)s DRPMs.') % {'num': len(drpms_to_download)})
                download_wrapper.download_packages()
                self.downloader = None
            finally:
                presto_file_handle.close()