本文整理汇总了Python中pulp.server.controllers.repository.find_repo_content_units函数的典型用法代码示例。如果您正苦于以下问题:Python find_repo_content_units函数的具体用法?Python find_repo_content_units怎么用?Python find_repo_content_units使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了find_repo_content_units函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_existing_units
def get_existing_units(search_dicts, unit_class, repo):
    """
    Search the given repository for units matching any of the search terms.

    Only the unit key fields of each yielded instance are populated.

    :param search_dicts: iterable of dictionaries used to match units
    :type search_dicts: iterable
    :param unit_class: subclass representing the type of unit to search for
    :type unit_class: pulp_rpm.plugins.db.models.Package
    :param repo: repository to search in
    :type repo: pulp.server.db.model.Repository
    :return: generator of unit_class instances with only their unit key fields populated
    :rtype: generator
    """
    key_fields = unit_class.unit_key_fields
    # The unit-type filter does not depend on the page, so build it once.
    type_filter = mongoengine.Q(unit_type_id=unit_class._content_type_id.default)
    for page in paginate(search_dicts):
        match_any_q = mongoengine.Q(__raw__={'$or': list(page)})
        found = repo_controller.find_repo_content_units(
            repo, units_q=match_any_q, repo_content_unit_q=type_filter,
            unit_fields=key_fields, yield_content_unit=True)
        for unit in found:
            yield unit
示例2: publish_repo
def publish_repo(self, repo, publish_conduit, config):
    """
    Publish the repository.

    The repo's units are symlinked into a scratch build directory along with
    the generated metadata; any previous publish is then removed and the
    finished tree is copied into each hosting location.

    :param repo: metadata describing the repo
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: The conduit for publishing a repo
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginConfiguration
    :return: report describing the publish operation
    :rtype: pulp.plugins.model.PublishReport
    """
    progress_report = FilePublishProgressReport(publish_conduit)
    _logger.info(_('Beginning publish for repository <%(repo)s>') % {'repo': repo.id})
    try:
        progress_report.state = progress_report.STATE_IN_PROGRESS
        repo_model = repo.repo_obj
        units = repo_controller.find_repo_content_units(repo_model, yield_content_unit=True)
        # Set up an empty build_dir
        working_dir = common_utils.get_working_directory()
        build_dir = os.path.join(working_dir, BUILD_DIRNAME)
        os.makedirs(build_dir)
        self.initialize_metadata(build_dir)
        try:
            # process each unit: symlink its content into the build tree and
            # record it in the repo metadata
            for unit in units:
                links_to_create = self.get_paths_for_unit(unit)
                self._symlink_unit(build_dir, unit, links_to_create)
                self.publish_metadata_for_unit(unit)
        finally:
            # Finalize the metadata even if unit processing raised
            self.finalize_metadata()
        # Let's unpublish, and then republish
        self.unpublish_repo(repo, config)
        hosting_locations = self.get_hosting_locations(repo_model, config)
        for location in hosting_locations:
            # symlinks=True copies the links themselves, not their targets
            shutil.copytree(build_dir, location, symlinks=True)
        self.post_repo_publish(repo_model, config)
        # Report that we are done
        progress_report.state = progress_report.STATE_COMPLETE
        return progress_report.build_final_report()
    except Exception, e:
        # Python 2 except syntax. Failures are captured on the report rather
        # than propagated to the caller.
        _logger.exception(e)
        # Something failed. Let's put an error message on the report
        progress_report.error_message = str(e)
        progress_report.traceback = traceback.format_exc()
        progress_report.state = progress_report.STATE_FAILED
        report = progress_report.build_final_report()
        return report
示例3: test_limit
def test_limit(self, mock_get_model, mock_demo_objects, mock_rcu_objects):
    """
    Test that limits are applied properly to the results
    """
    repo = MagicMock(repo_id='foo')
    associations = []
    content_units = []
    # Build ten association/unit pairs; only the first five should come back.
    for index in range(10):
        uid = 'bar_%i' % index
        associations.append(model.RepositoryContentUnit(repo_id='foo',
                                                        unit_type_id='demo_model',
                                                        unit_id=uid))
        content_units.append(DemoModel(id=uid, key_field='key_%i' % index))
    mock_rcu_objects.return_value = associations
    mock_get_model.return_value = DemoModel
    mock_demo_objects.return_value = content_units
    limited = list(repo_controller.find_repo_content_units(repo, limit=5))
    self.assertEquals(5, len(limited))
    self.assertEquals('bar_0', limited[0].unit_id)
    self.assertEquals('bar_4', limited[4].unit_id)
示例4: _filter_missing_isos
def _filter_missing_isos(self, manifest, download_deferred):
    """
    Use the sync_conduit and the manifest to determine which ISOs are at the feed_url
    that are not in our local store, as well as which ISOs are in our local store that are not
    available at the feed_url.

    :param manifest: An ISOManifest describing the ISOs that are available at the
                     feed_url that we are synchronizing with
    :type manifest: pulp_rpm.plugins.db.models.ISOManifest
    :param download_deferred: indicates downloading is deferred (or not).
    :type download_deferred: bool
    :return: A 3-tuple. The first element of the tuple is a list of ISOs that we should
             retrieve from the feed_url. The second element of the tuple is a list of
             Units that are available locally already, but are not currently associated
             with the repository. The third element of the tuple is a list of Units that
             represent the ISOs that we have in our local repo that were not found in
             the remote repo.
    :rtype: tuple
    """
    # All the ISOs we have in Pulp whose file actually exists on disk (when
    # downloading is deferred, nothing qualifies).
    existing_units = models.ISO.objects()
    existing_units_by_key = dict([(unit.unit_key_str, unit)
                                  for unit in existing_units if not download_deferred and
                                  os.path.isfile(unit.storage_path)])
    # Derive the key set from the dict instead of iterating (and rewinding)
    # the queryset a second time with the identical filter.
    existing_unit_keys = set(existing_units_by_key)
    # A list of units currently associated with the repository
    existing_repo_units = list(repo_controller.find_repo_content_units(
        self.sync_conduit.repo, yield_content_unit=True))
    existing_repo_units_by_key = dict([(unit.unit_key_str, unit)
                                       for unit in existing_repo_units])
    existing_repo_unit_keys = set(existing_repo_units_by_key)
    # The ISOs in the remote repository
    available_isos_by_key = dict([(iso.unit_key_str, iso) for iso in manifest])
    available_iso_keys = set(available_isos_by_key)
    # Content that is available locally and just needs to be associated with the repository
    local_available_iso_keys = ((available_iso_keys & existing_unit_keys) -
                                existing_repo_unit_keys)
    local_available_units = [existing_units_by_key[k] for k in local_available_iso_keys]
    # Content that is missing locally and must be downloaded
    local_missing_iso_keys = list(available_iso_keys - existing_unit_keys)
    local_missing_isos = [available_isos_by_key[k] for k in local_missing_iso_keys]
    # Content that is missing from the remote repository that is present locally
    remote_missing_unit_keys = list(existing_repo_unit_keys - available_iso_keys)
    remote_missing_units = [existing_repo_units_by_key[k] for k in remote_missing_unit_keys]
    return local_missing_isos, local_available_units, remote_missing_units
示例5: test_repo_content_units_query
def test_repo_content_units_query(self, mock_rcu_objects):
    """
    Test the query parameters for the RepositoryContentUnit
    """
    mock_repo = MagicMock(repo_id='foo')
    type_query = mongoengine.Q(unit_type_id='demo_model')
    list(repo_controller.find_repo_content_units(mock_repo, repo_content_unit_q=type_query))
    # The association lookup must be scoped to the repo and carry our Q filter.
    passed_kwargs = mock_rcu_objects.call_args[1]
    self.assertEquals('foo', passed_kwargs['repo_id'])
    self.assertEquals(type_query, passed_kwargs['q_obj'])
示例6: _retrieve_repo_modules
def _retrieve_repo_modules(self):
    """
    Retrieves all modules in the repository.

    :return: list of modules in the repository; empty list if there are none
    :rtype: list of pulp_puppet.plugins.db.models.Module objects
    """
    # Materialize the generator so callers get a plain list.
    return list(find_repo_content_units(self.repo, yield_content_unit=True))
示例7: _do_import_modules
def _do_import_modules(self, metadata):
    """
    Actual logic of the import. This method will do a best effort per module;
    if an individual module fails it will be recorded and the import will
    continue. This method will only raise an exception in an extreme case
    where it cannot react and continue.

    :param metadata: feed metadata whose ``modules`` entries expose
        ``unit_key_as_named_tuple`` — assumed; confirm against caller
    """
    downloader = self._create_downloader()
    self.downloader = downloader
    # Ease module lookup
    metadata_modules_by_key = dict([(m.unit_key_as_named_tuple, m) for m in metadata.modules])
    # Collect information about the repository's modules before changing it
    existing_module_ids_by_key = {}
    modules = repo_controller.find_repo_content_units(
        self.repo.repo_obj, unit_fields=Module.unit_key_fields, yield_content_unit=True)
    for module in modules:
        existing_module_ids_by_key[module.unit_key_as_named_tuple] = module.id
    new_unit_keys = self._resolve_new_units(existing_module_ids_by_key.keys(),
                                            metadata_modules_by_key.keys())
    # Once we know how many things need to be processed, we can update the progress report
    self.progress_report.modules_total_count = len(new_unit_keys)
    self.progress_report.modules_finished_count = 0
    self.progress_report.modules_error_count = 0
    self.progress_report.update_progress()
    # Add new units
    for key in new_unit_keys:
        if self._canceled:
            # Stop as soon as a cancel is requested; removal below is skipped
            # only if _should_remove_missing() says so, not by the cancel.
            break
        module = metadata_modules_by_key[key]
        try:
            self._add_new_module(downloader, module)
            self.progress_report.modules_finished_count += 1
        except Exception as e:
            # Best-effort: record the failure (with traceback) and continue
            self.progress_report.add_failed_module(module, e, sys.exc_info()[2])
        self.progress_report.update_progress()
    # Remove missing units if the configuration indicates to do so
    if self._should_remove_missing():
        remove_unit_keys = self._resolve_remove_units(existing_module_ids_by_key.keys(),
                                                      metadata_modules_by_key.keys())
        doomed_ids = [existing_module_ids_by_key[key] for key in remove_unit_keys]
        # itervalues(): Python 2 lazy iteration over the fetched modules
        doomed_module_iterator = Module.objects.in_bulk(doomed_ids).itervalues()
        repo_controller.disassociate_units(self.repo.repo_obj, doomed_module_iterator)
    self.downloader = None
示例8: get_iterator
def get_iterator(self):
    """
    This method returns a generator to loop over items.
    The items created by this generator will be iterated over by the process_main method.

    :return: a list or other iterable
    :rtype: iterator of pulp_docker.plugins.db.models.DockerImage
    """
    image_query = Q(unit_type_id=constants.IMAGE_TYPE_ID)
    return repo_controller.find_repo_content_units(self.get_repo(),
                                                   repo_content_unit_q=image_query,
                                                   yield_content_unit=True)
示例9: _get_units
def _get_units(self):
    """
    Get the collection of units to be published.
    The collection contains only the newest unit for each branch.

    :return: An iterable of units to publish.
    :rtype: iterable
    """
    branch_query = Q(unit_type_id=constants.OSTREE_TYPE_ID)
    associations = find_repo_content_units(self.get_repo(),
                                           repo_content_unit_q=branch_query)
    candidates = [association.unit for association in associations]
    candidates.sort(key=lambda unit: unit.created)
    # Iterating oldest-to-newest means the last write per branch wins, i.e.
    # each branch maps to its most recently created unit.
    newest_by_branch = {}
    for unit in candidates:
        newest_by_branch[unit.branch] = unit
    return newest_by_branch.values()
示例10: publish_repo
def publish_repo(self, repo, publish_conduit, config):
    """
    Publish the repository by "installing" each puppet module into the given
    destination directory. This effectively means extracting each module's
    tarball in that directory.

    :param repo: plugin repository object
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: provides access to relevant Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginConfiguration
    :return: report describing the publish run
    :rtype: pulp.plugins.model.PublishReport
    """
    # get dir from config
    destination = config.get(constants.CONFIG_INSTALL_PATH)
    subdir = config.get(constants.CONFIG_SUBDIR)
    if not destination:
        # A destination is mandatory; fail fast before touching any units.
        return publish_conduit.build_failure_report(_('install path not provided'),
                                                    self.detail_report.report)
    if subdir:
        destination = os.path.join(destination, subdir)
    units = list(repo_controller.find_repo_content_units(repo.repo_obj,
                                                         yield_content_unit=True))
    # Units that share a name cannot coexist in one install tree.
    duplicate_units = self._find_duplicate_names(units)
    if duplicate_units:
        for unit in duplicate_units:
            self.detail_report.error(unit.unit_key,
                                     'another unit in this repo also has this name')
        return publish_conduit.build_failure_report(_('duplicate unit names'),
                                                    self.detail_report.report)
    # check for unsafe paths in tarballs, and fail early if problems are found
    self._check_for_unsafe_archive_paths(units, destination)
    if self.detail_report.has_errors:
        return publish_conduit.build_failure_report('failed', self.detail_report.report)
    # ensure the destination directory exists
    try:
        mkdir(destination)
        temporarydestination = self._create_temporary_destination_directory(destination)
    except OSError, e:
        # Python 2 except syntax; directory creation failure aborts the run.
        return publish_conduit.build_failure_report(
            _('failed to create destination directory: %s') % str(e), self.detail_report.report)
    # NOTE(review): the remainder of this method (the actual extraction into
    # temporarydestination and the success report) is truncated in this excerpt.
示例11: install_units
def install_units(self, consumer, units, options, config, conduit):
    """
    Inspect the options, and if constants.WHOLE_REPO_ID has a non-False
    value, replace the list of units with a list of all units in the given
    repository. Omits version numbers, which allows the install tool to
    automatically choose the most recent version of each.

    :param consumer: A consumer.
    :type consumer: pulp.plugins.model.Consumer
    :param units: A list of content units to be installed.
    :type units: list of: { type_id:<str>, unit_key:<dict> }
    :param options: Install options; based on unit type.
    :type options: dict
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param conduit: provides access to relevant Pulp functionality
    :type conduit: pulp.plugins.conduits.profiler.ProfilerConduit
    :return: The translated units
    :rtype: list of: {'type_id': <str>, unit_key: {'author': <author>, 'name': <name>}
    """
    repo_id = options.get(constants.REPO_ID_OPTION)
    self._inject_forge_settings(options)
    # Guard clause: without the whole-repo flag and a repo id, pass units through.
    if not (options.get(constants.WHOLE_REPO_OPTION) and repo_id):
        return units
    msg = _('installing whole repo %(repo_id)s on %(consumer_id)s')
    _LOGGER.debug(msg, {'repo_id': repo_id, 'consumer_id': consumer.id})
    repo = Repository.objects.get(repo_id=repo_id)
    # Keyed by "author/name" so each module appears once, without a version.
    translated = {}
    for module in find_repo_content_units(repo, yield_content_unit=True):
        full_name = '%s/%s' % (module.author, module.name)
        translated[full_name] = {
            'unit_key': {'author': module.author, 'name': module.name},
            'type_id': constants.TYPE_PUPPET_MODULE
        }
    return translated.values()
示例12: _build_source_with_provides
def _build_source_with_provides(self):
    """
    Get a list of all available packages with their "Provides" info.

    Note that the 'provides' metadata will be flattened via _trim_provides().

    :return: list of pulp_rpm.plugins.db.models.RPM
    :rtype: list
    """
    wanted_fields = list(models.RPM.unit_key_fields) + [
        'provides', 'version_sort_index', 'release_sort_index']
    rpm_filter = mongoengine.Q(unit_type_id=ids.TYPE_ID_RPM)
    found = repo_controller.find_repo_content_units(
        repository=self.source_repo, repo_content_unit_q=rpm_filter,
        unit_fields=wanted_fields, yield_content_unit=True)
    return [self._trim_provides(unit) for unit in found]
示例13: _unit_generator
def _unit_generator(self, fields):
    """
    Yields RPM content units in the current source repo with the specified fields.

    :param fields: list of fields to include in the yielded units
    :type fields: list
    :return: iterable of pulp_rpm.plugins.db.models.RPM
    :rtype: generator
    """
    # integration point with repo_controller, ideal for mocking in testing
    rpm_filter = mongoengine.Q(unit_type_id=ids.TYPE_ID_RPM)
    return repo_controller.find_repo_content_units(repository=self.source_repo,
                                                   repo_content_unit_q=rpm_filter,
                                                   unit_fields=fields,
                                                   yield_content_unit=True)
示例14: _associate_unit
def _associate_unit(self, repo, unit):
    """
    Associate an iso unit with a repository but first check if there's already any with the same
    name and if so, remove them.

    :param repo: An ISO repository that is being synced
    :type repo: pulp.server.db.model.Repository
    :param unit: An ISO unit to associate with repo
    :type unit: pulp_rpm.plugins.db.models.ISO
    """
    if not self.repo_units:
        # Store the existing repo units to prevent querying mongo multiple
        # times. Materialize the generator into a list: the previous code
        # cached the generator object itself, which is always truthy and is
        # exhausted after the first call's list comprehension — so every
        # subsequent call searched an empty sequence and duplicate-named
        # units were never removed.
        self.repo_units = list(repo_controller.find_repo_content_units(
            repo, yield_content_unit=True))
    units_to_remove = [iso for iso in self.repo_units if iso['name'] == unit['name']]
    repo_controller.disassociate_units(repo, units_to_remove)
    repo_controller.associate_single_unit(repo, unit)
示例15: copy_units
def copy_units(import_conduit, units):
    """
    Copies puppet modules from one repo into another. There is nothing that
    the importer needs to do; it maintains no state in the working directory
    so the process is to simply tell Pulp to import each unit specified.
    """
    if units is None:
        # No explicit selection: copy every unit in the source repository.
        source_repo = Repository.objects.get(repo_id=import_conduit.source_repo_id)
        units = find_repo_content_units(source_repo, yield_content_unit=True)
    # Associate each unit with the new repository, collecting what was copied.
    copied = []
    for unit in units:
        copied.append(unit)
        import_conduit.associate_unit(unit)
    return copied