本文整理汇总了Python中pulp.plugins.types.database.type_units_collection函数的典型用法代码示例。如果您正苦于以下问题:Python type_units_collection函数的具体用法?Python type_units_collection怎么用?Python type_units_collection使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了type_units_collection函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: link_referenced_content_units
def link_referenced_content_units(self, from_type, from_id, to_type, to_ids):
    """
    Link referenced content units to a parent content unit.

    @param from_type: unique id of the parent content collection
    @type from_type: str
    @param from_id: unique id of the parent content unit
    @type from_id: str
    @param to_type: unique id of the child content collection
    @type to_type: str
    @param to_ids: list of unique ids of child content units
    @type to_ids: list or tuple of str
    @raise InvalidValue: if no parent unit with from_id exists in from_type
    """
    collection = content_types_db.type_units_collection(from_type)
    parent = collection.find_one({'_id': from_id})
    if parent is None:
        raise InvalidValue(['from_type'])
    parent_type_def = content_types_db.type_definition(from_type)
    if to_type not in parent_type_def['referenced_types']:
        # TODO(review): raise a specific, descriptive exception type here
        raise Exception()
    # accumulate child ids on the parent document, skipping duplicates
    children = parent.setdefault('_%s_references' % to_type, [])
    for id_ in to_ids:
        if id_ in children:
            continue
        children.append(id_)
    collection.update({'_id': from_id}, parent, safe=True)
示例2: test_migrate_category
def test_migrate_category(self):
    """
    A category unit associated with two repos is split into one copy per repo
    by the migration, with the associations re-pointed at the copies.
    """
    # Setup: one category associated with both the source and dest repos
    category_id = add_unit('c1', self.source_repo_id, ids.TYPE_ID_PKG_CATEGORY)
    associate_unit(category_id, self.source_repo_id, ids.TYPE_ID_PKG_CATEGORY)
    associate_unit(category_id, self.dest_repo_id, ids.TYPE_ID_PKG_CATEGORY)

    # Test
    self.migration.migrate()

    # Verify: two copies of the unit now exist, one per repo
    collection = types_db.type_units_collection(ids.TYPE_ID_PKG_CATEGORY)
    categories = collection.find({}).sort('repo_id', 1)
    self.assertEqual(2, categories.count())
    dest_cat, source_cat = categories[0], categories[1]  # sorted by repo_id
    self.assertEqual(dest_cat['id'], 'c1')
    self.assertEqual(dest_cat['repo_id'], self.dest_repo_id)
    self.assertEqual(source_cat['id'], 'c1')
    self.assertEqual(source_cat['repo_id'], self.source_repo_id)

    # Verify each repo's association points at its own copy
    query_manager = factory.repo_unit_association_query_manager()
    for repo_id, expected_cat in ((self.source_repo_id, source_cat),
                                  (self.dest_repo_id, dest_cat)):
        units = query_manager.get_units(repo_id)
        self.assertEqual(1, len(units))
        self.assertEqual(units[0]['unit_type_id'], ids.TYPE_ID_PKG_CATEGORY)
        self.assertEqual(units[0]['unit_id'], expected_cat['_id'])
示例3: generate_orphans_by_type
def generate_orphans_by_type(content_type_id, fields=None):
    """
    Return a generator of all orphaned content units of the given content type.

    If fields is not specified, only the `_id` field will be present.

    :param content_type_id: id of the content type
    :type content_type_id: basestring
    :param fields: list of fields to include in each content unit
    :type fields: list or None
    :return: generator of orphaned content units for the given content type
    :rtype: generator
    """
    if fields is None:
        fields = ['_id']
    units = content_types_db.type_units_collection(content_type_id)
    associations = RepoContentUnit.get_collection()
    for unit in units.find({}, fields=fields):
        # a unit with at least one repo association is not an orphan
        if associations.find({'unit_id': unit['_id']}).count() > 0:
            continue
        yield unit
示例4: test_update_no_changes
def test_update_no_changes(self):
    """
    Tests the common use case of loading type definitions that have been
    loaded already and have not changed.
    """
    # Setup
    defs = [DEF_1, DEF_2, DEF_3, DEF_4]
    types_db.update_database(defs)

    # Test: same definitions again, submitted in a different order
    types_db.update_database([DEF_4, DEF_3, DEF_2, DEF_1])

    # Verify
    collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(defs), len(collection_names))
    for type_def in defs:
        self.assertTrue(types_db.unit_collection_name(type_def.id) in collection_names)
        # Quick sanity check on the indexes: _id + unit key + all search indexes
        indexes = types_db.type_units_collection(type_def.id).index_information()
        self.assertEqual(1 + 1 + len(type_def.search_indexes), len(indexes))
示例5: test_update_missing_no_error
def test_update_missing_no_error(self):
    """
    Tests that updating a previously loaded database with some missing
    definitions does not throw an error.
    """
    # Setup
    original_defs = [DEF_1, DEF_2, DEF_3]
    types_db.update_database(original_defs)

    # Test: an update that omits the previously loaded definitions
    new_defs = [DEF_4]
    types_db.update_database(new_defs)

    # Verify: the old collections are not deleted
    collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(original_defs) + len(new_defs), len(collection_names))
    for type_def in original_defs:
        self.assertTrue(types_db.unit_collection_name(type_def.id) in collection_names)
        # Quick sanity check on the indexes: _id + unit key + all search indexes
        indexes = types_db.type_units_collection(type_def.id).index_information()
        self.assertEqual(1 + 1 + len(type_def.search_indexes), len(indexes))
示例6: create_content_unit
def create_content_unit(unit_data, relative_path=None):
    """
    Insert a yum repo metadata file content unit into its type collection.

    Stamps the unit with a generated _id, its content type id, and the
    storage path derived from relative_path before inserting.

    :param unit_data: fields of the content unit to store
    :type unit_data: dict
    :param relative_path: relative path used to derive the storage path
    :type relative_path: str or None
    :return: the unit_data dict, updated in place with the stamped fields
    :rtype: dict
    """
    unit_data['_id'] = str(uuid.uuid4())
    unit_data['_content_type_id'] = _TYPE_YUM_REPO_METADATA_FILE
    unit_data['_storage_path'] = get_content_storage_path(relative_path)
    collection = types_database.type_units_collection(_TYPE_YUM_REPO_METADATA_FILE)
    collection.insert(unit_data, safe=True)
    return unit_data
示例7: delete_orphans_by_type
def delete_orphans_by_type(content_type_id, content_unit_ids=None):
    """
    Delete the orphaned content units for the given content type.

    If the content_unit_ids parameter is not None, it acts as a filter of
    the specific orphaned content units that may be deleted.

    NOTE: this method deletes the content unit's bits from disk, if applicable.

    :param content_type_id: id of the content type
    :type content_type_id: basestring
    :param content_unit_ids: list of content unit ids to delete; None means delete them all
    :type content_unit_ids: iterable or None
    :return: count of units deleted
    :rtype: int
    """
    collection = content_types_db.type_units_collection(content_type_id)
    deleted = 0
    orphans = OrphanManager.generate_orphans_by_type(
        content_type_id, fields=["_id", "_storage_path"])
    for orphan in orphans:
        if content_unit_ids is not None and orphan["_id"] not in content_unit_ids:
            continue
        # drop any deferred-download catalog entries for this unit first
        model.LazyCatalogEntry.objects(
            unit_id=orphan["_id"], unit_type_id=content_type_id).delete()
        collection.remove(orphan["_id"])
        path = orphan.get("_storage_path")
        if path is not None:
            OrphanManager.delete_orphaned_file(path)
        deleted += 1
    return deleted
示例8: _migrate_collection
def _migrate_collection(type_id):
    """
    Re-parse the stored primary repodata XML for every unit in the given type
    collection and backfill fields (size, sourcerpm, summary, provides,
    requires) that earlier versions did not track in the database.

    :param type_id: id of the unit type whose collection should be migrated
    :type type_id: str
    """
    collection = types_db.type_units_collection(type_id)
    for package in collection.find():
        # grab the raw XML and parse it into the elements we'll need later
        try:
            # make a guess at the encoding
            codec = 'UTF-8'
            package['repodata']['primary'].encode(codec)
        except UnicodeEncodeError:
            # best second guess we have, and it will never fail due to the
            # nature of the encoding.
            codec = 'ISO-8859-1'
            package['repodata']['primary'].encode(codec)
        fake_xml = FAKE_XML % {'encoding': codec, 'xml': package['repodata']['primary']}
        fake_element = ET.fromstring(fake_xml.encode(codec))
        utils.strip_ns(fake_element)
        primary_element = fake_element.find('package')
        format_element = primary_element.find('format')
        provides_element = format_element.find('provides')
        requires_element = format_element.find('requires')
        # add these attributes, which we previously didn't track in the DB.
        package['size'] = int(primary_element.find('size').attrib['package'])
        if type_id == 'rpm':
            package['sourcerpm'] = format_element.find('sourcerpm').text
            package['summary'] = primary_element.find('summary').text
        # re-parse provides and requires. The format changed from 2.1, and the
        # 2.1 upload workflow was known to produce invalid data for these fields.
        # Compare against None explicitly: ElementTree Element truth-testing is
        # False for a childless element and is deprecated; the result here is
        # identical (a childless element yields no 'entry' children anyway).
        package['provides'] = map(primary._process_rpm_entry_element,
                                  provides_element.findall('entry')) \
            if provides_element is not None else []
        package['requires'] = map(primary._process_rpm_entry_element,
                                  requires_element.findall('entry')) \
            if requires_element is not None else []
        collection.save(package)
示例9: list_content_units
def list_content_units(self, content_type, db_spec=None, model_fields=None, start=0, limit=None):
    """
    DEPRECATED!!! Please use find_by_criteria() instead.

    List the content units in a content type collection.

    @param content_type: unique id of content collection
    @type content_type: str
    @param db_spec: spec document used to filter the results,
                    None means no filter
    @type db_spec: None or dict
    @param model_fields: fields of each content unit to report,
                         None means all fields
    @type model_fields: None or list of str's
    @param start: offset from the beginning of the results to return as the
                  first element
    @type start: non-negative int
    @param limit: the maximum number of results to return,
                  None means no limit
    @type limit: None or non-negative int
    @return: list of content units in the content type collection that
             matches the parameters
    @rtype: (possibly empty) tuple of dicts
    """
    spec = {} if db_spec is None else db_spec
    collection = content_types_db.type_units_collection(content_type)
    cursor = collection.find(spec, fields=model_fields)
    if start > 0:
        cursor.skip(start)
    if limit is not None:
        cursor.limit(limit)
    return tuple(cursor)
示例10: _associated_units_by_type_cursor
def _associated_units_by_type_cursor(unit_type_id, criteria, associated_unit_ids):
    """
    Retrieve a pymongo cursor over the units of a single type, restricted to
    the given associated unit ids and filtered by the provided criteria.

    :type unit_type_id: str
    :type criteria: UnitAssociationCriteria
    :type associated_unit_ids: list
    :rtype: pymongo.cursor.Cursor
    """
    collection = types_db.type_units_collection(unit_type_id)
    spec = dict(criteria.unit_filters)
    spec['_id'] = {'$in': associated_unit_ids}
    fields = criteria.unit_fields
    if fields is not None and '_content_type_id' not in fields:
        # _content_type_id is required for looking up the association
        fields = list(fields) + ['_content_type_id']
    cursor = collection.find(spec, fields=fields)
    sort = criteria.unit_sort
    if sort is None:
        sort = [('_id', SORT_ASCENDING)]
    cursor.sort(sort)
    return cursor
示例11: delete_orphans_by_type
def delete_orphans_by_type(content_type_id, content_unit_ids=None):
    """
    Delete the orphaned content units for the given content type.

    If the content_unit_ids parameter is not None, it acts as a filter of
    the specific orphaned content units that may be deleted.

    NOTE: this method deletes the content unit's bits from disk, if applicable.

    :param content_type_id: id of the content type
    :type content_type_id: basestring
    :param content_unit_ids: list of content unit ids to delete; None means delete them all
    :type content_unit_ids: iterable or None
    """
    collection = content_types_db.type_units_collection(content_type_id)
    orphans = OrphanManager.generate_orphans_by_type(
        content_type_id, fields=['_id', '_storage_path'])
    for orphan in orphans:
        if content_unit_ids is not None and orphan['_id'] not in content_unit_ids:
            continue
        collection.remove(orphan['_id'], safe=False)
        path = orphan.get('_storage_path')
        if path is not None:
            OrphanManager.delete_orphaned_file(path)
示例12: delete_orphans_by_type
def delete_orphans_by_type(self, content_type_id, content_unit_ids=None, flush=True):
    """
    Delete the orphaned content units for the given content type.

    If the content_unit_ids parameter is not None, it acts as a filter of
    the specific orphaned content units that may be deleted.

    NOTE: this method deletes the content unit's bits from disk, if applicable.
    NOTE: `flush` should not be set to False unless you know what you're doing

    :param content_type_id: id of the content type
    :type content_type_id: basestring
    :param content_unit_ids: list of content unit ids to delete; None means delete them all
    :type content_unit_ids: iterable or None
    :param flush: flush the database updates to disk on completion
    :type flush: bool
    """
    collection = content_types_db.type_units_collection(content_type_id)
    orphans = self.generate_orphans_by_type(content_type_id,
                                            fields=['_id', '_storage_path'])
    for orphan in orphans:
        if content_unit_ids is not None and orphan['_id'] not in content_unit_ids:
            continue
        collection.remove(orphan['_id'], safe=False)
        path = orphan.get('_storage_path')
        if path is not None:
            self.delete_orphaned_file(path)
    if flush:
        # force the database to flush any cached changes to disk in the
        # background; for example: the unsafe removes in the loop above
        db_connection.flush_database()
示例13: delete_orphans_by_id
def delete_orphans_by_id(self, orphans):
    """
    Delete a list of orphaned content units by their content type and unit ids.

    @param orphans: list of documents with 'content_type_id' and 'unit_id' keys
    @type orphans: list
    @raise pulp_exceptions.InvalidValue: if any document is missing either key
    """
    # XXX this does no validation that the units really are orphans
    # group the unit ids by content type for batched removal
    unit_ids_by_type = {}
    for orphan in orphans:
        if 'content_type_id' not in orphan or 'unit_id' not in orphan:
            raise pulp_exceptions.InvalidValue(['content_type_id', 'unit_id'])
        unit_ids_by_type.setdefault(orphan['content_type_id'], []).append(orphan['unit_id'])

    query_manager = manager_factory.content_query_manager()
    for content_type, unit_ids in unit_ids_by_type.items():
        # collect the on-disk paths before the db records disappear
        orphaned_paths = []
        for unit_id in unit_ids:
            unit = query_manager.get_content_unit_by_id(
                content_type, unit_id, model_fields=['_storage_path'])
            if unit['_storage_path'] is not None:
                orphaned_paths.append(unit['_storage_path'])
        # remove the orphans from the db in one batched call
        collection = content_types_db.type_units_collection(content_type)
        collection.remove({'_id': {'$in': unit_ids}}, safe=True)
        # delete the on-disk contents
        for path in orphaned_paths:
            self.delete_orphaned_file(path)
示例14: test_pulp_manage_db_loads_types
def test_pulp_manage_db_loads_types(self, listdir_mock):
    """
    Test calling pulp-manage-db imports types on a clean types database.
    """
    manage.main()

    collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(collection_names), 1)
    self.assertEqual(['units_test_type_id'], collection_names)

    # The type definition should have been stored verbatim
    definitions = types_db.all_type_definitions()
    self.assertEquals(len(definitions), 1)
    expected = json.loads(_test_type_json)['types'][0]
    for attribute in ['id', 'display_name', 'description', 'unit_key', 'search_indexes']:
        self.assertEquals(expected[attribute], definitions[0][attribute])

    # Now let's ensure that we have the correct indexes
    indexes = types_db.type_units_collection('test_type_id').index_information()
    self.assertEqual(indexes['_id_']['key'], [(u'_id', 1)])
    # The unit key must be a unique constraint over all three attributes
    unit_key_index = indexes['attribute_1_1_attribute_2_1_attribute_3_1']
    self.assertEqual(unit_key_index['unique'], True)
    self.assertEqual(unit_key_index['dropDups'], False)
    self.assertEqual(unit_key_index['key'],
                     [(u'attribute_1', 1), (u'attribute_2', 1), (u'attribute_3', 1)])
    # Search indexes must exist on attributes 1 and 3
    self.assertEqual(indexes['attribute_1_1']['dropDups'], False)
    self.assertEqual(indexes['attribute_1_1']['key'], [(u'attribute_1', 1)])
    self.assertEqual(indexes['attribute_3_1']['dropDups'], False)
    self.assertEqual(indexes['attribute_3_1']['key'], [(u'attribute_3', 1)])
    # Make sure we only have the indexes that we've hand inspected here
    self.assertEqual(indexes.keys(), [u'_id_', u'attribute_1_1_attribute_2_1_attribute_3_1',
                                      u'attribute_1_1', u'attribute_3_1'])
示例15: test_migrate
def test_migrate(self, start_logging_mock, listdir_mock, mock_plugin_definitions,
                 mock_drop_indices):
    """
    Ensure that migrate() imports types on a clean types database.
    """
    migration.migrate()

    self.assertTrue(mock_drop_indices.called)
    collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(collection_names), 1)
    self.assertEqual(['units_test_type_id'], collection_names)

    # The type definition should have been stored verbatim
    definitions = types_db.all_type_definitions()
    self.assertEquals(len(definitions), 1)
    expected = json.loads(_test_type_json)['types'][0]
    for attribute in ['id', 'display_name', 'description', 'unit_key', 'search_indexes']:
        self.assertEquals(expected[attribute], definitions[0][attribute])

    # Now let's ensure that we have the correct indexes
    indexes = types_db.type_units_collection('test_type_id').index_information()
    self.assertEqual(indexes['_id_']['key'], [(u'_id', 1)])
    # The unit key must be a unique constraint over all three attributes
    unit_key_index = indexes['attribute_1_1_attribute_2_1_attribute_3_1']
    self.assertEqual(unit_key_index['unique'], True)
    self.assertEqual(unit_key_index['key'],
                     [(u'attribute_1', 1), (u'attribute_2', 1), (u'attribute_3', 1)])
    # Search indexes must exist on attributes 1 and 3
    self.assertEqual(indexes['attribute_1_1']['key'], [(u'attribute_1', 1)])
    self.assertEqual(indexes['attribute_3_1']['key'], [(u'attribute_3', 1)])
    # Make sure we only have the indexes that we've hand inspected here
    self.assertEqual(indexes.keys(), [u'_id_', u'attribute_1_1_attribute_2_1_attribute_3_1',
                                      u'attribute_1_1', u'attribute_3_1'])