This article collects typical usage examples of the IMasterStore.find method from the Python module lp.services.database.interfaces. If you have been wondering what IMasterStore.find does, how to call it, or what it looks like in real code, the curated examples below should help. You can also read further about the containing class, lp.services.database.interfaces.IMasterStore.
The following presents 15 code examples of the IMasterStore.find method, drawn from the Launchpad codebase.
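Before the examples, a minimal sketch of the general pattern may help. It is not taken from Launchpad itself, and SomeModel and some_id are placeholders for a real Storm model class and value: adapting a model class to IMasterStore returns the writable master store for that class, and find() returns a lazy ResultSet that can be refined or consumed.
store = IMasterStore(SomeModel)   # writable master store for the class
result = store.find(SomeModel, SomeModel.id == some_id)   # lazy ResultSet
result.count()   # issues SELECT COUNT(*)
result.first()   # first row under the current ordering, or None
result.one()     # the single matching row; raises if several match
The examples below exercise these same calls against real Launchpad model classes such as AnswerContact, LoginToken, and CodeImportResult.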
Example 1: _test_AnswerContactPruner
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def _test_AnswerContactPruner(self, status, interval, expected_count=0):
    # Garbo should remove answer contacts for accounts with the given
    # 'status' that was set more than 'interval' days ago.
    switch_dbuser('testadmin')
    store = IMasterStore(AnswerContact)
    person = self.factory.makePerson()
    person.addLanguage(getUtility(ILanguageSet)['en'])
    question = self.factory.makeQuestion()
    with person_logged_in(question.owner):
        question.target.addAnswerContact(person, person)
    Store.of(question).flush()
    self.assertEqual(
        store.find(
            AnswerContact,
            AnswerContact.person == person.id).count(),
        1)
    account = person.account
    account.status = status
    # We flush because a trigger sets date_status_set, and we need to
    # modify it ourselves.
    Store.of(account).flush()
    if interval is not None:
        account.date_status_set = interval
    self.runDaily()
    switch_dbuser('testadmin')
    self.assertEqual(
        store.find(
            AnswerContact,
            AnswerContact.person == person.id).count(),
        expected_count)
Example 2: test_LoginTokenPruner
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def test_LoginTokenPruner(self):
    store = IMasterStore(LoginToken)
    now = datetime.now(UTC)
    switch_dbuser('testadmin')
    # It is configured as a daily task.
    self.assertTrue(
        LoginTokenPruner in DailyDatabaseGarbageCollector.tunable_loops)
    # Create a token that will be pruned.
    old_token = LoginToken(
        email='whatever', tokentype=LoginTokenType.NEWACCOUNT)
    old_token.date_created = now - timedelta(days=666)
    old_token_id = old_token.id
    store.add(old_token)
    # Create a token that will not be pruned.
    current_token = LoginToken(
        email='whatever', tokentype=LoginTokenType.NEWACCOUNT)
    current_token_id = current_token.id
    store.add(current_token)
    # Run the pruner. Batching is tested by the BulkPruner tests, so
    # there is no need to repeat that here.
    switch_dbuser('garbo_daily')
    pruner = LoginTokenPruner(logging.getLogger('garbo'))
    while not pruner.isDone():
        pruner(10)
    pruner.cleanUp()
    # Only the old LoginToken is gone.
    self.assertEqual(
        store.find(LoginToken, id=old_token_id).count(), 0)
    self.assertEqual(
        store.find(LoginToken, id=current_token_id).count(), 1)
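Note the two filter styles find() accepts, both used across these examples: keyword arguments are shorthand for equality tests against columns of the model class, while explicit Storm expressions allow arbitrary comparisons. A quick sketch, not from the original source:
store.find(LoginToken, id=old_token_id)                  # keyword shorthand
store.find(LoginToken, LoginToken.id == old_token_id)    # equivalent expression
The expression form is required for anything beyond equality, such as the date comparisons in the pruner examples that follow.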
Example 3: test_BranchJobPruner
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def test_BranchJobPruner(self):
    # Garbo should remove jobs completed over 30 days ago.
    switch_dbuser('testadmin')
    store = IMasterStore(Job)
    db_branch = self.factory.makeAnyBranch()
    db_branch.branch_format = BranchFormat.BZR_BRANCH_5
    db_branch.repository_format = RepositoryFormat.BZR_KNIT_1
    Store.of(db_branch).flush()
    branch_job = BranchUpgradeJob.create(
        db_branch, self.factory.makePerson())
    branch_job.job.date_finished = THIRTY_DAYS_AGO
    self.assertEqual(
        store.find(
            BranchJob,
            BranchJob.branch == db_branch.id).count(),
        1)
    self.runDaily()
    switch_dbuser('testadmin')
    self.assertEqual(
        store.find(
            BranchJob,
            BranchJob.branch == db_branch.id).count(),
        0)
Example 4: test_store_disconnected_after_request_handled_logs_oops
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def test_store_disconnected_after_request_handled_logs_oops(self):
    # Bug #504291 was that a Store was being left in a disconnected
    # state after a request, causing subsequent requests handled by that
    # thread to fail. We detect this state in endRequest and log an
    # OOPS to help track down the trigger.
    request = LaunchpadTestRequest()
    publication = WebServicePublication(None)
    dbadapter.set_request_started()
    # Disconnect a store.
    store = IMasterStore(EmailAddress)
    store._connection._state = STATE_DISCONNECTED
    # Invoke the endRequest hook.
    publication.endRequest(request, None)
    self.assertEqual(1, len(self.oopses))
    oops = self.oopses[0]
    # Ensure the OOPS mentions the correct exception.
    self.assertStartsWith(oops['value'], "Bug #504291")
    # Ensure the store has been rolled back and is in a usable state.
    self.assertEqual(store._connection._state, STATE_RECONNECT)
    store.find(EmailAddress).first()  # Confirms the Store is working.
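The closing store.find(EmailAddress).first() is not a data assertion; it simply forces a real query, confirming that endRequest rolled the store's connection back into a usable state.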
Example 5: test_CodeImportResultPruner
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def test_CodeImportResultPruner(self):
    now = datetime.now(UTC)
    store = IMasterStore(CodeImportResult)
    results_to_keep_count = (
        config.codeimport.consecutive_failure_limit - 1)
    switch_dbuser('testadmin')
    code_import_id = self.factory.makeCodeImport().id
    machine_id = self.factory.makeCodeImportMachine().id
    requester_id = self.factory.makePerson().id
    transaction.commit()

    def new_code_import_result(timestamp):
        switch_dbuser('testadmin')
        CodeImportResult(
            date_created=timestamp,
            code_importID=code_import_id, machineID=machine_id,
            requesting_userID=requester_id,
            status=CodeImportResultStatus.FAILURE,
            date_job_started=timestamp)
        transaction.commit()

    new_code_import_result(now - timedelta(days=60))
    for i in range(results_to_keep_count - 1):
        new_code_import_result(now - timedelta(days=19 + i))
    # Run the garbage collector.
    self.runDaily()
    # Nothing is removed, because we always keep the
    # ``results_to_keep_count`` latest.
    store = IMasterStore(CodeImportResult)
    self.failUnlessEqual(
        results_to_keep_count,
        store.find(CodeImportResult).count())
    new_code_import_result(now - timedelta(days=31))
    self.runDaily()
    store = IMasterStore(CodeImportResult)
    self.failUnlessEqual(
        results_to_keep_count,
        store.find(CodeImportResult).count())
    new_code_import_result(now - timedelta(days=29))
    self.runDaily()
    store = IMasterStore(CodeImportResult)
    self.failUnlessEqual(
        results_to_keep_count,
        store.find(CodeImportResult).count())
    # We now have no CodeImportResults older than 30 days.
    self.failUnless(
        store.find(
            Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
        >= now - timedelta(days=30))
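The final assertion also shows that find() can select an expression instead of a model class: store.find(Min(CodeImportResult.date_created)) produces a one-column result whose single value is retrieved with .one(). Example 9 repeats the same aggregate pattern for CodeImportEvent.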
Example 6: pruneRevisionCache
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def pruneRevisionCache(limit):
    """See `IRevisionSet`."""
    # Storm doesn't support removing a limited result set:
    #   FeatureError: Can't remove a sliced result set
    store = IMasterStore(RevisionCache)
    epoch = datetime.now(tz=pytz.UTC) - timedelta(days=30)
    subquery = Select(
        [RevisionCache.id],
        RevisionCache.revision_date < epoch,
        limit=limit)
    store.find(RevisionCache, RevisionCache.id.is_in(subquery)).remove()
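This is the standard workaround for the FeatureError quoted in the comment: Storm will not remove() a sliced result set, so the LIMIT is pushed down into a Select subquery over the ids, and the outer find(...).remove() deletes only the rows whose id appears in that subquery, bounding each call to at most limit rows.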
Example 7: __init__
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
class MigrateCurrentFlagProcess:
    """Mark all translations as is_imported if they are is_current.

    Processes only translations for upstream projects, since Ubuntu
    source packages need no migration.
    """

    def __init__(self, transaction, logger=None):
        self.transaction = transaction
        self.logger = logger
        if logger is None:
            self.logger = logging.getLogger("migrate-current-flag")
        self.store = IMasterStore(Product)

    def getProductsWithTemplates(self):
        """Get Product.ids for projects with any translation templates."""
        return (
            self.store.find(
                Product,
                POTemplate.productseriesID == ProductSeries.id,
                ProductSeries.productID == Product.id,
            )
            .group_by(Product)
            .having(Count(POTemplate.id) > 0)
        )

    def getCurrentNonimportedTranslations(self, product):
        """Get TranslationMessage.ids that need migration for a `product`."""
        return self.store.find(
            TranslationMessage.id,
            TranslationMessage.is_current_ubuntu == True,
            TranslationMessage.is_current_upstream == False,
            (TranslationMessage.potmsgsetID ==
                TranslationTemplateItem.potmsgsetID),
            TranslationTemplateItem.potemplateID == POTemplate.id,
            POTemplate.productseriesID == ProductSeries.id,
            ProductSeries.productID == product.id,
        ).config(distinct=True)

    def run(self):
        products_with_templates = list(self.getProductsWithTemplates())
        total_products = len(products_with_templates)
        if total_products == 0:
            self.logger.info("Nothing to do.")
        current_product = 0
        for product in products_with_templates:
            current_product += 1
            self.logger.info(
                "Migrating %s translations (%d of %d)..." % (
                    product.name, current_product, total_products))
            tm_ids = self.getCurrentNonimportedTranslations(product)
            tm_loop = TranslationMessageImportedFlagUpdater(
                self.transaction, self.logger, tm_ids)
            DBLoopTuner(tm_loop, 5, minimum_chunk_size=100).run()
        self.logger.info("Done.")
Example 8: __call__
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def __call__(self, chunk_size):
    """Take a batch of targets and update their BugTasks' name caches.

    See `ITunableLoop`.
    """
    # XXX 2008-03-05 gmb:
    #     We cast chunk_size to an integer to ensure that we're not
    #     trying to slice using floats or anything similarly
    #     foolish. We shouldn't have to do this, but bug #198767
    #     means that we do.
    chunk_size = int(chunk_size)
    start = self.offset
    end = self.offset + chunk_size
    chunk = self.candidates[start:end]
    self.transaction.begin()
    store = IMasterStore(BugTask)
    # Transpose the target rows into lists of object IDs to retrieve.
    ids_to_cache = zip(*(target for (target, names) in chunk))
    for index, cls in enumerate(target_classes):
        # Get all of the objects that we will need into the cache.
        list(store.find(cls, cls.id.is_in(set(ids_to_cache[index]))))
    for target_bits, cached_names in chunk:
        self.offset += 1
        # Resolve the IDs to objects, and get the actual IBugTarget.
        # If the ID is None, don't even try to get an object.
        target_objects = (
            (store.get(cls, id) if id is not None else None)
            for cls, id in zip(target_classes, target_bits))
        target = bug_target_from_key(*target_objects)
        new_name = target.bugtargetdisplayname
        cached_names.discard(new_name)
        # If there are any outdated names cached, update them all in
        # a single query.
        if len(cached_names) > 0:
            self.logger.info(
                "Updating %r to '%s'." % (tuple(cached_names), new_name))
            self.total_updated += len(cached_names)
            conditions = (
                col == id for col, id in zip(target_columns, target_bits))
            to_update = store.find(
                BugTask,
                BugTask.targetnamecache.is_in(cached_names),
                *conditions)
            to_update.set(targetnamecache=new_name)
    self.logger.info("Checked %i targets." % len(chunk))
    self.transaction.commit()
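The notable call is to_update.set(targetnamecache=new_name): instead of iterating over the matching BugTasks, Storm compiles the ResultSet plus set() into a single UPDATE statement covering every row matched by the find() conditions.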
Example 9: test_CodeImportEventPruner
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def test_CodeImportEventPruner(self):
    now = datetime.now(UTC)
    store = IMasterStore(CodeImportResult)
    switch_dbuser('testadmin')
    machine = self.factory.makeCodeImportMachine()
    requester = self.factory.makePerson()
    # Create 6 code import events for this machine, 3 on each side of
    # 30 days. Use the event set so that the extra event data rows get
    # created too.
    event_set = getUtility(ICodeImportEventSet)
    for age in (35, 33, 31, 29, 27, 15):
        event_set.newOnline(
            machine, user=requester, message='Hello',
            _date_created=(now - timedelta(days=age)))
    transaction.commit()
    # Run the garbage collector.
    self.runDaily()
    # Only the three most recent events are left.
    events = list(machine.events)
    self.assertEqual(3, len(events))
    # We now have no CodeImportEvents older than 30 days.
    self.failUnless(
        store.find(
            Min(CodeImportEvent.date_created)).one().replace(tzinfo=UTC)
        >= now - timedelta(days=30))
Example 10: iterReady
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def iterReady(cls):
    """See `IJobSource`."""
    store = IMasterStore(QuestionJob)
    jobs = store.find(
        QuestionJob,
        And(QuestionJob.job_type == cls.class_job_type,
            QuestionJob.job_id.is_in(Job.ready_jobs)))
    return (cls(job) for job in jobs)
Example 11: iterReady
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def iterReady(cls):
    """Iterate through all ready ProductJobs."""
    store = IMasterStore(ProductJob)
    jobs = store.find(
        ProductJob,
        And(ProductJob.job_type == cls.class_job_type,
            ProductJob.job_id.is_in(Job.ready_jobs)))
    return (cls(job) for job in jobs)
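Examples 10 and 11 are the same IJobSource idiom applied to two different job tables: find() filters on the concrete job_type, restricts job_id to the Job.ready_jobs subselect, and the generator expression wraps each raw row in the higher-level job class.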
Example 12: getRecentBuilds
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def getRecentBuilds(cls, requester, recipe, distroseries, _now=None):
    if _now is None:
        _now = datetime.now(pytz.UTC)
    store = IMasterStore(SourcePackageRecipeBuild)
    old_threshold = _now - timedelta(days=1)
    return store.find(
        cls,
        cls.distroseries_id == distroseries.id,
        cls.requester_id == requester.id,
        cls.recipe_id == recipe.id,
        cls.date_created > old_threshold)
Example 13: _getOldestLiveRequest
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def _getOldestLiveRequest(self):
    """Return the oldest live request on the master store.

    Due to replication lag, the master store is always a little
    ahead of the slave store that exports come from.
    """
    master_store = IMasterStore(POExportRequest)
    sorted_by_id = master_store.find(POExportRequest).order_by(
        POExportRequest.id)
    return sorted_by_id.first()
Example 14: cleanupAssociations
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def cleanupAssociations(self):
    """See `OpenIDStore`."""
    store = IMasterStore(self.Association)
    now = int(time.time())
    expired = store.find(
        self.Association,
        self.Association.issued + self.Association.lifetime < now)
    count = expired.count()
    if count > 0:
        expired.remove()
    return count
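The expiry test is evaluated in SQL: issued and lifetime are stored as seconds, so self.Association.issued + self.Association.lifetime < now selects associations that have already lapsed. Counting before remove() costs an extra query, but lets the method return how many associations were purged.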
Example 15: getMatchingDSD
# Required import: from lp.services.database.interfaces import IMasterStore [as alias]
# Or: from lp.services.database.interfaces.IMasterStore import find [as alias]
def getMatchingDSD(self):
    """Find an existing `DistroSeriesDifference` for this difference."""
    spn_id = self.metadata["sourcepackagename"]
    parent_id = self.metadata["parent_series"]
    store = IMasterStore(DistroSeriesDifference)
    search = store.find(
        DistroSeriesDifference,
        DistroSeriesDifference.derived_series == self.derived_series,
        DistroSeriesDifference.parent_series_id == parent_id,
        DistroSeriesDifference.source_package_name_id == spn_id)
    return search.one()
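Using search.one() rather than first() encodes the expectation that the (derived_series, parent_series, source_package_name) key matches at most one DistroSeriesDifference: one() returns None when nothing matches and raises NotOneError if the query somehow finds several rows.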