本文整理汇总了Python中WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter类的典型用法代码示例。如果您正苦于以下问题:Python RequestDBWriter类的具体用法?Python RequestDBWriter怎么用?Python RequestDBWriter使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了RequestDBWriter类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: StepChainParentageFixTask
class StepChainParentageFixTask(CherryPyPeriodicTask):
    """
    Updates StepChain parentage periodically.

    Finds StepChain workflows whose ParentageResolved flag is False,
    fixes the missing dataset parentage in DBS, and flips the flag to
    True once every child dataset of a workflow has been fixed.
    """

    def __init__(self, rest, config):
        super(StepChainParentageFixTask, self).__init__(config)
        self.reqmgrDB = RequestDBWriter(config.reqmgrdb_url)
        self.dbsSvc = DBS3Reader(config.dbs_url, logger=self.logger)
        # Only workflows in these states may still need a parentage fix
        self.statusToCheck = ["announced", "normal-archived"]

    def setConcurrentTasks(self, config):
        """
        Sets the list of functions which are executed periodically.
        """
        self.concurrentTasks = [{'func': self.fixStepChainParentage, 'duration': config.parentageFixDuration}]

    def fixStepChainParentage(self, config):
        """
        Look through the stepchain workflows with ParentageResolved flag is False.
        Fix the StepChain parentage and update the ParentageResolved flag to True
        """
        self.logger.info("Updating parentage for StepChain workflows for %s", self.statusToCheck)
        childDatasets = set()
        requests = set()
        requestsByChildDataset = {}
        for status in self.statusToCheck:
            reqByChildDS = getChildDatasetsForStepChainMissingParent(self.reqmgrDB, status)
            childDatasets = childDatasets.union(set(reqByChildDS.keys()))
            # We need to just get one of the StepChain workflow if multiple workflow contains the same datasets. (i.e. ACDC)
            requestsByChildDataset.update(reqByChildDS)
            for wfs in reqByChildDS.values():
                requests = requests.union(wfs)

        failedRequests = set()
        totalChildDS = len(childDatasets)
        fixCount = 0
        for childDS in childDatasets:
            start = int(time.time())
            failedBlocks = self.dbsSvc.fixMissingParentageDatasets(childDS, insertFlag=True)
            timeTaken = int(time.time()) - start
            if failedBlocks:
                self.logger.warning("Failed to fix the parentage for %s will be retried: time took: %s (sec)",
                                    failedBlocks, timeTaken)
                # Every workflow producing this dataset must be retried next cycle
                failedRequests = failedRequests.union(requestsByChildDataset[childDS])
            else:
                fixCount += 1
                self.logger.info("Fixed %s parentage: %s out of %s datasets. time took: %s (sec)",
                                 childDS, fixCount, totalChildDS, timeTaken)

        # Only flag requests for which every child dataset was fixed
        requestsToUpdate = requests - failedRequests
        for request in requestsToUpdate:
            self.reqmgrDB.updateRequestProperty(request, {"ParentageResolved": True})

        self.logger.info("Total %s requests' ParentageResolved flag is set to True", len(requestsToUpdate))
        self.logger.info("Total %s requests will be retried next cycle: %s", len(failedRequests), failedRequests)
示例2: moveToArchived
def moveToArchived(self, config):
    """
    Move aborted/rejected workflows that have no remaining jobs into
    their archived states.

    :param config: component configuration providing wmstats_url and
        reqmgrdb_url
    """
    testbedWMStats = WMStatsReader(config.wmstats_url, reqdbURL=config.reqmgrdb_url)
    reqdbWriter = RequestDBWriter(config.reqmgrdb_url)
    # Each source status is promoted through its target states in order
    statusTransition = {"aborted": ["aborted-completed", "aborted-archived"], "rejected": ["rejected-archived"]}
    for status, nextStatusList in statusTransition.items():
        requests = testbedWMStats.getRequestByStatus([status], jobInfoFlag=True, legacyFormat=True)
        # Lazy %-style args: formatting only happens if the record is emitted
        self.logger.info("checking %s workflows: %d", status, len(requests))
        if len(requests) > 0:
            requestCollection = RequestInfoCollection(requests)
            requestsDict = requestCollection.getData()
            numOfArchived = 0
            for requestName, requestInfo in requestsDict.items():
                # Only archive a workflow once all of its jobs are gone
                if requestInfo.getJobSummary().getTotalJobs() == 0:
                    for nextStatus in nextStatusList:
                        reqdbWriter.updateRequestStatus(requestName, nextStatus)
                    numOfArchived += 1
            # Fix typo in log message: "archieved" -> "archived"
            self.logger.info("Total %s-archived: %d", status, numOfArchived)
    return
示例3: __init__
def __init__(self, config):
    """
    Initialise class members

    Wires up WMBS and DBS3Buffer DAO factories, the (optional) work
    queue, the request CouchDB writer(s) and the ReqMgr service clients
    from the component configuration, then caches the "cleanout" job
    state ID for later use.
    """
    BaseWorkerThread.__init__(self)
    myThread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger, dbinterface=myThread.dbi)
    self.dbsDaoFactory = DAOFactory(
        package="WMComponent.DBS3Buffer", logger=myThread.logger, dbinterface=myThread.dbi
    )
    self.config = config
    self.jobCacheDir = self.config.JobCreator.jobCacheDir
    if getattr(self.config.TaskArchiver, "useWorkQueue", False) != False:
        # Get workqueue setup from config unless overridden
        if hasattr(self.config.TaskArchiver, "WorkQueueParams"):
            self.workQueue = localQueue(**self.config.TaskArchiver.WorkQueueParams)
        else:
            from WMCore.WorkQueue.WorkQueueUtils import queueFromConfig
            self.workQueue = queueFromConfig(self.config)
    else:
        self.workQueue = None
    self.timeout = getattr(self.config.TaskArchiver, "timeOut", None)
    self.useReqMgrForCompletionCheck = getattr(self.config.TaskArchiver, "useReqMgrForCompletionCheck", True)
    if not self.useReqMgrForCompletionCheck:
        # sets the local monitor summary couch db
        # NOTE(review): in this branch the local T0 request DB also serves
        # as the "central" writer — presumably the Tier0 deployment case
        self.requestLocalCouchDB = RequestDBWriter(
            self.config.AnalyticsDataCollector.localT0RequestDBURL,
            couchapp=self.config.AnalyticsDataCollector.RequestCouchApp,
        )
        self.centralCouchDBWriter = self.requestLocalCouchDB
    else:
        self.centralCouchDBWriter = RequestDBWriter(self.config.AnalyticsDataCollector.centralRequestDBURL)
        self.reqmgr2Svc = ReqMgr(self.config.TaskArchiver.ReqMgr2ServiceURL)
        # TODO: remove this when reqmgr2 replace reqmgr completely (reqmgr2Only)
        self.reqmgrSvc = RequestManager({"endpoint": self.config.TaskArchiver.ReqMgrServiceURL})
    # Load the cleanout state ID and save it
    stateIDDAO = self.daoFactory(classname="Jobs.GetStateID")
    self.stateID = stateIDDAO.execute("cleanout")
    return
示例4: __init__
def __init__(self, app, api, config, mount):
    """Bind this REST entity to the request and auxiliary CouchDB databases."""
    RESTEntity.__init__(self, app, api, config, mount)
    # Main CouchDB database where requests/workloads are stored
    self.reqmgr_db = api.db_handler.get_db(config.couch_reqmgr_db)
    self.reqmgr_db_service = RequestDBWriter(self.reqmgr_db, couchapp="ReqMgr")
    # Needed for the POST validation
    self.reqmgr_aux_db = api.db_handler.get_db(config.couch_reqmgr_aux_db)
示例5: __init__
def __init__(self, config):
    """
    _init_

    Wire up the Tier0Feeder component: WMBS DAOs, the local T0 request
    CouchDB writer, and connections to the HLT configuration, Storage
    Manager, PopCon log and T0 Data Service databases. The last two are
    optional and only set up when present in the configuration.
    """
    BaseWorkerThread.__init__(self)
    myThread = threading.currentThread()
    self.daoFactory = DAOFactory(package="T0.WMBS", logger=logging, dbinterface=myThread.dbi)
    self.tier0ConfigFile = config.Tier0Feeder.tier0ConfigFile
    self.specDirectory = config.Tier0Feeder.specDirectory
    self.dropboxuser = getattr(config.Tier0Feeder, "dropboxuser", None)
    self.dropboxpass = getattr(config.Tier0Feeder, "dropboxpass", None)
    self.transferSystemBaseDir = getattr(config.Tier0Feeder, "transferSystemBaseDir", None)
    if self.transferSystemBaseDir != None:
        # Discard the setting when the directory does not exist on this host
        if not os.path.exists(self.transferSystemBaseDir):
            self.transferSystemBaseDir = None
    self.dqmUploadProxy = getattr(config.Tier0Feeder, "dqmUploadProxy", None)
    self.serviceProxy = getattr(config.Tier0Feeder, "serviceProxy", None)
    # Local CouchDB tracking T0 request state
    self.localRequestCouchDB = RequestDBWriter(
        config.AnalyticsDataCollector.localT0RequestDBURL, couchapp=config.AnalyticsDataCollector.RequestCouchApp
    )
    hltConfConnectUrl = config.HLTConfDatabase.connectUrl
    dbFactoryHltConf = DBFactory(logging, dburl=hltConfConnectUrl, options={})
    dbInterfaceHltConf = dbFactoryHltConf.connect()
    daoFactoryHltConf = DAOFactory(package="T0.WMBS", logger=logging, dbinterface=dbInterfaceHltConf)
    self.getHLTConfigDAO = daoFactoryHltConf(classname="RunConfig.GetHLTConfig")
    storageManagerConnectUrl = config.StorageManagerDatabase.connectUrl
    dbFactoryStorageManager = DBFactory(logging, dburl=storageManagerConnectUrl, options={})
    self.dbInterfaceStorageManager = dbFactoryStorageManager.connect()
    self.getExpressReadyRunsDAO = None
    if hasattr(config, "PopConLogDatabase"):
        # Optional: PopCon log DB, used to discover express-ready runs
        popConLogConnectUrl = getattr(config.PopConLogDatabase, "connectUrl", None)
        if popConLogConnectUrl != None:
            dbFactoryPopConLog = DBFactory(logging, dburl=popConLogConnectUrl, options={})
            dbInterfacePopConLog = dbFactoryPopConLog.connect()
            daoFactoryPopConLog = DAOFactory(package="T0.WMBS", logger=logging, dbinterface=dbInterfacePopConLog)
            self.getExpressReadyRunsDAO = daoFactoryPopConLog(classname="Tier0Feeder.GetExpressReadyRuns")
    self.haveT0DataSvc = False
    if hasattr(config, "T0DataSvcDatabase"):
        # Optional: T0 Data Service DB
        t0datasvcConnectUrl = getattr(config.T0DataSvcDatabase, "connectUrl", None)
        if t0datasvcConnectUrl != None:
            self.haveT0DataSvc = True
            dbFactoryT0DataSvc = DBFactory(logging, dburl=t0datasvcConnectUrl, options={})
            dbInterfaceT0DataSvc = dbFactoryT0DataSvc.connect()
            self.daoFactoryT0DataSvc = DAOFactory(
                package="T0.WMBS", logger=logging, dbinterface=dbInterfaceT0DataSvc
            )
    return
示例6: T0RequestDBTest
class T0RequestDBTest(unittest.TestCase):
    """
    Tests for RequestDBWriter/RequestDBReader against the T0Request couchapp.
    """

    def setUp(self):
        """
        _setUp_

        Create a Couch database with the T0Request couchapp and the
        request reader/writer services pointing at it.
        """
        self.schema = []
        self.couchApps = ["T0Request"]
        self.testInit = TestInitCouchApp('RequestDBServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema,
                                useDefault=False)
        dbName = 't0_requsetdb_t'
        self.testInit.setupCouch(dbName, *self.couchApps)
        reqDBURL = "%s/%s" % (self.testInit.couchUrl, dbName)
        self.requestWriter = RequestDBWriter(reqDBURL, self.couchApps[0])
        self.requestReader = RequestDBReader(reqDBURL, self.couchApps[0])
        # Disable stale views so reads immediately see writes
        self.requestWriter.defaultStale = {}
        self.requestReader.defaultStale = {}
        return

    def tearDown(self):
        """
        _tearDown_

        Drop the Couch databases created in setUp.
        """
        self.testInit.tearDownCouch()

    def testRequestDBWriter(self):
        """Exercise insert, state transitions and status queries of the writer."""
        schema = generate_reqmgr_schema()
        result = self.requestWriter.insertGenericRequest(schema[0])
        self.assertEqual(len(result), 1, 'insert fail')
        # "assigned" is not a valid state in the T0Request state machine
        result = self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "assigned")
        self.assertEqual(result, 'not allowed state assigned', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestStatus("not_exist_schema", "new"),
                         'Error: document not found')
        allowedStates = ["Closed", "Merge", "AlcaSkim", "Harvesting",
                         "Processing Done", "completed"]
        for state in allowedStates:
            self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], state),
                             'OK')
        # No transition back from "completed" to an earlier state
        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "Processing Done"),
                         'not allowed transition completed to Processing Done')
        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "normal-archived"),
                         'OK')
        result = self.requestWriter.getRequestByStatus(["normal-archived"], False, 1)
        self.assertEqual(len(result), 1, "should be 1 but %s" % result)
示例7: setup
def setup(self, parameters):
    """
    Called at startup

    Connects to the local WMStats CouchDB, the central (or, in the
    Tier0 case, local) request database, and the job state machine
    databases used for cleanup. Also decides which workflow state makes
    a request deletable.
    """
    self.teamName = self.config.Agent.teamName
    # set the connection for local couchDB call
    self.useReqMgrForCompletionCheck = getattr(self.config.TaskArchiver, 'useReqMgrForCompletionCheck', True)
    self.archiveDelayHours = getattr(self.config.TaskArchiver, 'archiveDelayHours', 0)
    self.wmstatsCouchDB = WMStatsWriter(self.config.TaskArchiver.localWMStatsURL,
                                        "WMStatsAgent")
    #TODO: we might need to use local db for Tier0
    self.centralRequestDBReader = RequestDBReader(self.config.AnalyticsDataCollector.centralRequestDBURL,
                                                  couchapp=self.config.AnalyticsDataCollector.RequestCouchApp)
    if self.useReqMgrForCompletionCheck:
        # "announced" workflows are deletable when ReqMgr drives completion
        self.deletableState = "announced"
        self.centralRequestDBWriter = RequestDBWriter(self.config.AnalyticsDataCollector.centralRequestDBURL,
                                                      couchapp=self.config.AnalyticsDataCollector.RequestCouchApp)
        if self.config.TaskArchiver.reqmgr2Only:
            self.reqmgr2Svc = ReqMgr(self.config.TaskArchiver.ReqMgr2ServiceURL)
        else:
            #TODO: remove this for reqmgr2
            self.reqmgrSvc = RequestManager({'endpoint': self.config.TaskArchiver.ReqMgrServiceURL})
    else:
        # Tier0 case
        self.deletableState = "completed"
        # use local for update
        self.centralRequestDBWriter = RequestDBWriter(self.config.AnalyticsDataCollector.localT0RequestDBURL,
                                                      couchapp=self.config.AnalyticsDataCollector.RequestCouchApp)
    jobDBurl = sanitizeURL(self.config.JobStateMachine.couchurl)['url']
    jobDBName = self.config.JobStateMachine.couchDBName
    self.jobCouchdb = CouchServer(jobDBurl)
    self.jobsdatabase = self.jobCouchdb.connectDatabase("%s/jobs" % jobDBName)
    self.fwjrdatabase = self.jobCouchdb.connectDatabase("%s/fwjrs" % jobDBName)
    statSummaryDBName = self.config.JobStateMachine.summaryStatsDBName
    self.statsumdatabase = self.jobCouchdb.connectDatabase(statSummaryDBName)
示例8: setUp
def setUp(self):
    """
    Prepare CouchDB databases, the WMBS schema, DAOs and a work
    directory for the test.
    """
    thread = threading.currentThread()
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection(destroyAllDatabase=True)
    self.testInit.setSchema(customModules=["WMCore.WMBS", "WMComponent.DBS3Buffer"],
                            useDefault=False)

    # Couch databases used by the component under test
    self.databaseName = "taskarchiver_t_0"
    self.testInit.setupCouch("%s/workloadsummary" % self.databaseName, "WorkloadSummary")
    self.testInit.setupCouch("%s/jobs" % self.databaseName, "JobDump")
    self.testInit.setupCouch("%s/fwjrs" % self.databaseName, "FWJRDump")
    self.testInit.setupCouch("wmagent_summary_t", "WMStats")
    self.testInit.setupCouch("wmagent_summary_central_t", "WMStats")
    self.testInit.setupCouch("stat_summary_t", "SummaryStats")
    reqmgrdb = "reqmgrdb_t"
    self.testInit.setupCouch(reqmgrdb, "ReqMgr")

    # Request DB writer with stale views disabled so reads are immediate
    self.requestWriter = RequestDBWriter("%s/%s" % (self.testInit.couchUrl, reqmgrdb))
    self.requestWriter.defaultStale = {}

    # WMBS and DBSBuffer DAO factories plus the DAOs the tests call
    self.daofactory = DAOFactory(package="WMCore.WMBS", logger=thread.logger, dbinterface=thread.dbi)
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer", logger=thread.logger, dbinterface=thread.dbi)
    self.getJobs = self.daofactory(classname="Jobs.GetAllJobs")
    self.inject = self.daofactory(classname="Workflow.MarkInjectedWorkflows")

    self.testDir = self.testInit.generateWorkDir()
    os.makedirs(os.path.join(self.testDir, 'specDir'))

    self.nJobs = 10
    self.campaignName = 'aCampaign'
    self.uploadPublishInfo = False
    self.uploadPublishDir = None
    return
示例9: setUp
def setUp(self):
    """
    _setUp_

    Build a Couch instance with the T0Request couchapp and create the
    request reader/writer services against it.
    """
    self.schema = []
    self.couchApps = ["T0Request"]

    self.testInit = TestInitCouchApp('RequestDBServiceTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)

    couchDBName = 't0_requsetdb_t'
    self.testInit.setupCouch(couchDBName, *self.couchApps)
    url = "%s/%s" % (self.testInit.couchUrl, couchDBName)

    self.requestWriter = RequestDBWriter(url, self.couchApps[0])
    self.requestReader = RequestDBReader(url, self.couchApps[0])
    # Turn off stale views so reads immediately see writes
    self.requestWriter.defaultStale = {}
    self.requestReader.defaultStale = {}
    return
示例10: setUp
def setUp(self):
    """
    _setUp_

    Set up WMBS, a T0Request Couch database and the request DB writer
    used by the plugin tests.
    """
    self.testInit = TestInit(__file__)
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(["WMCore.WMBS"])

    self.requestCouchDB = 'wmstats_plugin_t'
    self.testInit.setupCouch(self.requestCouchDB, 'T0Request')
    self.testDir = self.testInit.generateWorkDir()

    couchURL = "%s/%s" % (os.environ['COUCHURL'], self.requestCouchDB)
    self.requestDBWriter = RequestDBWriter(couchURL, couchapp="T0Request")
    # Disable stale views so test reads see writes immediately
    self.requestDBWriter._setNoStale()

    # Per-plugin state; filled in by the setup*Workflow helpers
    self.stateMap = {}
    self.orderedStates = []
    self.plugin = None
    return
示例11: setUp
def setUp(self):
    """
    _setUp_

    Create WMStats and ReqMgr Couch databases together with the
    reader/writer services used by the tests.
    """
    self.schema = []
    self.couchApps = ["WMStats"]

    self.testInit = TestInitCouchApp("WorkQueueServiceTest")
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)

    statsDBName = "wmstats_t"
    requestDBName = "reqmgrdb_t"
    self.testInit.setupCouch(statsDBName, "WMStats")
    self.testInit.setupCouch(requestDBName, "ReqMgr")

    statsURL = "%s/%s" % (self.testInit.couchUrl, statsDBName)
    requestURL = "%s/%s" % (self.testInit.couchUrl, requestDBName)
    self.reqDBWriter = RequestDBWriter(requestURL)
    self.wmstatsReader = WMStatsReader(statsURL, requestURL)
    # Disable stale views so reads reflect writes immediately
    self.wmstatsReader.defaultStale = {}
    self.wmstatsReader.reqDB.defaultStale = {}
    return
示例12: TaskArchiverTest
class TaskArchiverTest(unittest.TestCase):
"""
TestCase for TestTaskArchiver module
"""
_setup_done = False
_teardown = False
_maxMessage = 10
OWNERDN = os.environ['OWNERDN'] if 'OWNERDN' in os.environ else "Generic/OWNERDN"
def setUp(self):
"""
setup for test.
"""
myThread = threading.currentThread()
self.testInit = TestInit(__file__)
self.testInit.setLogging()
self.testInit.setDatabaseConnection(destroyAllDatabase = True)
self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMComponent.DBS3Buffer"],
useDefault = False)
self.databaseName = "taskarchiver_t_0"
self.testInit.setupCouch("%s/workloadsummary" % self.databaseName, "WorkloadSummary")
self.testInit.setupCouch("%s/jobs" % self.databaseName, "JobDump")
self.testInit.setupCouch("%s/fwjrs" % self.databaseName, "FWJRDump")
self.testInit.setupCouch("wmagent_summary_t", "WMStats")
self.testInit.setupCouch("wmagent_summary_central_t", "WMStats")
self.testInit.setupCouch("stat_summary_t", "SummaryStats")
reqmgrdb = "reqmgrdb_t"
self.testInit.setupCouch(reqmgrdb, "ReqMgr")
reqDBURL = "%s/%s" % (self.testInit.couchUrl, reqmgrdb)
self.requestWriter = RequestDBWriter(reqDBURL)
self.requestWriter.defaultStale = {}
self.daofactory = DAOFactory(package = "WMCore.WMBS",
logger = myThread.logger,
dbinterface = myThread.dbi)
self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
logger=myThread.logger,
dbinterface=myThread.dbi)
self.getJobs = self.daofactory(classname = "Jobs.GetAllJobs")
self.inject = self.daofactory(classname = "Workflow.MarkInjectedWorkflows")
self.testDir = self.testInit.generateWorkDir()
os.makedirs(os.path.join(self.testDir, 'specDir'))
self.nJobs = 10
self.campaignName = 'aCampaign'
self.uploadPublishInfo = False
self.uploadPublishDir = None
return
def tearDown(self):
"""
Database deletion
"""
myThread = threading.currentThread()
self.testInit.clearDatabase(modules = ["WMCore.WMBS"])
self.testInit.delWorkDir()
self.testInit.tearDownCouch()
return
def getConfig(self):
"""
_createConfig_
General config file
"""
config = self.testInit.getConfiguration()
#self.testInit.generateWorkDir(config)
config.section_("General")
config.General.workDir = "."
config.section_("JobStateMachine")
config.JobStateMachine.couchurl = os.getenv("COUCHURL", "cmssrv52.fnal.gov:5984")
config.JobStateMachine.couchDBName = self.databaseName
config.JobStateMachine.jobSummaryDBName = 'wmagent_summary_t'
config.JobStateMachine.summaryStatsDBName = 'stat_summary_t'
config.component_("JobCreator")
config.JobCreator.jobCacheDir = os.path.join(self.testDir, 'testDir')
config.component_("TaskArchiver")
config.TaskArchiver.componentDir = self.testDir
config.TaskArchiver.WorkQueueParams = {}
config.TaskArchiver.pollInterval = 60
config.TaskArchiver.logLevel = 'INFO'
config.TaskArchiver.timeOut = 0
config.TaskArchiver.histogramKeys = ['AvgEventTime', 'writeTotalMB', 'jobTime']
config.TaskArchiver.histogramBins = 5
config.TaskArchiver.histogramLimit = 5
#.........这里部分代码省略.........
示例13: Tier0PluginTest
class Tier0PluginTest(unittest.TestCase):
def setUp(self):
"""
_setUp_
Setup the test environment
"""
self.testInit = TestInit(__file__)
self.testInit.setDatabaseConnection()
self.testInit.setSchema(["WMCore.WMBS"])
self.requestCouchDB = 'wmstats_plugin_t'
self.testInit.setupCouch(self.requestCouchDB, 'T0Request')
self.testDir = self.testInit.generateWorkDir()
reqDBURL = "%s/%s" % (os.environ['COUCHURL'], self.requestCouchDB)
self.requestDBWriter = RequestDBWriter(reqDBURL, couchapp="T0Request")
self.requestDBWriter._setNoStale()
self.stateMap = {}
self.orderedStates = []
self.plugin = None
return
def tearDown(self):
"""
_tearDown_
Clear databases and delete files
"""
self.testInit.tearDownCouch()
self.testInit.clearDatabase()
self.testInit.delWorkDir()
return
def setupRepackWorkflow(self):
"""
_setupRepackWorkflow_
Populate WMBS with a repack-like workflow,
every subscription must be unfinished at first
"""
workflowName = 'Repack_Run481516_StreamZ'
mergeTasks = ['RepackMergewrite_QuadElectron_RAW', 'RepackMergewrite_TriPhoton_RAW',
'RepackMergewrite_SingleNeutrino_RAW']
self.stateMap = {'Merge': [],
'Processing Done': []}
self.orderedStates = ['Merge', 'Processing Done']
# Populate WMStats
self.requestDBWriter.insertGenericRequest({'RequestName': workflowName})
self.requestDBWriter.updateRequestStatus(workflowName, 'Closed')
# Create a wmspec in disk
workload = newWorkload(workflowName)
repackTask = workload.newTask('Repack')
for task in mergeTasks:
repackTask.addTask(task)
repackTask.addTask('RepackCleanupUnmergedwrite_QuadElectron_RAW')
specPath = os.path.join(self.testDir, 'Repack.pkl')
workload.save(specPath)
# Populate WMBS
topFileset = Fileset(name='TestStreamerFileset')
topFileset.create()
options = {'spec': specPath, 'owner': 'ItsAMeMario',
'name': workflowName, 'wfType': 'tier0'}
topLevelWorkflow = Workflow(task='/%s/Repack' % workflowName,
**options)
topLevelWorkflow.create()
topLevelSub = Subscription(topFileset, topLevelWorkflow)
topLevelSub.create()
self.stateMap['Merge'].append(topFileset)
for task in mergeTasks:
mergeWorkflow = Workflow(task='/%s/Repack/%s' % (workflowName, task), **options)
mergeWorkflow.create()
unmergedFileset = Fileset(name='TestUnmergedFileset%s' % task)
unmergedFileset.create()
mergeSub = Subscription(unmergedFileset, mergeWorkflow)
mergeSub.create()
self.stateMap['Processing Done'].append(unmergedFileset)
cleanupWorkflow = Workflow(task='/Repack_Run481516_StreamZ/Repack/RepackCleanupUnmergedwrite_QuadElectron_RAW',
**options)
cleanupWorkflow.create()
unmergedFileset = Fileset(name='TestUnmergedFilesetToCleanup')
unmergedFileset.create()
cleanupSub = Subscription(unmergedFileset, cleanupWorkflow)
cleanupSub.create()
return
def setupExpressWorkflow(self):
"""
_setupExpressWorkflow_
Populate WMBS with a express-like workflow,
#.........这里部分代码省略.........
示例14: RequestDBTest
class RequestDBTest(unittest.TestCase):
    """
    Tests for RequestDBWriter/RequestDBReader against the ReqMgr couchapp.
    """

    def setUp(self):
        """
        _setUp_

        Create a Couch database with the ReqMgr couchapp and the request
        reader/writer services pointing at it.
        """
        self.schema = []
        self.couchApps = ["ReqMgr"]
        self.testInit = TestInitCouchApp('RequestDBServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema,
                                useDefault=False)
        dbName = 'requsetdb_t'
        self.testInit.setupCouch(dbName, *self.couchApps)
        reqDBURL = "%s/%s" % (self.testInit.couchUrl, dbName)
        self.requestWriter = RequestDBWriter(reqDBURL)
        self.requestReader = RequestDBReader(reqDBURL)
        # Disable stale views so reads immediately see writes
        self.requestWriter.defaultStale = {}
        self.requestReader.defaultStale = {}
        return

    def tearDown(self):
        """
        _tearDown_

        Drop the Couch databases created in setUp.
        """
        self.testInit.tearDownCouch()

    def testRequestDBWriter(self):
        """Exercise insert, status/property updates and the reader queries."""
        schema = generate_reqmgr_schema(3)
        result = self.requestWriter.insertGenericRequest(schema[0])
        self.assertEqual(len(result), 1, 'insert fail')
        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "failed"),
                         'OK', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestStatus("not_exist_schema", "assigned"),
                         'Error: document not found')
        # Removed a redundant duplicate updateRequestProperty call whose
        # result was never checked; the asserted call below covers it
        self.assertEqual(self.requestWriter.updateRequestProperty(schema[0]['RequestName'],
                                                                  {'Teams': ['teamA']}), 'OK', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestProperty("not_exist_schema", {'Teams': 'teamA'}),
                         'Error: document not found')
        result = self.requestReader.getRequestByNames([schema[0]['RequestName']])
        self.assertEqual(len(result), 1, "should be 1")
        result = self.requestReader.getRequestByStatus(["failed"], False, 1)
        self.assertEqual(len(result), 1, "should be 1")
        result = self.requestReader.getStatusAndTypeByRequest([schema[0]['RequestName']])
        self.assertEqual(result[schema[0]['RequestName']][0], 'failed', "should be failed")
        result = self.requestWriter.insertGenericRequest(schema[1])
        # Ensure distinct timestamps between the two inserts so that the
        # end-time window queries below can distinguish them
        time.sleep(2)
        result = self.requestWriter.insertGenericRequest(schema[2])
        endTime = int(time.time()) - 1
        result = self.requestReader.getRequestByStatusAndEndTime("new", False, endTime)
        self.assertEqual(len(result), 1, "should be 1")
        endTime = int(time.time()) + 1
        result = self.requestReader.getRequestByStatusAndEndTime("new", False, endTime)
        self.assertEqual(len(result), 2, "should be 2")
示例15: Tier0FeederPoller
class Tier0FeederPoller(BaseWorkerThread):
    def __init__(self, config):
        """
        _init_

        Wire up the Tier0FeederPoller: WMBS DAO factory, the local T0
        request CouchDB writer, and connections to the HLT configuration,
        Storage Manager, PopCon log and T0 Data Service databases (the
        last two are optional, created only when configured).
        """
        BaseWorkerThread.__init__(self)
        myThread = threading.currentThread()
        self.daoFactory = DAOFactory(package="T0.WMBS",
                                     logger=logging,
                                     dbinterface=myThread.dbi)
        self.tier0ConfigFile = config.Tier0Feeder.tier0ConfigFile
        self.specDirectory = config.Tier0Feeder.specDirectory
        self.dropboxuser = getattr(config.Tier0Feeder, "dropboxuser", None)
        self.dropboxpass = getattr(config.Tier0Feeder, "dropboxpass", None)
        self.transferSystemBaseDir = getattr(config.Tier0Feeder, "transferSystemBaseDir", None)
        if self.transferSystemBaseDir != None:
            # Discard the setting when the directory does not exist locally
            if not os.path.exists(self.transferSystemBaseDir):
                self.transferSystemBaseDir = None
        self.dqmUploadProxy = getattr(config.Tier0Feeder, "dqmUploadProxy", None)
        self.serviceProxy = getattr(config.Tier0Feeder, "serviceProxy", None)
        # Local CouchDB tracking T0 request state
        self.localRequestCouchDB = RequestDBWriter(config.AnalyticsDataCollector.localT0RequestDBURL,
                                                   couchapp=config.AnalyticsDataCollector.RequestCouchApp)
        hltConfConnectUrl = config.HLTConfDatabase.connectUrl
        dbFactoryHltConf = DBFactory(logging, dburl=hltConfConnectUrl, options={})
        dbInterfaceHltConf = dbFactoryHltConf.connect()
        daoFactoryHltConf = DAOFactory(package="T0.WMBS",
                                       logger=logging,
                                       dbinterface=dbInterfaceHltConf)
        self.getHLTConfigDAO = daoFactoryHltConf(classname="RunConfig.GetHLTConfig")
        storageManagerConnectUrl = config.StorageManagerDatabase.connectUrl
        dbFactoryStorageManager = DBFactory(logging, dburl=storageManagerConnectUrl, options={})
        self.dbInterfaceStorageManager = dbFactoryStorageManager.connect()
        self.getExpressReadyRunsDAO = None
        if hasattr(config, "PopConLogDatabase"):
            # Optional: PopCon log DB, used to discover express-ready runs
            popConLogConnectUrl = getattr(config.PopConLogDatabase, "connectUrl", None)
            if popConLogConnectUrl != None:
                dbFactoryPopConLog = DBFactory(logging, dburl=popConLogConnectUrl, options={})
                dbInterfacePopConLog = dbFactoryPopConLog.connect()
                daoFactoryPopConLog = DAOFactory(package="T0.WMBS",
                                                 logger=logging,
                                                 dbinterface=dbInterfacePopConLog)
                self.getExpressReadyRunsDAO = daoFactoryPopConLog(classname="Tier0Feeder.GetExpressReadyRuns")
        self.haveT0DataSvc = False
        if hasattr(config, "T0DataSvcDatabase"):
            # Optional: T0 Data Service DB
            t0datasvcConnectUrl = getattr(config.T0DataSvcDatabase, "connectUrl", None)
            if t0datasvcConnectUrl != None:
                self.haveT0DataSvc = True
                dbFactoryT0DataSvc = DBFactory(logging, dburl=t0datasvcConnectUrl, options={})
                dbInterfaceT0DataSvc = dbFactoryT0DataSvc.connect()
                self.daoFactoryT0DataSvc = DAOFactory(package="T0.WMBS",
                                                      logger=logging,
                                                      dbinterface=dbInterfaceT0DataSvc)
        return
def algorithm(self, parameters = None):
"""
_algorithm_
"""
logging.debug("Running Tier0Feeder algorithm...")
myThread = threading.currentThread()
findNewRunsDAO = self.daoFactory(classname = "Tier0Feeder.FindNewRuns")
findNewRunStreamsDAO = self.daoFactory(classname = "Tier0Feeder.FindNewRunStreams")
findNewExpressRunsDAO = self.daoFactory(classname = "Tier0Feeder.FindNewExpressRuns")
releaseExpressDAO = self.daoFactory(classname = "Tier0Feeder.ReleaseExpress")
feedStreamersDAO = self.daoFactory(classname = "Tier0Feeder.FeedStreamers")
markWorkflowsInjectedDAO = self.daoFactory(classname = "Tier0Feeder.MarkWorkflowsInjected")
tier0Config = None
try:
tier0Config = loadConfigurationFile(self.tier0ConfigFile)
except:
# usually happens when there are syntax errors in the configuration
logging.exception("Cannot load Tier0 configuration file, not configuring new runs and run/streams")
# only configure new runs and run/streams if we have a valid Tier0 configuration
if tier0Config != None:
#
# find new runs, setup global run settings and stream/dataset/trigger mapping
#
runHltkeys = findNewRunsDAO.execute(transaction = False)
for run, hltkey in sorted(runHltkeys.items()):
hltConfig = None
#.........这里部分代码省略.........