This article collects typical usage examples of the Python method WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter.insertGenericRequest. If you are unsure what RequestDBWriter.insertGenericRequest does or how to use it, the code examples selected below may help; you can also read further about the containing class, WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter.
The sections below walk through code examples of RequestDBWriter.insertGenericRequest, sorted by popularity by default.
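Before the full test-suite examples, here is a minimal standalone sketch of the call itself. It is illustrative only: it assumes a locally reachable CouchDB instance with the appropriate ReqMgr/RequestDB couchapp installed, and the Couch URL, database name, and request document used here are placeholders rather than values taken from the examples that follow.

# Minimal usage sketch (assumptions noted above; the URL, database name and
# request fields are placeholders).
from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter

couchURL = "http://localhost:5984"                         # placeholder CouchDB URL
reqDBWriter = RequestDBWriter("%s/reqmgrdb_t" % couchURL)  # placeholder database name

requestDoc = {"RequestName": "TestRequest_v1"}             # placeholder request document
result = reqDBWriter.insertGenericRequest(requestDoc)

# As the tests below assert, a successful insert returns one commit record per
# document, e.g. len(result) == 1 and result[0]['ok'] is True.
print(result)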
Example 1: T0RequestDBTest
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
class T0RequestDBTest(unittest.TestCase):
    """
    """

    def setUp(self):
        """
        _setUp_
        """
        self.schema = []
        self.couchApps = ["T0Request"]
        self.testInit = TestInitCouchApp('RequestDBServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = self.schema,
                                useDefault = False)
        dbName = 't0_requsetdb_t'
        self.testInit.setupCouch(dbName, *self.couchApps)
        reqDBURL = "%s/%s" % (self.testInit.couchUrl, dbName)
        self.requestWriter = RequestDBWriter(reqDBURL, self.couchApps[0])
        self.requestReader = RequestDBReader(reqDBURL, self.couchApps[0])
        self.requestWriter.defaultStale = {}
        self.requestReader.defaultStale = {}
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()

    def testRequestDBWriter(self):
        # test insertGenericRequest and the allowed T0Request status transitions
        schema = generate_reqmgr_schema()
        result = self.requestWriter.insertGenericRequest(schema[0])
        self.assertEqual(len(result), 1, 'insert fail')

        result = self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "assigned")
        self.assertEqual(result, 'not allowed state assigned', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestStatus("not_exist_schema", "new"),
                         'Error: document not found')

        allowedStates = ["Closed", "Merge", "AlcaSkim", "Harvesting",
                         "Processing Done", "completed"]
        for state in allowedStates:
            self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], state),
                             'OK')

        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "Processing Done"),
                         'not allowed transition completed to Processing Done')
        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "normal-archived"),
                         'OK')

        result = self.requestWriter.getRequestByStatus(["normal-archived"], False, 1)
        self.assertEqual(len(result), 1, "should be 1 but %s" % result)
Example 2: WMStatsTest
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
class WMStatsTest(unittest.TestCase):
    """
    """

    def setUp(self):
        """
        _setUp_
        """
        self.schema = []
        self.couchApps = ["WMStats"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = self.schema,
                                useDefault = False)
        dbName = 'wmstats_t'
        self.testInit.setupCouch(dbName, "WMStats")
        reqDBName = "reqmgrdb_t"
        self.testInit.setupCouch(reqDBName, "ReqMgr")
        wmstatsURL = "%s/%s" % (self.testInit.couchUrl, dbName)
        reqDBURL = "%s/%s" % (self.testInit.couchUrl, reqDBName)
        self.reqDBWriter = RequestDBWriter(reqDBURL)
        self.wmstatsReader = WMStatsReader(wmstatsURL, reqdbURL=reqDBURL)
        self.wmstatsReader.defaultStale = {}
        self.wmstatsReader.reqDB.defaultStale = {}
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()

    def testWMStatsWriter(self):
        # test insertGenericRequest, request property updates and WMStats reads
        schema = generate_reqmgr_schema()

        result = self.reqDBWriter.insertGenericRequest(schema[0])
        self.assertEquals(result[0]['ok'], True, 'insert fail')

        result = self.reqDBWriter.updateRequestStatus(schema[0]['RequestName'], "failed")
        self.assertEquals(result, 'OK', 'update fail')

        result = self.reqDBWriter.updateRequestStatus("not_exist_schema", "assigned")
        self.assertEquals(result, 'Error: document not found')

        result = self.reqDBWriter.updateRequestProperty(schema[0]['RequestName'], {"Teams": ['teamA']})
        self.assertEquals(result, 'OK', 'update fail')

        result = self.reqDBWriter.updateRequestProperty("not_exist_schema", {"Teams": ['teamA']})
        self.assertEquals(result, 'Error: document not found')

        totalStats = {'TotalEstimatedJobs': 100, 'TotalInputEvents': 1000, 'TotalInputLumis': 1234, 'TotalInputFiles': 5}
        result = self.reqDBWriter.updateRequestProperty(schema[0]['RequestName'], totalStats)
        self.assertEquals(result, 'OK', 'update fail')

        result = self.reqDBWriter.updateRequestProperty(schema[0]['RequestName'], totalStats)
        self.assertEquals(result, 'OK', 'update fail')

        result = self.reqDBWriter.updateRequestProperty("not_exist_schema", totalStats)
        self.assertEquals(result, 'Error: document not found')

        spec1 = newWorkload(schema[0]['RequestName'])
        production = spec1.newTask("Production")
        production.setTaskType("Merge")
        production.setSiteWhitelist(['TEST_SITE'])
        properties = {"RequestPriority": spec1.priority(),
                      'SiteWhitelist': spec1.getTopLevelTask()[0].siteWhitelist(),
                      'OutputDatasets': spec1.listOutputDatasets()}
        result = self.reqDBWriter.updateRequestProperty(spec1.name(), properties)
        self.assertEquals(result, 'OK', 'update fail')

        spec2 = newWorkload("not_exist_schema")
        production = spec2.newTask("Production")
        production.setTaskType("Merge")
        properties = {"RequestPriority": spec2.priority(),
                      'SiteWhitelist': spec2.getTopLevelTask()[0].siteWhitelist(),
                      'OutputDatasets': spec2.listOutputDatasets()}
        result = self.reqDBWriter.updateRequestProperty(spec2.name(), properties)
        self.assertEquals(result, 'Error: document not found')

        requests = self.wmstatsReader.getRequestByStatus(["failed"], jobInfoFlag = False, legacyFormat = True)
        self.assertEquals(requests.keys(), [schema[0]['RequestName']])

        requestCollection = RequestInfoCollection(requests)
        result = requestCollection.getJSONData()
        self.assertEquals(result.keys(), [schema[0]['RequestName']])

        requests = self.wmstatsReader.getActiveData()
        self.assertEquals(requests.keys(), [schema[0]['RequestName']])
        requests = self.wmstatsReader.getRequestByStatus(["failed"])
        self.assertEquals(requests.keys(), [schema[0]['RequestName']])

        requests = self.wmstatsReader.getRequestSummaryWithJobInfo(schema[0]['RequestName'])
        self.assertEquals(requests.keys(), [schema[0]['RequestName']])
Example 3: TaskArchiverTest
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
#......... part of the code omitted here .........
            # Fetching the important values
            instLuminosity = i * binSize
            timePerEvent = points[i]
            if instLuminosity > minLumi and instLuminosity < maxLumi:
                worthPoints[instLuminosity] = timePerEvent

        return worthPoints
    def publishPerformanceDashBoard(self, dashBoardUrl, PD, release, worthPoints):
        dashboardPayload = []
        for instLuminosity in worthPoints:
            timePerEvent = int(worthPoints[instLuminosity])
            dashboardPayload.append({"primaryDataset" : PD,
                                     "release" : release,
                                     "integratedLuminosity" : instLuminosity,
                                     "timePerEvent" : timePerEvent})
        data = "{\"data\":%s}" % str(dashboardPayload).replace("\'", "\"")

        # let's suppose it works..
        testDashBoardPayloadFile = open(os.path.join(getTestBase(),
                                                     'WMComponent_t/TaskArchiver_t/DashBoardPayload.json'), 'r')
        testDashBoardPayload = testDashBoardPayloadFile.read()
        testDashBoardPayloadFile.close()

        self.assertEqual(data, testDashBoardPayload)

        return True

    def populateWorkflowWithCompleteStatus(self, name="TestWorkload"):
        schema = generate_reqmgr_schema(1)
        schema[0]["RequestName"] = name

        self.requestWriter.insertGenericRequest(schema[0])
        result = self.requestWriter.updateRequestStatus(name, "completed")
        return result

    def testA_BasicFunctionTest(self):
        """
        _BasicFunctionTest_

        Tests the components, by seeing if they can process a simple set of closeouts
        """
        myThread = threading.currentThread()

        config = self.getConfig()
        workloadPath = os.path.join(self.testDir, 'specDir', 'spec.pkl')
        workload = self.createWorkload(workloadName = workloadPath)
        testJobGroup = self.createTestJobGroup(config = config,
                                               name = workload.name(),
                                               specLocation = workloadPath,
                                               error = False)

        # Create second workload
        testJobGroup2 = self.createTestJobGroup(config = config,
                                                name = workload.name(),
                                                filesetName = "TestFileset_2",
                                                specLocation = workloadPath,
                                                task = "/TestWorkload/ReReco/LogCollect",
                                                type = "LogCollect")

        cachePath = os.path.join(config.JobCreator.jobCacheDir,
                                 "TestWorkload", "ReReco")
        os.makedirs(cachePath)
        self.assertTrue(os.path.exists(cachePath))
Example 4: Tier0PluginTest
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
class Tier0PluginTest(unittest.TestCase):

    def setUp(self):
        """
        _setUp_

        Setup the test environment
        """
        self.testInit = TestInit(__file__)
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(["WMCore.WMBS"])
        self.requestCouchDB = 'wmstats_plugin_t'
        self.testInit.setupCouch(self.requestCouchDB, 'T0Request')
        self.testDir = self.testInit.generateWorkDir()
        reqDBURL = "%s/%s" % (os.environ['COUCHURL'], self.requestCouchDB)
        self.requestDBWriter = RequestDBWriter(reqDBURL, couchapp="T0Request")
        self.requestDBWriter._setNoStale()

        self.stateMap = {}
        self.orderedStates = []
        self.plugin = None

        return

    def tearDown(self):
        """
        _tearDown_

        Clear databases and delete files
        """
        self.testInit.tearDownCouch()
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()

        return

    def setupRepackWorkflow(self):
        """
        _setupRepackWorkflow_

        Populate WMBS with a repack-like workflow,
        every subscription must be unfinished at first
        """
        workflowName = 'Repack_Run481516_StreamZ'
        mergeTasks = ['RepackMergewrite_QuadElectron_RAW', 'RepackMergewrite_TriPhoton_RAW',
                      'RepackMergewrite_SingleNeutrino_RAW']

        self.stateMap = {'Merge': [],
                         'Processing Done': []}
        self.orderedStates = ['Merge', 'Processing Done']

        # Populate WMStats
        self.requestDBWriter.insertGenericRequest({'RequestName': workflowName})
        self.requestDBWriter.updateRequestStatus(workflowName, 'Closed')

        # Create a wmspec in disk
        workload = newWorkload(workflowName)
        repackTask = workload.newTask('Repack')
        for task in mergeTasks:
            repackTask.addTask(task)
        repackTask.addTask('RepackCleanupUnmergedwrite_QuadElectron_RAW')

        specPath = os.path.join(self.testDir, 'Repack.pkl')
        workload.save(specPath)

        # Populate WMBS
        topFileset = Fileset(name='TestStreamerFileset')
        topFileset.create()

        options = {'spec': specPath, 'owner': 'ItsAMeMario',
                   'name': workflowName, 'wfType': 'tier0'}
        topLevelWorkflow = Workflow(task='/%s/Repack' % workflowName,
                                    **options)
        topLevelWorkflow.create()
        topLevelSub = Subscription(topFileset, topLevelWorkflow)
        topLevelSub.create()

        self.stateMap['Merge'].append(topFileset)

        for task in mergeTasks:
            mergeWorkflow = Workflow(task='/%s/Repack/%s' % (workflowName, task), **options)
            mergeWorkflow.create()
            unmergedFileset = Fileset(name='TestUnmergedFileset%s' % task)
            unmergedFileset.create()
            mergeSub = Subscription(unmergedFileset, mergeWorkflow)
            mergeSub.create()
            self.stateMap['Processing Done'].append(unmergedFileset)

        cleanupWorkflow = Workflow(task='/Repack_Run481516_StreamZ/Repack/RepackCleanupUnmergedwrite_QuadElectron_RAW',
                                   **options)
        cleanupWorkflow.create()
        unmergedFileset = Fileset(name='TestUnmergedFilesetToCleanup')
        unmergedFileset.create()
        cleanupSub = Subscription(unmergedFileset, cleanupWorkflow)
        cleanupSub.create()

        return

    def setupExpressWorkflow(self):
        """
        _setupExpressWorkflow_

        Populate WMBS with an express-like workflow,
#......... part of the code omitted here .........
Example 5: Tier0FeederPoller
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
#......... part of the code omitted here .........
        #
        # upload PCL conditions to DropBox
        #
        ConditionUploadAPI.uploadConditions(self.dropboxuser, self.dropboxpass, self.serviceProxy)

        return

    def feedCouchMonitoring(self):
        """
        _feedCouchMonitoring_

        check for workflows that haven't been uploaded to Couch for monitoring yet
        """
        getStreamerWorkflowsForMonitoringDAO = self.daoFactory(classname = "Tier0Feeder.GetStreamerWorkflowsForMonitoring")
        getPromptRecoWorkflowsForMonitoringDAO = self.daoFactory(classname = "Tier0Feeder.GetPromptRecoWorkflowsForMonitoring")
        markTrackedWorkflowMonitoringDAO = self.daoFactory(classname = "Tier0Feeder.MarkTrackedWorkflowMonitoring")

        workflows = getStreamerWorkflowsForMonitoringDAO.execute()
        workflows += getPromptRecoWorkflowsForMonitoringDAO.execute()

        if len(workflows) == 0:
            logging.debug("No workflows to publish to couch monitoring, doing nothing")

        if workflows:
            logging.debug(" Going to publish %d workflows" % len(workflows))
            for (workflowId, run, workflowName) in workflows:
                logging.info(" Publishing workflow %s to monitoring" % workflowName)
                #TODO: add more information about workflow if there need to be kept longer than
                # workflow life cycle.
                doc = {}
                doc["RequestName"] = workflowName
                doc["Run"] = run
                response = self.localRequestCouchDB.insertGenericRequest(doc)
                if response in ("OK", "EXISTS"):
                    logging.info(" Successfully uploaded request %s" % workflowName)
                    # Here we have to trust the insert, if it doesn't happen will be easy to spot on the logs
                    markTrackedWorkflowMonitoringDAO.execute(workflowId)

        return

    def closeOutRealTimeWorkflows(self):
        """
        _closeOutRealTimeWorkflows_

        Updates couch with the closeout status of Repack and Express
        PromptReco should be closed out automatically
        """
        getNotClosedOutWorkflowsDAO = self.daoFactory(classname = "Tier0Feeder.GetNotClosedOutWorkflows")
        workflows = getNotClosedOutWorkflowsDAO.execute()

        if len(workflows) == 0:
            logging.debug("No workflows to publish to couch monitoring, doing nothing")

        if workflows:
            for workflow in workflows:
                (workflowId, filesetId, filesetOpen, workflowName) = workflow

                # find returns -1 if the string is not found
                if workflowName.find('PromptReco') >= 0:
                    logging.debug("Closing out instantaneously PromptReco Workflow %s" % workflowName)
                    self.updateClosedState(workflowName, workflowId)
                else:
                    # Check if fileset (which you already know) is closed or not
                    # FIXME: No better way to do it? what comes from the DAO is a string, casting bool or int doesn't help much.
                    # Works like that :
Example 6: RequestDBTest
# Required import: from WMCore.Services.RequestDB.RequestDBWriter import RequestDBWriter [as alias]
# Or: from WMCore.Services.RequestDB.RequestDBWriter.RequestDBWriter import insertGenericRequest [as alias]
class RequestDBTest(unittest.TestCase):
    """
    """

    def setUp(self):
        """
        _setUp_
        """
        self.schema = []
        self.couchApps = ["ReqMgr"]
        self.testInit = TestInitCouchApp('RequestDBServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = self.schema,
                                useDefault = False)
        dbName = 'requsetdb_t'
        self.testInit.setupCouch(dbName, *self.couchApps)
        reqDBURL = "%s/%s" % (self.testInit.couchUrl, dbName)
        self.requestWriter = RequestDBWriter(reqDBURL)
        self.requestReader = RequestDBReader(reqDBURL)
        self.requestWriter.defaultStale = {}
        self.requestReader.defaultStale = {}
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()

    def testRequestDBWriter(self):
        # test insertGenericRequest, status/property updates and reader queries
        schema = generate_reqmgr_schema(3)
        result = self.requestWriter.insertGenericRequest(schema[0])
        self.assertEqual(len(result), 1, 'insert fail')

        self.assertEqual(self.requestWriter.updateRequestStatus(schema[0]['RequestName'], "failed"), 'OK', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestStatus("not_exist_schema", "assigned"),
                         'Error: document not found')

        result = self.requestWriter.updateRequestProperty(schema[0]['RequestName'],
                                                          {'Teams': ['teamA']})
        self.assertEqual(self.requestWriter.updateRequestProperty(schema[0]['RequestName'],
                                                                  {'Teams': ['teamA']}), 'OK', 'update fail')
        self.assertEqual(self.requestWriter.updateRequestProperty("not_exist_schema", {'Teams': 'teamA'}),
                         'Error: document not found')

        result = self.requestReader.getRequestByNames([schema[0]['RequestName']])
        self.assertEqual(len(result), 1, "should be 1")
        result = self.requestReader.getRequestByStatus(["failed"], False, 1)
        self.assertEqual(len(result), 1, "should be 1")

        result = self.requestReader.getStatusAndTypeByRequest([schema[0]['RequestName']])
        self.assertEqual(result[schema[0]['RequestName']][0], 'failed', "should be failed")

        result = self.requestWriter.insertGenericRequest(schema[1])
        time.sleep(2)
        result = self.requestWriter.insertGenericRequest(schema[2])
        endTime = int(time.time()) - 1
        result = self.requestReader.getRequestByStatusAndEndTime("new", False, endTime)
        self.assertEqual(len(result), 1, "should be 1")
        endTime = int(time.time()) + 1
        result = self.requestReader.getRequestByStatusAndEndTime("new", False, endTime)
        self.assertEqual(len(result), 2, "should be 2")