本文整理汇总了Python中WMQuality.TestInitCouchApp.TestInitCouchApp.clearDatabase方法的典型用法代码示例。如果您正苦于以下问题:Python TestInitCouchApp.clearDatabase方法的具体用法?Python TestInitCouchApp.clearDatabase怎么用?Python TestInitCouchApp.clearDatabase使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类WMQuality.TestInitCouchApp.TestInitCouchApp的用法示例。
在下文中一共展示了TestInitCouchApp.clearDatabase方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: RESTBaseUnitTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class RESTBaseUnitTest(unittest.TestCase):
    """
    Base class for REST unit tests.

    Subclasses must override initialize() to set self.config (and
    optionally self.schemaModules) before the REST server is started.
    """

    def setUp(self):
        """Run subclass initialization, create the test schema if any
        schema modules were requested, then start the REST server."""
        # default set
        self.schemaModules = []
        self.initialize()
        if self.schemaModules:
            self.testInit = TestInitCouchApp(__file__)
            self.testInit.setLogging()  # logLevel = logging.SQLDEBUG
            self.testInit.setDatabaseConnection(self.config.getDBUrl())
            self.testInit.setSchema(customModules = self.schemaModules,
                                    useDefault = False)
        self.rt = Root(self.config)
        self.rt.start(blocking=False)

    def tearDown(self):
        """Stop the server and drop the database if one was created."""
        self.rt.stop()
        if self.schemaModules:
            self.testInit.clearDatabase()
        self.config = None

    def initialize(self):
        """
        Must be overridden by subclasses, e.g.:

            self.config = DefaultConfig('WMCore.WebTools.RESTModel')
            self.config.setDBUrl("sqlite://")
            self.schemaModules = ["WMCore.ThreadPool", "WMCore.WMBS"]
        """
        message = "initialize method has to be implemented, self.restModel, self.schemaModules needs to be set"
        # Python-3-compatible raise; the original used the Python-2-only
        # "raise Exc, msg" statement form, which is a SyntaxError on py3.
        raise NotImplementedError(message)
示例2: WorkloadSummary_t
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class WorkloadSummary_t(unittest.TestCase):
    """Tests for registering workload summaries in the couchapp."""

    def setUp(self):
        """bootstrap tests"""
        self.testInit = TestInitCouchApp(__file__)
        self.testInit.setLogging()
        # Relational database is not needed by these tests:
        # self.testInit.setDatabaseConnection()
        # self.testInit.setSchema(customModules = ["WMCore.WMBS"],
        #                         useDefault = False)
        self.testInit.setupCouch("wmcore-workloadsummary", "WorkloadSummary")
        self.workload1 = newWorkload("WorkloadSummaryTest1")
        self.workload2 = newWorkload("WorkloadSummaryTest2")

    def tearDown(self):
        """Drop the couch database and clear any relational tables."""
        # (Removed a dead trailing "pass" statement.)
        self.testInit.tearDownCouch()
        self.testInit.clearDatabase()

    def testA(self):
        """
        register workloads in the couchapp
        and pulling back information from the views
        """
        populateWorkload(self.workload1, "evansde77")
        populateWorkload(self.workload2, "drsm79")
        summary1 = self.workload1.generateWorkloadSummary()
        summary2 = self.workload2.generateWorkloadSummary()
        summ1 = WorkloadSummary(self.workload1.name(), self.testInit.couchUrl,
                                self.testInit.couchDbName, self.workload1)
        summ2 = WorkloadSummary(self.workload2.name(), self.testInit.couchUrl,
                                self.testInit.couchDbName, self.workload2)
        summ1.create()
        summ2.create()
        summ1.addACDCCollection(makeUUID())
        for t in self.workload1.listAllTaskPathNames():
            fakeDoc = makeUUID()
            summ1.addACDCFileset(t, fakeDoc)
示例3: MonteCarloTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class MonteCarloTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_
    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("montecarlo_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    # NOTE(review): connects to "rereco_t" although the couchapp above was
    # set up in "montecarlo_t" -- looks like a copy/paste mismatch; confirm
    # against the test methods before changing.
    self.configDatabase = couchServer.connectDatabase("rereco_t")
    # Route DBS calls through the emulator instead of the real service.
    EmulatorHelper.setEmulators(dbs = True)
    return
def tearDown(self):
    """
    _tearDown_
    Clear out the database.
    """
    # Undo everything setUp created: couch databases, SQL schema,
    # scratch work directory, and the emulator bindings.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    EmulatorHelper.resetEmulators()
    return
def injectMonteCarloConfig(self):
    """
    _injectMonteCarlo_
    Create a bogus config cache document for the montecarlo generation and
    inject it into couch. Return the ID of the document.
    """
    # PSet tweaks: two output modules with distinct filter names / tiers.
    psetTweaks = {
        "process": {
            "outputModules_": ["OutputA", "OutputB"],
            "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                    "dataTier": "RECO"}},
            "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                    "dataTier": "USER"}},
        }
    }
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    doc["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    doc["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    doc["pset_tweak_details"] = psetTweaks
    committed = self.configDatabase.commitOne(doc)
    return committed[0]["id"]
def _commonMonteCarloTest(self):
"""
Retrieve the workload from WMBS and test all its properties.
"""
prodWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Production")
prodWorkflow.load()
self.assertEqual(len(prodWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
goldenOutputMods = ["OutputA", "OutputB"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = prodWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = prodWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name, "/TestWorkload/Production/ProductionMerge%s/merged-Merged" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Production/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong.")
logArchOutput = prodWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = prodWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
for goldenOutputMod in goldenOutputMods:
mergeWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Production/ProductionMerge%s" % goldenOutputMod)
mergeWorkflow.load()
self.assertEqual(len(mergeWorkflow.outputMap.keys()), 2,
#.........这里部分代码省略.........
示例4: PrivateMCTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class PrivateMCTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_
    Initialize the database.
    """
    # Couch config cache and WMBS schema both live under "privatemc_t".
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("privatemc_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("privatemc_t")
    # Scratch directory for the workload cache.
    self.testDir = self.testInit.generateWorkDir()
    return
def injectAnalysisConfig(self):
    """
    Create a bogus config cache document for the analysis workflow and
    inject it into couch. Return the ID of the document.
    """
    # Two output modules with distinct filter names and data tiers.
    psetTweaks = {
        "process": {
            "outputModules_": ["OutputA", "OutputB"],
            "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                    "dataTier": "RECO"}},
            "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                    "dataTier": "USER"}},
        }
    }
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["pset_hash"] = "21cb400c6ad63c3a97fa93f8e8785127"
    doc["owner"] = {"group": "Analysis", "user": "mmascher"}
    doc["pset_tweak_details"] = psetTweaks
    committed = self.configDatabase.commitOne(doc)
    return committed[0]["id"]
def tearDown(self):
    """
    _tearDown_
    Clear out the database.
    """
    # Drop couch databases, SQL schema and the scratch work directory.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def testPrivateMC(self):
"""
_testAnalysis_
"""
defaultArguments = getTestArguments()
defaultArguments["CouchURL"] = os.environ["COUCHURL"]
defaultArguments["CouchDBName"] = "privatemc_t"
defaultArguments["AnalysisConfigCacheDoc"] = self.injectAnalysisConfig()
defaultArguments["ProcessingVersion"] = 1
processingFactory = PrivateMCWorkloadFactory()
testWorkload = processingFactory("TestWorkload", defaultArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "DMWM")
testWMBSHelper = WMBSHelper(testWorkload, "PrivateMC", "SomeBlock", cachepath = self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
procWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/PrivateMC")
procWorkflow.load()
self.assertEqual(len(procWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs: %s" % len(procWorkflow.outputMap.keys()))
logArchOutput = procWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]#Actually Analysis does not have a merge task
unmergedLogArchOutput = procWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/PrivateMC/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/PrivateMC/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
goldenOutputMods = ["OutputA", "OutputB"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name, "/TestWorkload/PrivateMC/unmerged-%s" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/PrivateMC/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong.")
topLevelFileset = Fileset(name = "TestWorkload-PrivateMC-SomeBlock")
topLevelFileset.loadData()
procSubscription = Subscription(fileset = topLevelFileset, workflow = procWorkflow)
procSubscription.loadData()
#.........这里部分代码省略.........
示例5: MonteCarloTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class MonteCarloTest(EmulatedUnitTestCase):
def setUp(self):
    """
    _setUp_
    Initialize the database and couch.
    """
    super(MonteCarloTest, self).setUp()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch(TEST_DB_NAME, "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase(TEST_DB_NAME)
    myThread = threading.currentThread()
    # WMBS DAOs used by the assertions in the test methods.
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def tearDown(self):
    """
    _tearDown_
    Clear out the database.
    """
    # Reverse order of setUp: couch, SQL schema, work dir, then the
    # emulated base-class teardown.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    super(MonteCarloTest, self).tearDown()
    return
def injectMonteCarloConfig(self):
    """
    _injectMonteCarlo_
    Create a bogus config cache document for the montecarlo generation and
    inject it into couch. Return the ID of the document.
    """
    # PSet tweaks describing the two fake output modules.
    tweaks = {
        "process": {
            "outputModules_": ["OutputA", "OutputB"],
            "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                    "dataTier": "RECO"}},
            "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                    "dataTier": "USER"}},
        }
    }
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    doc["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    doc["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    doc["pset_tweak_details"] = tweaks
    return self.configDatabase.commitOne(doc)[0]["id"]
def _commonMonteCarloTest(self):
"""
Retrieve the workload from WMBS and test all its properties.
"""
goldenOutputMods = {"OutputA": "RECO", "OutputB": "USER"}
prodWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/Production")
prodWorkflow.load()
self.assertEqual(len(prodWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
for goldenOutputMod, tier in goldenOutputMods.items():
fset = goldenOutputMod + tier
mergedOutput = prodWorkflow.outputMap[fset][0]["merged_output_fileset"]
unmergedOutput = prodWorkflow.outputMap[fset][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name,
"/TestWorkload/Production/ProductionMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Production/unmerged-%s" % (goldenOutputMod + tier),
"Error: Unmerged output fileset is wrong.")
logArchOutput = prodWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = prodWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
#.........这里部分代码省略.........
示例6: ReDigiTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class ReDigiTest(EmulatedUnitTestCase):
def setUp(self):
    """
    _setUp_
    Initialize the database and couch.
    """
    super(ReDigiTest, self).setUp()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("redigi_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    # Config cache documents are committed into the same "redigi_t" db.
    self.configDatabase = couchServer.connectDatabase("redigi_t")
    return
def tearDown(self):
    """
    _tearDown_
    Clear out the database.
    """
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    # NOTE(review): resetEmulators() with no matching setEmulators() in
    # this setUp -- presumably the EmulatedUnitTestCase base enables the
    # emulators; confirm before removing.
    EmulatorHelper.resetEmulators()
    super(ReDigiTest, self).tearDown()
    return
def testDependentReDigi(self):
"""
_testDependentReDigi_
Verfiy that a dependent ReDigi workflow that keeps stages out
RAW data is created and installed into WMBS correctly.
"""
defaultArguments = ReDigiWorkloadFactory.getTestArguments()
defaultArguments["CouchURL"] = os.environ["COUCHURL"]
defaultArguments["CouchDBName"] = "redigi_t"
configs = injectReDigiConfigs(self.configDatabase)
defaultArguments["StepOneConfigCacheID"] = configs[0]
defaultArguments["StepTwoConfigCacheID"] = configs[1]
defaultArguments["StepThreeConfigCacheID"] = configs[2]
defaultArguments["StepOneOutputModuleName"] = "RAWDEBUGoutput"
defaultArguments["StepTwoOutputModuleName"] = "RECODEBUGoutput"
factory = ReDigiWorkloadFactory()
testWorkload = factory.factoryWorkloadConstruction("TestWorkload", defaultArguments)
testWMBSHelper = WMBSHelper(testWorkload, "StepOneProc", "SomeBlock", cachepath = self.testInit.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
topLevelFileset = Fileset(name = "TestWorkload-StepOneProc-SomeBlock")
topLevelFileset.loadData()
stepOneUnmergedRAWFileset = Fileset(name = "/TestWorkload/StepOneProc/unmerged-RAWDEBUGoutput")
stepOneUnmergedRAWFileset.loadData()
stepOneMergedRAWFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/merged-Merged")
stepOneMergedRAWFileset.loadData()
stepOneLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/unmerged-logArchive")
stepOneLogArchiveFileset.loadData()
stepOneMergeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/merged-logArchive")
stepOneMergeLogArchiveFileset.loadData()
stepTwoUnmergedDQMFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-DQMoutput")
stepTwoUnmergedDQMFileset.loadData()
stepTwoUnmergedRECOFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-RECODEBUGoutput")
stepTwoUnmergedRECOFileset.loadData()
stepTwoMergedDQMFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeDQMoutput/merged-Merged")
stepTwoMergedDQMFileset.loadData()
stepTwoMergedRECOFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/merged-Merged")
stepTwoMergedRECOFileset.loadData()
stepTwoLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-logArchive")
stepTwoLogArchiveFileset.loadData()
stepTwoMergeDQMLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeDQMoutput/merged-logArchive")
stepTwoMergeDQMLogArchiveFileset.loadData()
stepTwoMergeRECOLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/merged-logArchive")
stepTwoMergeRECOLogArchiveFileset.loadData()
stepThreeUnmergedAODFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/unmerged-aodOutputModule")
stepThreeUnmergedAODFileset.loadData()
stepThreeMergedAODFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/StepThreeProcMergeaodOutputModule/merged-Merged")
stepThreeMergedAODFileset.loadData()
stepThreeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/unmerged-logArchive")
stepThreeLogArchiveFileset.loadData()
stepThreeMergeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/StepThreeProcMergeaodOutputModule/merged-logArchive")
stepThreeMergeLogArchiveFileset.loadData()
stepOneWorkflow = Workflow(spec = "somespec", name = "TestWorkload",
task = "/TestWorkload/StepOneProc")
stepOneWorkflow.load()
self.assertEqual(stepOneWorkflow.wfType, 'reprocessing')
self.assertTrue("logArchive" in stepOneWorkflow.outputMap.keys(),
#.........这里部分代码省略.........
示例7: ReportTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class ReportTest(unittest.TestCase):
"""
_ReportTest_
Unit tests for the Report class.
"""
def setUp(self):
    """
    _setUp_
    Figure out the location of the XML report produced by CMSSW.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection(destroyAllDatabase = True)
    self.testInit.setupCouch("report_t/fwjrs", "FWJRDump")
    # Sample framework job reports shipped with the test sources.
    self.xmlPath = os.path.join(getTestBase(),
                                "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml")
    self.badxmlPath = os.path.join(getTestBase(),
                                   "WMCore_t/FwkJobReport_t/CMSSWFailReport2.xml")
    self.skippedFilesxmlPath = os.path.join(getTestBase(),
                                            "WMCore_t/FwkJobReport_t/CMSSWSkippedNonExistentFile.xml")
    self.skippedAllFilesxmlPath = os.path.join(getTestBase(),
                                               "WMCore_t/FwkJobReport_t/CMSSWSkippedAll.xml")
    self.fallbackXmlPath = os.path.join(getTestBase(),
                                        "WMCore_t/FwkJobReport_t/CMSSWInputFallback.xml")
    self.testDir = self.testInit.generateWorkDir()
    return
def tearDown(self):
    """
    _tearDown_
    Cleanup the databases.
    """
    # Drop couch databases, SQL tables and the scratch work directory.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def verifyInputData(self, report):
    """
    _verifyInputData_
    Verify that the input file in the Report class matches the input file in
    the XML generated by CMSSW.

    Uses unittest assertion methods instead of bare "assert" statements:
    bare asserts are silently stripped when Python runs with -O, and the
    assert* helpers give better failure diagnostics.
    """
    inputFiles = report.getInputFilesFromStep("cmsRun1")
    self.assertEqual(len(inputFiles), 1,
                     "Error: Wrong number of input files.")
    self.assertEqual(inputFiles[0]["lfn"],
                     "/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/122/023/142F3F42-C5D6-DE11-945D-000423D94494.root",
                     "Error: Wrong LFN on input file.")
    self.assertEqual(inputFiles[0]["pfn"],
                     "dcap://cmsdca.fnal.gov:24137/pnfs/fnal.gov/usr/cms/WAX/11/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/122/023/142F3F42-C5D6-DE11-945D-000423D94494.root",
                     "Error: Wrong PFN on input file.")
    inputRun = list(inputFiles[0]["runs"])
    self.assertEqual(len(inputRun), 1,
                     "Error: Wrong number of runs in input.")
    self.assertEqual(inputRun[0].run, 122023,
                     "Error: Wrong run number on input file.")
    self.assertEqual(len(inputRun[0].lumis), 1,
                     "Error: Wrong number of lumis in input file.")
    self.assertIn(215, inputRun[0].lumis,
                  "Error: Input file is missing lumis.")
    self.assertEqual(inputFiles[0]["events"], 2,
                     "Error: Wrong number of events in input file.")
    self.assertEqual(inputFiles[0]["size"], 0,
                     "Error: Wrong size in input file.")
    self.assertEqual(inputFiles[0]["catalog"],
                     "trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap",
                     "Error: Catalog on input file is wrong.")
    self.assertEqual(inputFiles[0]["guid"], "142F3F42-C5D6-DE11-945D-000423D94494",
                     "Error: GUID of input file is wrong.")
    return
def verifyRecoOutput(self, report):
"""
_verifyRecoOutput_
Verify that all the metadata in the RECO output module is correct.
"""
outputFiles = report.getFilesFromOutputModule("cmsRun1", "outputRECORECO")
assert len(outputFiles) == 1, \
"Error: Wrong number of output files."
assert outputFiles[0]["lfn"] == "/store/backfill/2/unmerged/WMAgentCommissioining10/MinimumBias/RECO/rereco_GR09_R_34X_V5_All_v1/0000/outputRECORECO.root", \
"Error: Wrong LFN on output file: %s" % outputFiles[0]["lfn"]
assert outputFiles[0]["pfn"] == "outputRECORECO.root", \
"Error: Wrong PFN on output file."
outputRun = list(outputFiles[0]["runs"])
assert len(outputRun) == 1, \
"Error: Wrong number of runs in output."
assert outputRun[0].run == 122023, \
"Error: Wrong run number on output file."
assert len(outputRun[0].lumis) == 1, \
#.........这里部分代码省略.........
示例8: RetryManagerTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class RetryManagerTest(unittest.TestCase):
"""
TestCase for TestRetryManager module
"""
def setUp(self):
    """
    setup for test.
    """
    myThread = threading.currentThread()
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    # Couch databases for job state and framework job report dumps.
    self.testInit.setupCouch("retry_manager_t/jobs", "JobDump")
    self.testInit.setupCouch("retry_manager_t/fwjrs", "FWJRDump")
    # DAOs used by the test assertions and fixtures.
    self.daofactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = myThread.logger,
                                 dbinterface = myThread.dbi)
    self.getJobs = self.daofactory(classname = "Jobs.GetAllJobs")
    self.setJobTime = self.daofactory(classname = "Jobs.SetStateTime")
    self.increaseRetry = self.daofactory(classname = "Jobs.IncrementRetry")
    self.testDir = self.testInit.generateWorkDir()
    self.configFile = EmulatorSetup.setupWMAgentConfig()
    # Number of jobs each fixture job group contains.
    self.nJobs = 10
    return
def tearDown(self):
    """
    Database deletion
    """
    # Remove SQL tables, scratch dir, couch databases and the generated
    # WMAgent config file.
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    self.testInit.tearDownCouch()
    EmulatorSetup.deleteConfig(self.configFile)
    return
def getConfig(self):
    """
    _getConfig_

    Build and return a WMAgent Configuration with the RetryManager,
    ErrorHandler and JobStateMachine sections the component needs.
    """
    config = self.testInit.getConfiguration()
    self.testInit.generateWorkDir(config)
    # First the general stuff
    config.section_("General")
    config.General.workDir = os.getenv("TESTDIR", self.testDir)
    config.section_("CoreDatabase")
    config.CoreDatabase.connectUrl = os.getenv("DATABASE")
    config.CoreDatabase.socket = os.getenv("DBSOCK")
    config.component_("RetryManager")
    config.RetryManager.logLevel = 'DEBUG'
    config.RetryManager.namespace = 'WMComponent.RetryManager.RetryManager'
    config.RetryManager.pollInterval = 10
    # These are the cooloff times for the RetryManager, the times it waits
    # Before attempting resubmission
    config.RetryManager.section_("DefaultRetryAlgo")
    config.RetryManager.DefaultRetryAlgo.section_("default")
    config.RetryManager.DefaultRetryAlgo.default.coolOffTime = {'create': 120, 'submit': 120, 'job': 120}
    # Path to plugin directory
    config.RetryManager.pluginPath = 'WMComponent.RetryManager.PlugIns'
    config.RetryManager.WMCoreBase = WMCore.WMBase.getWMBASE()
    config.RetryManager.componentDir = os.path.join(os.getcwd(), 'Components')
    # ErrorHandler
    # Not essential, but useful for ProcessingAlgo
    config.component_("ErrorHandler")
    config.ErrorHandler.maxRetries = 5
    # JobStateMachine
    config.component_('JobStateMachine')
    config.JobStateMachine.couchurl = os.getenv('COUCHURL', None)
    config.JobStateMachine.couchDBName = "retry_manager_t"
    return config
def createTestJobGroup(self, nJobs, subType = "Processing", retryOnce = False):
"""
_createTestJobGroup_
Creates a group of several jobs
"""
testWorkflow = Workflow(spec = "spec.xml", owner = "Simon",
name = makeUUID(), task="Test")
testWorkflow.create()
testWMBSFileset = Fileset(name = "TestFileset")
testWMBSFileset.create()
testSubscription = Subscription(fileset = testWMBSFileset,
workflow = testWorkflow,
type = subType)
testSubscription.create()
testJobGroup = JobGroup(subscription = testSubscription)
testJobGroup.create()
#.........这里部分代码省略.........
示例9: WMBSHelperTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class WMBSHelperTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("wmbshelper_t/jobs", "JobDump")
    self.testInit.setupCouch("wmbshelper_t/fwjrs", "FWJRDump")
    # Side effect: later code reads the couch db name from the environment.
    os.environ["COUCHDB"] = "wmbshelper_t"
    self.testInit.setSchema(customModules = ["WMCore.WMBS",
                                             "WMComponent.DBSBuffer.Database",
                                             "WMCore.BossAir",
                                             "WMCore.ResourceControl"],
                            useDefault = False)
    self.workDir = self.testInit.generateWorkDir()
    # Fixture workload plus handles to its top-level task / input dataset.
    self.wmspec = self.createWMSpec()
    self.topLevelTask = getFirstTask(self.wmspec)
    self.inputDataset = self.topLevelTask.inputDataset()
    self.dataset = self.topLevelTask.getInputDatasetPath()
    self.dbs = MockDBSReader(self.inputDataset.dbsurl)
    self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = threading.currentThread().logger,
                                 dbinterface = threading.currentThread().dbi)
    return
def tearDown(self):
    """
    _tearDown_
    Clear out the database.
    """
    self.testInit.clearDatabase()
    self.testInit.tearDownCouch()
    self.testInit.delWorkDir()
    return
def setupForKillTest(self, baAPI = None):
"""
_setupForKillTest_
Inject a workflow into WMBS that has a processing task, a merge task and
a cleanup task. Inject files into the various tasks at various
processing states (acquired, complete, available...). Also create jobs
for each subscription in various states.
"""
myThread = threading.currentThread()
daoFactory = DAOFactory(package = "WMCore.WMBS",
logger = myThread.logger,
dbinterface = myThread.dbi)
locationAction = daoFactory(classname = "Locations.New")
changeStateAction = daoFactory(classname = "Jobs.ChangeState")
resourceControl = ResourceControl()
resourceControl.insertSite(siteName = 'site1', seName = 'goodse.cern.ch',
ceName = 'site1', plugin = "TestPlugin")
resourceControl.insertThreshold(siteName = 'site1', taskType = 'Processing', \
maxSlots = 10000)
inputFileset = Fileset("input")
inputFileset.create()
inputFileA = File("lfnA", locations = "goodse.cern.ch")
inputFileB = File("lfnB", locations = "goodse.cern.ch")
inputFileC = File("lfnC", locations = "goodse.cern.ch")
inputFileA.create()
inputFileB.create()
inputFileC.create()
inputFileset.addFile(inputFileA)
inputFileset.addFile(inputFileB)
inputFileset.addFile(inputFileC)
inputFileset.commit()
unmergedOutputFileset = Fileset("unmerged")
unmergedOutputFileset.create()
unmergedFileA = File("ulfnA", locations = "goodse.cern.ch")
unmergedFileB = File("ulfnB", locations = "goodse.cern.ch")
unmergedFileC = File("ulfnC", locations = "goodse.cern.ch")
unmergedFileA.create()
unmergedFileB.create()
unmergedFileC.create()
unmergedOutputFileset.addFile(unmergedFileA)
unmergedOutputFileset.addFile(unmergedFileB)
unmergedOutputFileset.addFile(unmergedFileC)
unmergedOutputFileset.commit()
mainProcWorkflow = Workflow(spec = "spec1", owner = "Steve",
name = "Main", task = "Proc")
mainProcWorkflow.create()
mainProcMergeWorkflow = Workflow(spec = "spec1", owner = "Steve",
name = "Main", task = "ProcMerge")
mainProcMergeWorkflow.create()
mainCleanupWorkflow = Workflow(spec = "spec1", owner = "Steve",
#.........这里部分代码省略.........
示例10: JobArchiverTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class JobArchiverTest(EmulatedUnitTestCase):
"""
TestCase for TestJobArchiver module
"""
_maxMessage = 10
def setUp(self):
    """
    setup for test.

    Builds the WMBS schema, the couch databases used by the job and fwjr
    dumps, the DAO needed by the assertions and a scratch work directory.
    """
    # The original called super().setUp() twice (before and after grabbing
    # the current thread), running the emulated-test bootstrap twice; the
    # duplicate call has been removed.
    super(JobArchiverTest, self).setUp()
    myThread = threading.currentThread()
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMCore.WMBS"],
                            useDefault=False)
    self.testInit.setupCouch("jobarchiver_t_0/jobs", "JobDump")
    self.testInit.setupCouch("jobarchiver_t_0/fwjrs", "FWJRDump")
    self.daofactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
    self.getJobs = self.daofactory(classname="Jobs.GetAllJobs")
    # Keep the work dir around after the run for post-mortem inspection.
    self.testDir = self.testInit.generateWorkDir(deleteOnDestruction=False)
    self.nJobs = 10
    self.configFile = EmulatorSetup.setupWMAgentConfig()
    return
def tearDown(self):
    """
    Database deletion
    """
    # Drop only the WMBS tables, then couch dbs, scratch dir, config file,
    # and finally the emulated base-class teardown.
    self.testInit.clearDatabase(modules=["WMCore.WMBS"])
    self.testInit.tearDownCouch()
    self.testInit.delWorkDir()
    EmulatorSetup.deleteConfig(self.configFile)
    super(JobArchiverTest, self).tearDown()
    return
def getConfig(self):
    """
    _createConfig_
    General config file

    Returns a Configuration with the JobArchiver, JobStateMachine and
    WorkQueueManager sections the component under test expects.
    """
    config = self.testInit.getConfiguration()
    self.testInit.generateWorkDir(config)
    # First the general stuff
    config.section_("General")
    config.General.workDir = os.getenv("TESTDIR", os.getcwd())
    config.General.WorkDir = os.getenv("TESTDIR", os.getcwd())
    # Now the CoreDatabase information
    # This should be the dialect, dburl, etc
    config.section_("CoreDatabase")
    config.CoreDatabase.connectUrl = os.getenv("DATABASE")
    config.CoreDatabase.socket = os.getenv("DBSOCK")
    config.section_("JobStateMachine")
    config.JobStateMachine.couchurl = os.getenv("COUCHURL", "cmssrv48.fnal.gov:5984")
    config.JobStateMachine.couchDBName = "jobarchiver_t_0"
    config.component_("JobArchiver")
    config.JobArchiver.pollInterval = 60
    config.JobArchiver.logLevel = 'INFO'
    # config.JobArchiver.logDir = os.path.join(self.testDir, 'logs')
    config.JobArchiver.componentDir = self.testDir
    config.JobArchiver.numberOfJobsToCluster = 1000
    config.component_('WorkQueueManager')
    config.WorkQueueManager.namespace = "WMComponent.WorkQueueManager.WorkQueueManager"
    config.WorkQueueManager.componentDir = config.General.workDir + "/WorkQueueManager"
    config.WorkQueueManager.level = 'LocalQueue'
    config.WorkQueueManager.logLevel = 'DEBUG'
    config.WorkQueueManager.couchurl = 'https://None'
    config.WorkQueueManager.dbname = 'whatever'
    config.WorkQueueManager.inboxDatabase = 'whatever2'
    config.WorkQueueManager.queueParams = {}
    config.WorkQueueManager.queueParams["ParentQueueCouchUrl"] = "https://cmsweb.cern.ch/couchdb/workqueue"
    return config
def createTestJobGroup(self):
"""
Creates a group of several jobs
"""
testWorkflow = Workflow(spec="spec.xml", owner="Simon",
#.........这里部分代码省略.........
示例11: PileupFetcherTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class PileupFetcherTest(unittest.TestCase):
def setUp(self):
    """
    Initialize the database and couch.

    Creates the "pileupfetcher_t" ConfigCache couch database, installs
    the WMBS schema, opens a connection to the config database, creates
    a scratch work directory, and switches DBS over to its emulator.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("pileupfetcher_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("pileupfetcher_t")
    self.testDir = self.testInit.generateWorkDir()
    # Use the DBS emulator so no real DBS instance is contacted.
    EmulatorHelper.setEmulators(dbs = True)
def tearDown(self):
    """
    Clear out the database.

    Tears down the couch databases and the WMBS schema created in
    setUp, removes the work directory, and restores the real
    (non-emulated) services.
    """
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    EmulatorHelper.resetEmulators()
def injectGenerationConfig(self):
    """
    _injectGenerationConfig_

    Inject a generation config for the MC workflow into the test config
    couch database and return the couch document id of the committed
    config document.
    """
    config = Document()
    config["info"] = None
    config["config"] = None
    config["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    config["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    config["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    # Single output module producing filtered GEN-SIM-RAW output.
    # (A redundant `= None` assignment that was immediately overwritten
    # here has been removed.)
    config["pset_tweak_details"] = \
        {"process": {"outputModules_": ["OutputA"],
                     "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                             "dataTier": "GEN-SIM-RAW"}}}}
    result = self.configDatabase.commitOne(config)
    return result[0]["id"]
def _queryAndCompareWithDBS(self, pileupDict, defaultArguments, dbsUrl):
"""
pileupDict is a Python dictionary containing particular pileup
configuration information. Query DBS on given dataset contained
now in both input defaultArguments as well as in the pileupDict
and compare values.
"""
args = {}
args["version"] = "DBS_2_0_9"
args["mode"] = "GET"
reader = DBSReader(dbsUrl, **args)
inputArgs = defaultArguments["PileupConfig"]
self.assertEqual(len(inputArgs), len(pileupDict),
"Number of pileup types different.")
for pileupType in inputArgs:
m = ("pileup type '%s' not in PileupFetcher-produced pileup "
"configuration: '%s'" % (pileupType, pileupDict))
self.assertTrue(pileupType in pileupDict, m)
# now query DBS for compare actual results on files lists for each
# pileup type and dataset and location (storage element names)
# pileupDict is saved in the file and now comparing items of this
# configuration with actual DBS results, the structure of pileupDict:
# {"pileupTypeA": {"BlockA": {"FileList": [], "StorageElementNames": []},
# "BlockB": {"FileList": [], "StorageElementName": []}, ....}
for pileupType, datasets in inputArgs.items():
# this is from the pileup configuration produced by PileupFetcher
blockDict = pileupDict[pileupType]
for dataset in datasets:
dbsFileBlocks = reader.listFileBlocks(dataset = dataset)
for dbsFileBlockName in dbsFileBlocks:
fileList = [] # list of files in the block (dbsFile["LogicalFileName"])
storageElemNames = set() # list of StorageElementName
# each DBS block has a list under 'StorageElementList', iterate over
storageElements = reader.listFileBlockLocation(dbsFileBlockName)
for storElem in storageElements:
storageElemNames.add(storElem)
# now get list of files in the block
dbsFiles = reader.listFilesInBlock(dbsFileBlockName)
for dbsFile in dbsFiles:
fileList.append(dbsFile["LogicalFileName"])
# now compare the sets:
m = ("StorageElementNames don't agree for pileup type '%s', "
"dataset '%s' in configuration: '%s'" % (pileupType, dataset, pileupDict))
self.assertEqual(set(blockDict[dbsFileBlockName]["StorageElementNames"]), storageElemNames, m)
m = ("FileList don't agree for pileup type '%s', dataset '%s' "
" in configuration: '%s'" % (pileupType, dataset, pileupDict))
print fileList
print blockDict[dbsFileBlockName]["FileList"]
self.assertEqual(sorted(blockDict[dbsFileBlockName]["FileList"]), sorted(fileList))
#.........这里部分代码省略.........
示例12: RepackTests
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class RepackTests(unittest.TestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database and couch: install the WMBS schema, create
    a scratch work directory, and build the WMBS DAOs the test methods
    use to inspect what WMBSHelper created.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMCore.WMBS"],
                            useDefault=False)
    self.testDir = self.testInit.generateWorkDir()

    myThread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
    # DAOs for querying workflows, filesets and subscriptions in WMBS.
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database schema created in setUp and remove the
    scratch work directory.
    """
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def testRepack(self):
"""
_testRepack_
Create a Repack workflow
and verify it installs into WMBS correctly.
"""
testArguments = RepackWorkloadFactory.getTestArguments()
testArguments.update(deepcopy(REQUEST))
factory = RepackWorkloadFactory()
testWorkload = factory.factoryWorkloadConstruction("TestWorkload", testArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "T0")
testWMBSHelper = WMBSHelper(testWorkload, "Repack", cachepath=self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
repackWorkflow = Workflow(name="TestWorkload",
task="/TestWorkload/Repack")
repackWorkflow.load()
self.assertEqual(len(repackWorkflow.outputMap.keys()), len(testArguments["Outputs"]) + 1,
"Error: Wrong number of WF outputs in the Repack WF.")
goldenOutputMods = {"write_PrimaryDataset1_RAW": "RAW", "write_PrimaryDataset2_RAW": "RAW"}
for goldenOutputMod, tier in goldenOutputMods.items():
fset = goldenOutputMod + tier
mergedOutput = repackWorkflow.outputMap[fset][0]["merged_output_fileset"]
unmergedOutput = repackWorkflow.outputMap[fset][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
if goldenOutputMod != "write_PrimaryDataset1_RAW":
self.assertEqual(mergedOutput.name,
"/TestWorkload/Repack/RepackMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Repack/unmerged-%s" % fset,
"Error: Unmerged output fileset is wrong: %s" % unmergedOutput.name)
logArchOutput = repackWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = repackWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Repack/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Repack/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
for goldenOutputMod, tier in goldenOutputMods.items():
mergeWorkflow = Workflow(name="TestWorkload",
task="/TestWorkload/Repack/RepackMerge%s" % goldenOutputMod)
mergeWorkflow.load()
self.assertEqual(len(mergeWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
mergedMergeOutput = mergeWorkflow.outputMap["Merged%s" % tier][0]["merged_output_fileset"]
unmergedMergeOutput = mergeWorkflow.outputMap["Merged%s" % tier][0]["output_fileset"]
mergedMergeOutput.loadData()
unmergedMergeOutput.loadData()
self.assertEqual(mergedMergeOutput.name,
"/TestWorkload/Repack/RepackMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
#.........这里部分代码省略.........
示例13: DBSUploadTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class DBSUploadTest(unittest.TestCase):
"""
TestCase for DBSUpload module
Note:
This fails if you use the in-memory syntax for sqlite
i.e. (DATABASE = sqlite://)
"""
_maxMessage = 10
def setUp(self):
    """
    _setUp_

    setUp function for unittest: install the DBS3Buffer and Agent
    database schemas plus the couch apps, register a few file
    locations, register a heartbeat component, and commit a small test
    config into the config cache (its couch id is recorded in
    self.configURL).
    """
    # Set constants
    self.couchDB = "config_test"
    self.configURL = "RANDOM;;URL;;NAME"
    self.configString = "This is a random string"

    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMComponent.DBS3Buffer", "WMCore.Agent.Database"], useDefault=False)
    self.testInit.setupCouch(self.couchDB, "GroupUser", "ConfigCache")

    myThread = threading.currentThread()
    self.bufferFactory = DAOFactory(
        package="WMComponent.DBSBuffer.Database", logger=myThread.logger, dbinterface=myThread.dbi
    )

    # Pre-populate the buffer with known file locations.
    locationAction = self.bufferFactory(classname="DBSBufferFiles.AddLocation")
    locationAction.execute(siteName="se1.cern.ch")
    locationAction.execute(siteName="se1.fnal.gov")
    locationAction.execute(siteName="malpaquet")

    # Set heartbeat
    self.componentName = "JobSubmitter"
    self.heartbeatAPI = HeartbeatAPI(self.componentName)
    self.heartbeatAPI.registerComponent()

    # Set up a config cache
    configCache = ConfigCache(os.environ["COUCHURL"], couchDBName=self.couchDB)
    configCache.createUserGroup(groupname="testGroup", username="testOps")
    self.testDir = self.testInit.generateWorkDir()

    psetPath = os.path.join(self.testDir, "PSet.txt")
    # Context manager guarantees the handle is closed even if the write
    # raises (the previous open/write/close leaked the handle on error).
    with open(psetPath, "w") as f:
        f.write(self.configString)

    configCache.addConfig(newConfig=psetPath, psetHash=None)
    configCache.save()
    self.configURL = "%s;;%s;;%s" % (os.environ["COUCHURL"], self.couchDB, configCache.getCouchID())
    return
def tearDown(self):
    """
    _tearDown_

    tearDown function for unittest: drop the schemas installed in
    setUp.
    """
    # NOTE(review): the couch databases created by setupCouch in setUp
    # are not torn down here -- confirm that is intentional.
    self.testInit.clearDatabase(modules=["WMComponent.DBS3Buffer", "WMCore.Agent.Database"])
def createConfig(self):
"""
_createConfig_
This creates the actual config file used by the component
"""
config = Configuration()
# First the general stuff
config.section_("General")
config.General.workDir = os.getenv("TESTDIR", os.getcwd())
config.section_("Agent")
config.Agent.componentName = "DBSUpload"
config.Agent.useHeartbeat = False
# Now the CoreDatabase information
# This should be the dialect, dburl, etc
config.section_("CoreDatabase")
config.CoreDatabase.connectUrl = os.getenv("DATABASE")
config.CoreDatabase.socket = os.getenv("DBSOCK")
config.component_("DBSUpload")
config.DBSUpload.pollInterval = 10
config.DBSUpload.logLevel = "ERROR"
config.DBSUpload.maxThreads = 1
config.DBSUpload.namespace = "WMComponent.DBSUpload.DBSUpload"
config.DBSUpload.componentDir = os.path.join(os.getcwd(), "Components")
config.DBSUpload.workerThreads = 4
#.........这里部分代码省略.........
示例14: PromptRecoTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class PromptRecoTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database and couch: create the "promptreco_t"
    ConfigCache couch database, install the WMBS schema, connect to the
    config database, and create a scratch work directory.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("promptreco_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("promptreco_t")
    self.testDir = self.testInit.generateWorkDir()
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database: tear down the couch databases and the WMBS
    schema, then remove the scratch work directory.
    """
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def setupPromptSkimConfigObject(self):
    """
    _setupPromptSkimConfigObject_

    Build a Tier1Skim ConfigSection describing a single test skim and
    store it on the instance as self.promptSkim for use by the skim
    tests.
    """
    skimConfig = ConfigSection(name="Tier1Skim")
    skimConfig.SkimName = "TestSkim1"
    skimConfig.DataTier = "RECO"
    skimConfig.TwoFileRead = False
    skimConfig.ProcessingVersion = "PromptSkim-v1"
    skimConfig.ConfigURL = "http://cmssw.cvs.cern.ch/cgi-bin/cmssw.cgi/CMSSW/Configuration/DataOps/python/prescaleskimmer.py?revision=1.1"
    self.promptSkim = skimConfig
def testPromptReco(self):
"""
_testPromptReco_
Create a Prompt Reconstruction workflow
and verify it installs into WMBS correctly.
"""
testArguments = getTestArguments()
testWorkload = promptrecoWorkload("TestWorkload", testArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "T0")
testWMBSHelper = WMBSHelper(testWorkload, "Reco", "SomeBlock", cachepath = self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
recoWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Reco")
recoWorkflow.load()
self.assertEqual(len(recoWorkflow.outputMap.keys()), len(testArguments["WriteTiers"]) + 1,
"Error: Wrong number of WF outputs in the Reco WF.")
goldenOutputMods = ["write_RECO", "write_ALCARECO", "write_AOD", "write_DQM"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = recoWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = recoWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
if goldenOutputMod != "write_ALCARECO":
self.assertEqual(mergedOutput.name, "/TestWorkload/Reco/RecoMerge%s/merged-Merged" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Reco/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong: %s" % unmergedOutput.name)
logArchOutput = recoWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = recoWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Reco/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Reco/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
alcaSkimWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Reco/AlcaSkim")
alcaSkimWorkflow.load()
self.assertEqual(len(alcaSkimWorkflow.outputMap.keys()), len(testArguments["AlcaSkims"]) + 1,
"Error: Wrong number of WF outputs in the AlcaSkim WF.")
goldenOutputMods = []
for alcaProd in testArguments["AlcaSkims"]:
goldenOutputMods.append("ALCARECOStream%s" % alcaProd)
for goldenOutputMod in goldenOutputMods:
mergedOutput = alcaSkimWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
#.........这里部分代码省略.........
示例15: JobSubmitterTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import clearDatabase [as 别名]
class JobSubmitterTest(unittest.TestCase):
"""
_JobSubmitterTest_
Test class for the JobSubmitterPoller
"""
def setUp(self):
    """
    _setUp_

    Standard setup: Now with 100% more couch.

    Installs the WMBS/BossAir/ResourceControl/Agent schemas and the job
    dump, FWJR dump and WMStats couch apps, builds the WMBS and BossAir
    DAO factories, creates a scratch work directory, and registers a
    heartbeat for the JobSubmitter component.
    """
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl", "WMCore.Agent.Database"])
    self.testInit.setupCouch("jobsubmitter_t/jobs", "JobDump")
    self.testInit.setupCouch("jobsubmitter_t/fwjrs", "FWJRDump")
    self.testInit.setupCouch("wmagent_summary_t", "WMStats")

    myThread = threading.currentThread()
    self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = myThread.logger,
                                 dbinterface = myThread.dbi)
    self.baDaoFactory = DAOFactory(package = "WMCore.BossAir",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)

    self.testDir = self.testInit.generateWorkDir()

    # Set heartbeat
    self.componentName = 'JobSubmitter'
    self.heartbeatAPI = HeartbeatAPI(self.componentName)
    self.heartbeatAPI.registerComponent()
    return
def tearDown(self):
    """
    _tearDown_

    Standard tearDown: clear the database schemas, remove the work
    directory, and tear down the couch databases created in setUp.
    """
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    self.testInit.tearDownCouch()
    return
def setResourceThresholds(self, site, **options):
    """
    _setResourceThresholds_

    Utility to set resource thresholds: insert *site* into resource
    control with the supplied slot counts, add per-task-type
    thresholds, and optionally set the site state. When no keyword
    options are given, a default set of Processing/Merge thresholds
    with a 'Normal' state is used.
    """
    defaults = {'state' : 'Normal',
                'runningSlots' : 10,
                'pendingSlots' : 5,
                'tasks' : ['Processing', 'Merge'],
                'Processing' : {'pendingSlots' : 5,
                                'runningSlots' : 10},
                'Merge' : {'pendingSlots' : 2,
                           'runningSlots' : 5}}
    opts = options if options else defaults

    control = ResourceControl()
    # Register the site itself with its global slot counts.
    control.insertSite(siteName = site, seName = 'se.%s' % (site),
                       ceName = site, plugin = "MockPlugin",
                       pendingSlots = opts['pendingSlots'],
                       runningSlots = opts['runningSlots'], cmsName = site)
    # One threshold row per requested task type.
    for taskType in opts['tasks']:
        control.insertThreshold(siteName = site, taskType = taskType,
                                maxSlots = opts[taskType]['runningSlots'],
                                pendingSlots = opts[taskType]['pendingSlots'])
    # Only change the site state when one was provided (and non-empty).
    siteState = opts.get('state')
    if siteState:
        control.changeSiteState(site, siteState)
    return
def createJobGroups(self, nSubs, nJobs, task, workloadSpec, site,
bl = [], wl = [], taskType = 'Processing', name = None):
"""
_createJobGroups_
Creates a series of jobGroups for submissions
"""
jobGroupList = []
if name is None:
name = makeUUID()
testWorkflow = Workflow(spec = workloadSpec, owner = "mnorman",
name = name, task = "basicWorkload/Production")
testWorkflow.create()
# Create subscriptions
for _ in range(nSubs):
name = makeUUID()
#.........这里部分代码省略.........