本文整理汇总了Python中WMQuality.TestInitCouchApp.TestInitCouchApp.generateWorkDir方法的典型用法代码示例。如果您正苦于以下问题:Python TestInitCouchApp.generateWorkDir方法的具体用法?Python TestInitCouchApp.generateWorkDir怎么用?Python TestInitCouchApp.generateWorkDir使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类WMQuality.TestInitCouchApp.TestInitCouchApp的用法示例。
在下文中一共展示了TestInitCouchApp.generateWorkDir方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: ConfigurationTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class ConfigurationTest(unittest.TestCase):
    """
    Test case for the Configuration object.
    """

    def setUp(self):
        """Create a scratch work directory and derive the save-file paths used by tests."""
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()
        self.normalSave = "%s/WMCore_Agent_Configuration_t_normal.py" % self.testDir
        self.docSave = "%s/WMCore_Agent_Configuration_t_documented.py" % self.testDir
        self.commentSave = "%s/WMCore_Agent_Configuration_t_commented.py" % self.testDir

    def tearDown(self):
        """Remove the scratch work directory created in setUp."""
        self.testInit.delWorkDir()

    def testA(self):
        """Instantiating Configuration must not raise."""
        try:
            config = Configuration()
        except Exception as ex:  # was 'except Exception, ex': Python-2-only syntax, a SyntaxError on Python 3
            msg = "Failed to instantiate Configuration\n"
            msg += str(ex)
            self.fail(msg)
示例2: MonteCarloTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class MonteCarloTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("montecarlo_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    # NOTE(review): couch was set up above as "montecarlo_t" but the config
    # database connects to "rereco_t" -- confirm this mismatch is intentional.
    self.configDatabase = couchServer.connectDatabase("rereco_t")
    # Only the DBS emulator is enabled here; tearDown resets it.
    EmulatorHelper.setEmulators(dbs = True)
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database.
    """
    # Undo everything setUp created: couch databases, SQL schema, work dir,
    # and the DBS emulator enabled in setUp.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    EmulatorHelper.resetEmulators()
    return
def injectMonteCarloConfig(self):
    """
    _injectMonteCarlo_

    Create a bogus config cache document for the montecarlo generation and
    inject it into couch. Return the ID of the document.
    """
    # Tweak details describing two output modules and their datasets.
    tweaks = {"process": {"outputModules_": ["OutputA", "OutputB"],
                          "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                                  "dataTier": "RECO"}},
                          "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                                  "dataTier": "USER"}}}}
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    doc["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    doc["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    doc["pset_tweak_details"] = tweaks
    committed = self.configDatabase.commitOne(doc)
    return committed[0]["id"]
def _commonMonteCarloTest(self):
"""
Retrieve the workload from WMBS and test all its properties.
"""
prodWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Production")
prodWorkflow.load()
self.assertEqual(len(prodWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
goldenOutputMods = ["OutputA", "OutputB"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = prodWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = prodWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name, "/TestWorkload/Production/ProductionMerge%s/merged-Merged" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Production/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong.")
logArchOutput = prodWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = prodWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
for goldenOutputMod in goldenOutputMods:
mergeWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Production/ProductionMerge%s" % goldenOutputMod)
mergeWorkflow.load()
self.assertEqual(len(mergeWorkflow.outputMap.keys()), 2,
#.........这里部分代码省略.........
示例3: PrivateMCTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class PrivateMCTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("privatemc_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("privatemc_t")
    # Scratch directory handed to WMBSHelper as cachepath; removed in tearDown.
    self.testDir = self.testInit.generateWorkDir()
    return
def injectAnalysisConfig(self):
    """
    Create a bogus config cache document for the analysis workflow and
    inject it into couch. Return the ID of the document.
    """
    # Tweak details describing two output modules and their datasets.
    tweaks = {"process": {"outputModules_": ["OutputA", "OutputB"],
                          "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                                  "dataTier": "RECO"}},
                          "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                                  "dataTier": "USER"}}}}
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["pset_hash"] = "21cb400c6ad63c3a97fa93f8e8785127"
    doc["owner"] = {"group": "Analysis", "user": "mmascher"}
    doc["pset_tweak_details"] = tweaks
    committed = self.configDatabase.commitOne(doc)
    return committed[0]["id"]
def tearDown(self):
    """
    _tearDown_

    Clear out the database.
    """
    # Reverse of setUp: drop couch databases, SQL schema and the work dir.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def testPrivateMC(self):
"""
_testAnalysis_
"""
defaultArguments = getTestArguments()
defaultArguments["CouchURL"] = os.environ["COUCHURL"]
defaultArguments["CouchDBName"] = "privatemc_t"
defaultArguments["AnalysisConfigCacheDoc"] = self.injectAnalysisConfig()
defaultArguments["ProcessingVersion"] = 1
processingFactory = PrivateMCWorkloadFactory()
testWorkload = processingFactory("TestWorkload", defaultArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "DMWM")
testWMBSHelper = WMBSHelper(testWorkload, "PrivateMC", "SomeBlock", cachepath = self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
procWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/PrivateMC")
procWorkflow.load()
self.assertEqual(len(procWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs: %s" % len(procWorkflow.outputMap.keys()))
logArchOutput = procWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]#Actually Analysis does not have a merge task
unmergedLogArchOutput = procWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/PrivateMC/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/PrivateMC/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
goldenOutputMods = ["OutputA", "OutputB"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name, "/TestWorkload/PrivateMC/unmerged-%s" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/PrivateMC/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong.")
topLevelFileset = Fileset(name = "TestWorkload-PrivateMC-SomeBlock")
topLevelFileset.loadData()
procSubscription = Subscription(fileset = topLevelFileset, workflow = procWorkflow)
procSubscription.loadData()
#.........这里部分代码省略.........
示例4: MonteCarloTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class MonteCarloTest(EmulatedUnitTestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    super(MonteCarloTest, self).setUp()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch(TEST_DB_NAME, "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase(TEST_DB_NAME)
    myThread = threading.currentThread()
    # WMBS DAOs used by the assertions in the test methods.
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
    self.listTasksByWorkflow = self.daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = self.daoFactory(classname="Fileset.List")
    self.listSubsMapping = self.daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database.
    """
    # Reverse of setUp, then defer to the emulated base class teardown.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    super(MonteCarloTest, self).tearDown()
    return
def injectMonteCarloConfig(self):
    """
    _injectMonteCarlo_

    Create a bogus config cache document for the montecarlo generation and
    inject it into couch. Return the ID of the document.
    """
    # Tweak details describing two output modules and their datasets.
    tweaks = {"process": {"outputModules_": ["OutputA", "OutputB"],
                          "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                                  "dataTier": "RECO"}},
                          "OutputB": {"dataset": {"filterName": "OutputBFilter",
                                                  "dataTier": "USER"}}}}
    doc = Document()
    doc["info"] = None
    doc["config"] = None
    doc["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    doc["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    doc["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    doc["pset_tweak_details"] = tweaks
    committed = self.configDatabase.commitOne(doc)
    return committed[0]["id"]
def _commonMonteCarloTest(self):
"""
Retrieve the workload from WMBS and test all its properties.
"""
goldenOutputMods = {"OutputA": "RECO", "OutputB": "USER"}
prodWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/Production")
prodWorkflow.load()
self.assertEqual(len(prodWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
for goldenOutputMod, tier in goldenOutputMods.items():
fset = goldenOutputMod + tier
mergedOutput = prodWorkflow.outputMap[fset][0]["merged_output_fileset"]
unmergedOutput = prodWorkflow.outputMap[fset][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
self.assertEqual(mergedOutput.name,
"/TestWorkload/Production/ProductionMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Production/unmerged-%s" % (goldenOutputMod + tier),
"Error: Unmerged output fileset is wrong.")
logArchOutput = prodWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = prodWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Production/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
#.........这里部分代码省略.........
示例5: ReDigiTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class ReDigiTest(EmulatedUnitTestCase):
def setUp(self):
    """
    _setUp_

    Initialize the database and couch.
    """
    super(ReDigiTest, self).setUp()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("redigi_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    self.testInit.generateWorkDir()
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("redigi_t")
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database.
    """
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    # NOTE(review): resetEmulators is called here but setUp never calls
    # setEmulators -- presumably EmulatedUnitTestCase enables them; confirm.
    EmulatorHelper.resetEmulators()
    super(ReDigiTest, self).tearDown()
    return
def testDependentReDigi(self):
"""
_testDependentReDigi_
Verfiy that a dependent ReDigi workflow that keeps stages out
RAW data is created and installed into WMBS correctly.
"""
defaultArguments = ReDigiWorkloadFactory.getTestArguments()
defaultArguments["CouchURL"] = os.environ["COUCHURL"]
defaultArguments["CouchDBName"] = "redigi_t"
configs = injectReDigiConfigs(self.configDatabase)
defaultArguments["StepOneConfigCacheID"] = configs[0]
defaultArguments["StepTwoConfigCacheID"] = configs[1]
defaultArguments["StepThreeConfigCacheID"] = configs[2]
defaultArguments["StepOneOutputModuleName"] = "RAWDEBUGoutput"
defaultArguments["StepTwoOutputModuleName"] = "RECODEBUGoutput"
factory = ReDigiWorkloadFactory()
testWorkload = factory.factoryWorkloadConstruction("TestWorkload", defaultArguments)
testWMBSHelper = WMBSHelper(testWorkload, "StepOneProc", "SomeBlock", cachepath = self.testInit.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
topLevelFileset = Fileset(name = "TestWorkload-StepOneProc-SomeBlock")
topLevelFileset.loadData()
stepOneUnmergedRAWFileset = Fileset(name = "/TestWorkload/StepOneProc/unmerged-RAWDEBUGoutput")
stepOneUnmergedRAWFileset.loadData()
stepOneMergedRAWFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/merged-Merged")
stepOneMergedRAWFileset.loadData()
stepOneLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/unmerged-logArchive")
stepOneLogArchiveFileset.loadData()
stepOneMergeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/merged-logArchive")
stepOneMergeLogArchiveFileset.loadData()
stepTwoUnmergedDQMFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-DQMoutput")
stepTwoUnmergedDQMFileset.loadData()
stepTwoUnmergedRECOFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-RECODEBUGoutput")
stepTwoUnmergedRECOFileset.loadData()
stepTwoMergedDQMFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeDQMoutput/merged-Merged")
stepTwoMergedDQMFileset.loadData()
stepTwoMergedRECOFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/merged-Merged")
stepTwoMergedRECOFileset.loadData()
stepTwoLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/unmerged-logArchive")
stepTwoLogArchiveFileset.loadData()
stepTwoMergeDQMLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeDQMoutput/merged-logArchive")
stepTwoMergeDQMLogArchiveFileset.loadData()
stepTwoMergeRECOLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/merged-logArchive")
stepTwoMergeRECOLogArchiveFileset.loadData()
stepThreeUnmergedAODFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/unmerged-aodOutputModule")
stepThreeUnmergedAODFileset.loadData()
stepThreeMergedAODFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/StepThreeProcMergeaodOutputModule/merged-Merged")
stepThreeMergedAODFileset.loadData()
stepThreeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/unmerged-logArchive")
stepThreeLogArchiveFileset.loadData()
stepThreeMergeLogArchiveFileset = Fileset(name = "/TestWorkload/StepOneProc/StepOneProcMergeRAWDEBUGoutput/StepTwoProc/StepTwoProcMergeRECODEBUGoutput/StepThreeProc/StepThreeProcMergeaodOutputModule/merged-logArchive")
stepThreeMergeLogArchiveFileset.loadData()
stepOneWorkflow = Workflow(spec = "somespec", name = "TestWorkload",
task = "/TestWorkload/StepOneProc")
stepOneWorkflow.load()
self.assertEqual(stepOneWorkflow.wfType, 'reprocessing')
self.assertTrue("logArchive" in stepOneWorkflow.outputMap.keys(),
#.........这里部分代码省略.........
示例6: CouchappTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class CouchappTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_

    Set up WMBS schema plus the couch databases (workload summary, job dump,
    FWJR dump), a ChangeState handler wired to them, and a scratch work dir.
    """
    # (removed) myThread = threading.currentThread() was assigned but never used
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    self.databaseName = "couchapp_t_0"
    self.testInit.setupCouch(self.databaseName, "WorkloadSummary")
    self.testInit.setupCouch("%s/jobs" % self.databaseName, "JobDump")
    self.testInit.setupCouch("%s/fwjrs" % self.databaseName, "FWJRDump")
    # Setup config for couch connections
    config = self.testInit.getConfiguration()
    config.section_("JobStateMachine")
    config.JobStateMachine.couchDBName = self.databaseName
    # Create couch server and connect to databases
    self.couchdb = CouchServer(config.JobStateMachine.couchurl)
    self.jobsdatabase = self.couchdb.connectDatabase("%s/jobs" % config.JobStateMachine.couchDBName)
    self.fwjrdatabase = self.couchdb.connectDatabase("%s/fwjrs" % config.JobStateMachine.couchDBName)
    # Create changeState
    self.changeState = ChangeState(config)
    self.config = config
    # Create testDir
    self.testDir = self.testInit.generateWorkDir()
    return
def tearDown(self):
    """Drop the WMBS schema and the scratch work dir created in setUp."""
    self.testInit.clearDatabase(modules = ["WMCore.WMBS"])
    self.testInit.delWorkDir()
    # Couch teardown is deliberately left disabled here.
    #self.testInit.tearDownCouch()
    return
def createWorkload(self, workloadName = 'Test', emulator = True):
    """
    _createTestWorkload_

    Creates a test workload for us to run on, hold the basic necessities.
    """
    # Build the canned Tier1 ReReco workload and materialize its tasks
    # under the test directory, skipping subscriptions.
    wl = testWorkload("Tier1ReReco")
    maker = TaskMaker(wl, os.path.join(self.testDir, 'workloadTest'))
    maker.skipSubscription = True
    maker.processWorkload()
    wl.save(workloadName)
    return wl
def createTestJobGroup(self, name = "TestWorkthrough",
specLocation = "spec.xml", error = False,
task = "/TestWorkload/ReReco", nJobs = 10):
"""
_createTestJobGroup_
Generate a test WMBS JobGroup with real FWJRs
"""
myThread = threading.currentThread()
testWorkflow = Workflow(spec = specLocation, owner = "Simon",
name = name, task = task)
testWorkflow.create()
testWMBSFileset = Fileset(name = name)
testWMBSFileset.create()
testFileA = File(lfn = makeUUID(), size = 1024, events = 10)
testFileA.addRun(Run(10, *[12312]))
testFileA.setLocation('malpaquet')
testFileB = File(lfn = makeUUID(), size = 1024, events = 10)
testFileB.addRun(Run(10, *[12312]))
testFileB.setLocation('malpaquet')
testFileA.create()
testFileB.create()
testWMBSFileset.addFile(testFileA)
testWMBSFileset.addFile(testFileB)
testWMBSFileset.commit()
testWMBSFileset.markOpen(0)
testSubscription = Subscription(fileset = testWMBSFileset,
workflow = testWorkflow)
testSubscription.create()
testJobGroup = JobGroup(subscription = testSubscription)
testJobGroup.create()
#.........这里部分代码省略.........
示例7: JobSubmitterTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class JobSubmitterTest(unittest.TestCase):
"""
_JobSubmitterTest_
Test class for the JobSubmitterPoller
"""
def setUp(self):
    """
    _setUp_

    Standard setup: Now with 100% more couch
    """
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl", "WMCore.Agent.Database"])
    self.testInit.setupCouch("jobsubmitter_t/jobs", "JobDump")
    self.testInit.setupCouch("jobsubmitter_t/fwjrs", "FWJRDump")
    self.testInit.setupCouch("wmagent_summary_t", "WMStats")
    myThread = threading.currentThread()
    # DAO factories for the WMBS and BossAir queries used by the tests.
    self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = myThread.logger,
                                 dbinterface = myThread.dbi)
    self.baDaoFactory = DAOFactory(package = "WMCore.BossAir",
                                   logger = myThread.logger,
                                   dbinterface = myThread.dbi)
    self.testDir = self.testInit.generateWorkDir()
    # Set heartbeat
    self.componentName = 'JobSubmitter'
    self.heartbeatAPI = HeartbeatAPI(self.componentName)
    self.heartbeatAPI.registerComponent()
    return
def tearDown(self):
    """
    _tearDown_

    Standard tearDown
    """
    # Reverse of setUp: SQL schema, work dir, then couch databases.
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    self.testInit.tearDownCouch()
    return
def setResourceThresholds(self, site, **options):
    """
    _setResourceThresholds_

    Utility to set resource thresholds
    """
    # Fall back to a default Normal-state configuration when the caller
    # supplied no keyword options at all.
    defaults = {'state' : 'Normal',
                'runningSlots' : 10,
                'pendingSlots' : 5,
                'tasks' : ['Processing', 'Merge'],
                'Processing' : {'pendingSlots' : 5,
                                'runningSlots' : 10},
                'Merge' : {'pendingSlots' : 2,
                           'runningSlots' : 5}}
    if not options:
        options = defaults
    rc = ResourceControl()
    rc.insertSite(siteName = site, seName = 'se.%s' % site,
                  ceName = site, plugin = "MockPlugin",
                  pendingSlots = options['pendingSlots'],
                  runningSlots = options['runningSlots'], cmsName = site)
    # Per-task-type thresholds.
    for taskType in options['tasks']:
        taskOpts = options[taskType]
        rc.insertThreshold(siteName = site, taskType = taskType,
                           maxSlots = taskOpts['runningSlots'],
                           pendingSlots = taskOpts['pendingSlots'])
    state = options.get('state')
    if state:
        rc.changeSiteState(site, state)
    return
def createJobGroups(self, nSubs, nJobs, task, workloadSpec, site,
bl = [], wl = [], taskType = 'Processing', name = None):
"""
_createJobGroups_
Creates a series of jobGroups for submissions
"""
jobGroupList = []
if name is None:
name = makeUUID()
testWorkflow = Workflow(spec = workloadSpec, owner = "mnorman",
name = name, task = "basicWorkload/Production")
testWorkflow.create()
# Create subscriptions
for _ in range(nSubs):
name = makeUUID()
#.........这里部分代码省略.........
示例8: WMBSHelperTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class WMBSHelperTest(unittest.TestCase):
def setUp(self):
    """
    _setUp_
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("wmbshelper_t/jobs", "JobDump")
    self.testInit.setupCouch("wmbshelper_t/fwjrs", "FWJRDump")
    # Exported for code under test that reads the COUCHDB env var.
    os.environ["COUCHDB"] = "wmbshelper_t"
    self.testInit.setSchema(customModules = ["WMCore.WMBS",
                                             "WMComponent.DBSBuffer.Database",
                                             "WMCore.BossAir",
                                             "WMCore.ResourceControl"],
                            useDefault = False)
    self.workDir = self.testInit.generateWorkDir()
    # Build a test spec and cache handles to its top-level task and dataset.
    self.wmspec = self.createWMSpec()
    self.topLevelTask = getFirstTask(self.wmspec)
    self.inputDataset = self.topLevelTask.inputDataset()
    self.dataset = self.topLevelTask.getInputDatasetPath()
    self.dbs = MockDBSReader(self.inputDataset.dbsurl)
    self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = threading.currentThread().logger,
                                 dbinterface = threading.currentThread().dbi)
    return
def tearDown(self):
    """
    _tearDown_

    Clear out the database.
    """
    # Reverse of setUp: SQL schema, couch databases, work dir.
    self.testInit.clearDatabase()
    self.testInit.tearDownCouch()
    self.testInit.delWorkDir()
    return
def setupForKillTest(self, baAPI = None):
"""
_setupForKillTest_
Inject a workflow into WMBS that has a processing task, a merge task and
a cleanup task. Inject files into the various tasks at various
processing states (acquired, complete, available...). Also create jobs
for each subscription in various states.
"""
myThread = threading.currentThread()
daoFactory = DAOFactory(package = "WMCore.WMBS",
logger = myThread.logger,
dbinterface = myThread.dbi)
locationAction = daoFactory(classname = "Locations.New")
changeStateAction = daoFactory(classname = "Jobs.ChangeState")
resourceControl = ResourceControl()
resourceControl.insertSite(siteName = 'site1', seName = 'goodse.cern.ch',
ceName = 'site1', plugin = "TestPlugin")
resourceControl.insertThreshold(siteName = 'site1', taskType = 'Processing', \
maxSlots = 10000)
inputFileset = Fileset("input")
inputFileset.create()
inputFileA = File("lfnA", locations = "goodse.cern.ch")
inputFileB = File("lfnB", locations = "goodse.cern.ch")
inputFileC = File("lfnC", locations = "goodse.cern.ch")
inputFileA.create()
inputFileB.create()
inputFileC.create()
inputFileset.addFile(inputFileA)
inputFileset.addFile(inputFileB)
inputFileset.addFile(inputFileC)
inputFileset.commit()
unmergedOutputFileset = Fileset("unmerged")
unmergedOutputFileset.create()
unmergedFileA = File("ulfnA", locations = "goodse.cern.ch")
unmergedFileB = File("ulfnB", locations = "goodse.cern.ch")
unmergedFileC = File("ulfnC", locations = "goodse.cern.ch")
unmergedFileA.create()
unmergedFileB.create()
unmergedFileC.create()
unmergedOutputFileset.addFile(unmergedFileA)
unmergedOutputFileset.addFile(unmergedFileB)
unmergedOutputFileset.addFile(unmergedFileC)
unmergedOutputFileset.commit()
mainProcWorkflow = Workflow(spec = "spec1", owner = "Steve",
name = "Main", task = "Proc")
mainProcWorkflow.create()
mainProcMergeWorkflow = Workflow(spec = "spec1", owner = "Steve",
name = "Main", task = "ProcMerge")
mainProcMergeWorkflow.create()
mainCleanupWorkflow = Workflow(spec = "spec1", owner = "Steve",
#.........这里部分代码省略.........
示例9: JobArchiverTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class JobArchiverTest(EmulatedUnitTestCase):
"""
TestCase for TestJobArchiver module
"""
_maxMessage = 10
def setUp(self):
    """
    setup for test.

    Set up WMBS schema, the job/FWJR couch databases, the DAOs used by the
    tests, a scratch work directory and an emulated WMAgent config file.
    """
    # super().setUp() was previously invoked twice back-to-back; once is enough.
    super(JobArchiverTest, self).setUp()
    myThread = threading.currentThread()
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMCore.WMBS"],
                            useDefault=False)
    self.testInit.setupCouch("jobarchiver_t_0/jobs", "JobDump")
    self.testInit.setupCouch("jobarchiver_t_0/fwjrs", "FWJRDump")
    self.daofactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
    self.getJobs = self.daofactory(classname="Jobs.GetAllJobs")
    # deleteOnDestruction=False: the dir survives for post-mortem inspection;
    # tearDown removes it explicitly.
    self.testDir = self.testInit.generateWorkDir(deleteOnDestruction=False)
    self.nJobs = 10
    self.configFile = EmulatorSetup.setupWMAgentConfig()
    return
def tearDown(self):
    """
    Database deletion
    """
    # Reverse of setUp: WMBS schema, couch databases, work dir, config file.
    self.testInit.clearDatabase(modules=["WMCore.WMBS"])
    self.testInit.tearDownCouch()
    self.testInit.delWorkDir()
    EmulatorSetup.deleteConfig(self.configFile)
    super(JobArchiverTest, self).tearDown()
    return
def getConfig(self):
    """
    _createConfig_

    General config file

    Returns a Configuration object with General, CoreDatabase,
    JobStateMachine, JobArchiver and WorkQueueManager sections populated
    for this test.
    """
    config = self.testInit.getConfiguration()
    self.testInit.generateWorkDir(config)
    # First the general stuff
    config.section_("General")
    config.General.workDir = os.getenv("TESTDIR", os.getcwd())
    config.General.WorkDir = os.getenv("TESTDIR", os.getcwd())
    # Now the CoreDatabase information
    # This should be the dialect, dburl, etc
    config.section_("CoreDatabase")
    config.CoreDatabase.connectUrl = os.getenv("DATABASE")
    config.CoreDatabase.socket = os.getenv("DBSOCK")
    config.section_("JobStateMachine")
    config.JobStateMachine.couchurl = os.getenv("COUCHURL", "cmssrv48.fnal.gov:5984")
    config.JobStateMachine.couchDBName = "jobarchiver_t_0"
    config.component_("JobArchiver")
    config.JobArchiver.pollInterval = 60
    config.JobArchiver.logLevel = 'INFO'
    # config.JobArchiver.logDir = os.path.join(self.testDir, 'logs')
    config.JobArchiver.componentDir = self.testDir
    config.JobArchiver.numberOfJobsToCluster = 1000
    config.component_('WorkQueueManager')
    config.WorkQueueManager.namespace = "WMComponent.WorkQueueManager.WorkQueueManager"
    config.WorkQueueManager.componentDir = config.General.workDir + "/WorkQueueManager"
    config.WorkQueueManager.level = 'LocalQueue'
    config.WorkQueueManager.logLevel = 'DEBUG'
    # NOTE(review): 'https://None' looks like a placeholder URL -- confirm it
    # is intentional for these tests.
    config.WorkQueueManager.couchurl = 'https://None'
    config.WorkQueueManager.dbname = 'whatever'
    config.WorkQueueManager.inboxDatabase = 'whatever2'
    config.WorkQueueManager.queueParams = {}
    config.WorkQueueManager.queueParams["ParentQueueCouchUrl"] = "https://cmsweb.cern.ch/couchdb/workqueue"
    return config
def createTestJobGroup(self):
"""
Creates a group of several jobs
"""
testWorkflow = Workflow(spec="spec.xml", owner="Simon",
#.........这里部分代码省略.........
示例10: PileupFetcherTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class PileupFetcherTest(unittest.TestCase):
def setUp(self):
    """
    Initialize the database and couch.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("pileupfetcher_t", "ConfigCache")
    self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                            useDefault = False)
    couchServer = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = couchServer.connectDatabase("pileupfetcher_t")
    self.testDir = self.testInit.generateWorkDir()
    # Only the DBS emulator is enabled; tearDown resets it.
    EmulatorHelper.setEmulators(dbs = True)
def tearDown(self):
    """
    Clear out the database.
    """
    # Reverse of setUp: couch, SQL schema, work dir, DBS emulator.
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    EmulatorHelper.resetEmulators()
def injectGenerationConfig(self):
    """
    _injectGenerationConfig_

    Inject a generation config for the MC workflow.

    Commits a bogus ConfigCache document (one GEN-SIM-RAW output module)
    into the couch config database and returns the document ID.
    """
    config = Document()
    config["info"] = None
    config["config"] = None
    config["md5hash"] = "eb1c38cf50e14cf9fc31278a5c8e580f"
    config["pset_hash"] = "7c856ad35f9f544839d8525ca10259a7"
    config["owner"] = {"group": "cmsdataops", "user": "sfoulkes"}
    # A prior 'config["pset_tweak_details"] = None' was dead code: it was
    # immediately overwritten by the real tweak dictionary below.
    config["pset_tweak_details"] = \
        {"process": {"outputModules_": ["OutputA"],
                     "OutputA": {"dataset": {"filterName": "OutputAFilter",
                                             "dataTier": "GEN-SIM-RAW"}}}}
    result = self.configDatabase.commitOne(config)
    return result[0]["id"]
def _queryAndCompareWithDBS(self, pileupDict, defaultArguments, dbsUrl):
"""
pileupDict is a Python dictionary containing particular pileup
configuration information. Query DBS on given dataset contained
now in both input defaultArguments as well as in the pileupDict
and compare values.
"""
args = {}
args["version"] = "DBS_2_0_9"
args["mode"] = "GET"
reader = DBSReader(dbsUrl, **args)
inputArgs = defaultArguments["PileupConfig"]
self.assertEqual(len(inputArgs), len(pileupDict),
"Number of pileup types different.")
for pileupType in inputArgs:
m = ("pileup type '%s' not in PileupFetcher-produced pileup "
"configuration: '%s'" % (pileupType, pileupDict))
self.assertTrue(pileupType in pileupDict, m)
# now query DBS for compare actual results on files lists for each
# pileup type and dataset and location (storage element names)
# pileupDict is saved in the file and now comparing items of this
# configuration with actual DBS results, the structure of pileupDict:
# {"pileupTypeA": {"BlockA": {"FileList": [], "StorageElementNames": []},
# "BlockB": {"FileList": [], "StorageElementName": []}, ....}
for pileupType, datasets in inputArgs.items():
# this is from the pileup configuration produced by PileupFetcher
blockDict = pileupDict[pileupType]
for dataset in datasets:
dbsFileBlocks = reader.listFileBlocks(dataset = dataset)
for dbsFileBlockName in dbsFileBlocks:
fileList = [] # list of files in the block (dbsFile["LogicalFileName"])
storageElemNames = set() # list of StorageElementName
# each DBS block has a list under 'StorageElementList', iterate over
storageElements = reader.listFileBlockLocation(dbsFileBlockName)
for storElem in storageElements:
storageElemNames.add(storElem)
# now get list of files in the block
dbsFiles = reader.listFilesInBlock(dbsFileBlockName)
for dbsFile in dbsFiles:
fileList.append(dbsFile["LogicalFileName"])
# now compare the sets:
m = ("StorageElementNames don't agree for pileup type '%s', "
"dataset '%s' in configuration: '%s'" % (pileupType, dataset, pileupDict))
self.assertEqual(set(blockDict[dbsFileBlockName]["StorageElementNames"]), storageElemNames, m)
m = ("FileList don't agree for pileup type '%s', dataset '%s' "
" in configuration: '%s'" % (pileupType, dataset, pileupDict))
print fileList
print blockDict[dbsFileBlockName]["FileList"]
self.assertEqual(sorted(blockDict[dbsFileBlockName]["FileList"]), sorted(fileList))
#.........这里部分代码省略.........
示例11: RepackTests
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class RepackTests(unittest.TestCase):
def setUp(self):
    """
    Prepare the test fixture: initialize logging, connect to the
    database, install the WMBS schema, create a scratch working
    directory, and build the DAO handles used by this test case's
    assertions.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMCore.WMBS"],
                            useDefault=False)
    self.testDir = self.testInit.generateWorkDir()

    # DAOs are resolved through a factory bound to the current
    # thread's logger and database interface.
    thread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS",
                            logger=thread.logger,
                            dbinterface=thread.dbi)
    self.daoFactory = daoFactory
    self.listTasksByWorkflow = daoFactory(classname="Workflow.LoadFromName")
    self.listFilesets = daoFactory(classname="Fileset.List")
    self.listSubsMapping = daoFactory(classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")
    return
def tearDown(self):
    """
    Undo setUp: clear all database tables, then remove the scratch
    working directory.
    """
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def testRepack(self):
"""
_testRepack_
Create a Repack workflow
and verify it installs into WMBS correctly.
"""
testArguments = RepackWorkloadFactory.getTestArguments()
testArguments.update(deepcopy(REQUEST))
factory = RepackWorkloadFactory()
testWorkload = factory.factoryWorkloadConstruction("TestWorkload", testArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "T0")
testWMBSHelper = WMBSHelper(testWorkload, "Repack", cachepath=self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
repackWorkflow = Workflow(name="TestWorkload",
task="/TestWorkload/Repack")
repackWorkflow.load()
self.assertEqual(len(repackWorkflow.outputMap.keys()), len(testArguments["Outputs"]) + 1,
"Error: Wrong number of WF outputs in the Repack WF.")
goldenOutputMods = {"write_PrimaryDataset1_RAW": "RAW", "write_PrimaryDataset2_RAW": "RAW"}
for goldenOutputMod, tier in goldenOutputMods.items():
fset = goldenOutputMod + tier
mergedOutput = repackWorkflow.outputMap[fset][0]["merged_output_fileset"]
unmergedOutput = repackWorkflow.outputMap[fset][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
if goldenOutputMod != "write_PrimaryDataset1_RAW":
self.assertEqual(mergedOutput.name,
"/TestWorkload/Repack/RepackMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Repack/unmerged-%s" % fset,
"Error: Unmerged output fileset is wrong: %s" % unmergedOutput.name)
logArchOutput = repackWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = repackWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Repack/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Repack/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
for goldenOutputMod, tier in goldenOutputMods.items():
mergeWorkflow = Workflow(name="TestWorkload",
task="/TestWorkload/Repack/RepackMerge%s" % goldenOutputMod)
mergeWorkflow.load()
self.assertEqual(len(mergeWorkflow.outputMap.keys()), 3,
"Error: Wrong number of WF outputs.")
mergedMergeOutput = mergeWorkflow.outputMap["Merged%s" % tier][0]["merged_output_fileset"]
unmergedMergeOutput = mergeWorkflow.outputMap["Merged%s" % tier][0]["output_fileset"]
mergedMergeOutput.loadData()
unmergedMergeOutput.loadData()
self.assertEqual(mergedMergeOutput.name,
"/TestWorkload/Repack/RepackMerge%s/merged-Merged%s" % (goldenOutputMod, tier),
#.........这里部分代码省略.........
示例12: DBSUploadTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class DBSUploadTest(unittest.TestCase):
"""
TestCase for DBSUpload module
Note:
This fails if you use the in-memory syntax for sqlite
i.e. (DATABASE = sqlite://)
"""
_maxMessage = 10
def setUp(self):
    """
    Prepare the test fixture.

    Sets up logging, the database schema (DBS3Buffer plus agent
    tables) and the couch databases, registers the file locations and
    the heartbeat component, and finally stores a dummy PSet in the
    config cache so that tests have a resolvable config URL in
    self.configURL.
    """
    # Constants used throughout the test case.
    self.couchDB = "config_test"
    self.configURL = "RANDOM;;URL;;NAME"
    self.configString = "This is a random string"

    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=["WMComponent.DBS3Buffer", "WMCore.Agent.Database"],
                            useDefault=False)
    self.testInit.setupCouch(self.couchDB, "GroupUser", "ConfigCache")

    myThread = threading.currentThread()
    self.bufferFactory = DAOFactory(package="WMComponent.DBSBuffer.Database",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)

    # Register the storage elements that buffered files can live at.
    locationAction = self.bufferFactory(classname="DBSBufferFiles.AddLocation")
    locationAction.execute(siteName="se1.cern.ch")
    locationAction.execute(siteName="se1.fnal.gov")
    locationAction.execute(siteName="malpaquet")

    # Register a heartbeat for the component under test.
    self.componentName = "JobSubmitter"
    self.heartbeatAPI = HeartbeatAPI(self.componentName)
    self.heartbeatAPI.registerComponent()

    # Set up a config cache entry backed by a dummy PSet file.
    configCache = ConfigCache(os.environ["COUCHURL"], couchDBName=self.couchDB)
    configCache.createUserGroup(groupname="testGroup", username="testOps")
    self.testDir = self.testInit.generateWorkDir()
    psetPath = os.path.join(self.testDir, "PSet.txt")
    # Use a context manager so the file handle is closed even if the
    # write raises (the original open/write/close leaked the handle
    # on error).
    with open(psetPath, "w") as psetFile:
        psetFile.write(self.configString)
    configCache.addConfig(newConfig=psetPath, psetHash=None)
    configCache.save()
    self.configURL = "%s;;%s;;%s" % (os.environ["COUCHURL"],
                                     self.couchDB,
                                     configCache.getCouchID())
    return
def tearDown(self):
    """
    Undo setUp by clearing the DBS3Buffer and agent database tables.
    """
    self.testInit.clearDatabase(modules=["WMComponent.DBS3Buffer", "WMCore.Agent.Database"])
def createConfig(self):
"""
_createConfig_
This creates the actual config file used by the component
"""
config = Configuration()
# First the general stuff
config.section_("General")
config.General.workDir = os.getenv("TESTDIR", os.getcwd())
config.section_("Agent")
config.Agent.componentName = "DBSUpload"
config.Agent.useHeartbeat = False
# Now the CoreDatabase information
# This should be the dialect, dburl, etc
config.section_("CoreDatabase")
config.CoreDatabase.connectUrl = os.getenv("DATABASE")
config.CoreDatabase.socket = os.getenv("DBSOCK")
config.component_("DBSUpload")
config.DBSUpload.pollInterval = 10
config.DBSUpload.logLevel = "ERROR"
config.DBSUpload.maxThreads = 1
config.DBSUpload.namespace = "WMComponent.DBSUpload.DBSUpload"
config.DBSUpload.componentDir = os.path.join(os.getcwd(), "Components")
config.DBSUpload.workerThreads = 4
#.........这里部分代码省略.........
示例13: URLFetcherTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class URLFetcherTest(unittest.TestCase):
    """
    Main test for the URLFetcher.

    Verifies that a config cache entry (a PSet tweak plus an attached
    config file) can be fetched by CMSSWFetcher into each step's
    working directory.
    """

    def setUp(self):
        """
        Basic setUp: logging, database connection, couch databases and
        a scratch working directory.
        """
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setupCouch("config_test", "GroupUser", "ConfigCache")
        self.testDir = self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        Basic tearDown: remove the working directory and the couch
        databases created in setUp.
        """
        self.testInit.delWorkDir()
        self.testInit.tearDownCouch()
        return

    def getConfig(self):
        """
        _getConfig_

        Create a test config (PSet tweak plus an attached config file),
        store it in the config cache and return the cache document.
        """
        PSetTweak = {'someKey': "Hello, I am a PSetTweak. It's nice to meet you."}
        configCache = ConfigCache(os.environ["COUCHURL"], couchDBName='config_test')
        configCache.createUserGroup(groupname="testGroup", username='testOps')
        configCache.setPSetTweaks(PSetTweak=PSetTweak)
        configCache.attachments['configFile'] = 'This Is A Test Config'
        configCache.save()
        return configCache

    def createTask(self, configCache):
        """
        _createTask_

        Create a two-step test task whose CMSSW steps point at the
        given config cache entry; each step gets its own directory
        under the test working directory.
        """
        task = WMTask.makeWMTask("testTask")
        task.makeStep("step1")
        task.makeStep("step2")
        for step in task.steps().nodeIterator():
            helper = WMStep.WMStepHelper(step)
            os.mkdir(os.path.join(self.testDir, helper.name()))
            helper.setStepType("CMSSW")
            helper.data.application.section_('command')
            helper.data.application.configuration.configCacheUrl = configCache.dburl
            helper.data.application.configuration.cacheName = configCache.dbname
            helper.data.application.configuration.configId = configCache.getCouchID()
            helper.data.application.command.psetTweak = 'tweak'
            helper.data.application.command.configuration = 'configCache.file'
        return task

    def testA_BasicFunction(self):
        """
        _BasicFunction_

        Run a test to find out if we can grab a configCache: the
        fetcher should materialize both the PSet tweak and the attached
        config file in the step directory.
        """
        configCache = self.getConfig()
        task = self.createTask(configCache=configCache)
        fetcher = CMSSWFetcher()
        fetcher.setWorkingDirectory(workingDir=self.testDir)
        fetcher(wmTask=task)
        self.assertTrue(os.path.isfile(os.path.join(self.testDir,
                                                    'step2', 'tweak')))
        # Use a context manager so the handle is closed even if the
        # read or the assertion below fails (the original left the
        # file open on failure).
        with open(os.path.join(self.testDir, 'step2', 'configCache.file')) as configFile:
            content = configFile.read()
        self.assertEqual(content, 'This Is A Test Config')
        return
示例14: BossAirTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class BossAirTest(unittest.TestCase):
"""
Tests for the BossAir prototype
"""
sites = ['T2_US_UCSD', 'T2_TW_Taiwan', 'T1_CH_CERN', 'T2_US_Florida']
def setUp(self):
    """
    setup for test.

    Builds the full BossAir fixture: database schema (WMBS, BossAir,
    ResourceControl, agent tables), couch job/FWJR dumps, DAOs, a set
    of sites with thresholds in ResourceControl, a test user, a work
    directory and two heartbeat registrations.
    """
    myThread = threading.currentThread()
    self.testInit = TestInit(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    # Wipe leftovers from a previous run before installing the schema.
    self.tearDown()
    self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl", "WMCore.Agent.Database"],
                            useDefault = False)
    self.testInit.setupCouch("bossair_t/jobs", "JobDump")
    self.testInit.setupCouch("bossair_t/fwjrs", "FWJRDump")
    self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = myThread.logger,
                                 dbinterface = myThread.dbi)
    self.getJobs = self.daoFactory(classname = "Jobs.GetAllJobs")
    #Create sites in resourceControl
    resourceControl = ResourceControl()
    for site in self.sites:
        resourceControl.insertSite(siteName = site, seName = 'se.%s' % (site), cmsName = site,
                                   ceName = site, plugin = "CondorPlugin", pendingSlots = 1000,
                                   runningSlots = 2000)
        resourceControl.insertThreshold(siteName = site, taskType = 'Processing', \
                                        maxSlots = 1000, pendingSlots = 1000)
    # NOTE(review): the three inserts below reuse the loop variable
    # `site` after the loop, so cmsName is always the LAST entry of
    # self.sites — looks accidental; confirm before relying on cmsName
    # for these dummy sites.
    resourceControl.insertSite(siteName = 'Xanadu', seName = 'se.Xanadu',cmsName = site,
                               ceName = 'Xanadu', plugin = "TestPlugin")
    resourceControl.insertThreshold(siteName = 'Xanadu', taskType = 'Processing', \
                                    maxSlots = 10000, pendingSlots = 10000)
    resourceControl.insertSite(siteName = 'jade-cms.hip.fi', seName = 'madhatter.csc.fi', cmsName = site,
                               ceName = 'jade-cms.hip.fi', plugin = "ARCPlugin")
    resourceControl.insertThreshold(siteName = 'jade-cms.hip.fi', taskType = 'Processing', \
                                    maxSlots = 100, pendingSlots = 100)
    # using this for glite submissions
    resourceControl.insertSite(siteName = 'grid-ce-01.ba.infn.it', seName = 'storm-se-01.ba.infn.it', cmsName = site,
                               ceName = 'grid-ce-01.ba.infn.it', plugin = 'gLitePlugin')
    resourceControl.insertThreshold(siteName = 'grid-ce-01.ba.infn.it', taskType = 'Processing', \
                                    maxSlots = 50, pendingSlots = 50)
    # Create user
    newuser = self.daoFactory(classname = "Users.New")
    newuser.execute(dn = "tapas", group_name = "phgroup", role_name = "cmsrole")
    # We actually need the user name
    self.user = getpass.getuser()
    # Change this to the working dir to keep track of error and log files from condor
    self.testInit.generateWorkDir()
    # Set heartbeat
    componentName = 'test'
    self.heartbeatAPI = HeartbeatAPI(componentName)
    self.heartbeatAPI.registerComponent()
    componentName = 'JobTracker'
    self.heartbeatAPI2 = HeartbeatAPI(componentName)
    self.heartbeatAPI2.registerComponent()
    return
def tearDown(self):
    """
    Clean up after a test: remove the work directory and the couch
    databases. Clearing the database tables is intentionally skipped
    here (setUp calls tearDown and then reinstalls the schema).
    """
    # clearDatabase() deliberately not called — see docstring.
    self.testInit.delWorkDir()
    self.testInit.tearDownCouch()
    return
def getConfig(self):
"""
_getConfig_
Build a basic BossAir config
"""
config = self.testInit.getConfiguration()
config.section_("Agent")
config.Agent.agentName = 'testAgent'
config.Agent.componentName = 'test'
config.Agent.useHeartbeat = False
#.........这里部分代码省略.........
示例15: PromptRecoTest
# 需要导入模块: from WMQuality.TestInitCouchApp import TestInitCouchApp [as 别名]
# 或者: from WMQuality.TestInitCouchApp.TestInitCouchApp import generateWorkDir [as 别名]
class PromptRecoTest(unittest.TestCase):
def setUp(self):
    """
    Prepare the fixture: logging, database connection, the
    "promptreco_t" couch database (with the ConfigCache app), the WMBS
    schema, and a scratch work directory.
    """
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setupCouch("promptreco_t", "ConfigCache")
    self.testInit.setSchema(customModules=["WMCore.WMBS"],
                            useDefault=False)

    # Handle to the couch database used by the workload's config cache.
    server = CouchServer(os.environ["COUCHURL"])
    self.configDatabase = server.connectDatabase("promptreco_t")
    self.testDir = self.testInit.generateWorkDir()
    return
def tearDown(self):
    """
    Undo setUp: drop the couch databases, clear out the database
    tables and delete the work directory.
    """
    self.testInit.tearDownCouch()
    self.testInit.clearDatabase()
    self.testInit.delWorkDir()
    return
def setupPromptSkimConfigObject(self):
    """
    _setupPromptSkimConfigObject_

    Build a custom Tier1Skim ConfigSection used to exercise the prompt
    skim functionality, and store it on the instance as
    self.promptSkim.
    """
    skim = ConfigSection(name="Tier1Skim")
    skim.SkimName = "TestSkim1"
    skim.DataTier = "RECO"
    skim.TwoFileRead = False
    skim.ProcessingVersion = "PromptSkim-v1"
    skim.ConfigURL = "http://cmssw.cvs.cern.ch/cgi-bin/cmssw.cgi/CMSSW/Configuration/DataOps/python/prescaleskimmer.py?revision=1.1"
    self.promptSkim = skim
def testPromptReco(self):
"""
_testPromptReco_
Create a Prompt Reconstruction workflow
and verify it installs into WMBS correctly.
"""
testArguments = getTestArguments()
testWorkload = promptrecoWorkload("TestWorkload", testArguments)
testWorkload.setSpecUrl("somespec")
testWorkload.setOwnerDetails("[email protected]", "T0")
testWMBSHelper = WMBSHelper(testWorkload, "Reco", "SomeBlock", cachepath = self.testDir)
testWMBSHelper.createTopLevelFileset()
testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)
recoWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Reco")
recoWorkflow.load()
self.assertEqual(len(recoWorkflow.outputMap.keys()), len(testArguments["WriteTiers"]) + 1,
"Error: Wrong number of WF outputs in the Reco WF.")
goldenOutputMods = ["write_RECO", "write_ALCARECO", "write_AOD", "write_DQM"]
for goldenOutputMod in goldenOutputMods:
mergedOutput = recoWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
unmergedOutput = recoWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]
mergedOutput.loadData()
unmergedOutput.loadData()
if goldenOutputMod != "write_ALCARECO":
self.assertEqual(mergedOutput.name, "/TestWorkload/Reco/RecoMerge%s/merged-Merged" % goldenOutputMod,
"Error: Merged output fileset is wrong: %s" % mergedOutput.name)
self.assertEqual(unmergedOutput.name, "/TestWorkload/Reco/unmerged-%s" % goldenOutputMod,
"Error: Unmerged output fileset is wrong: %s" % unmergedOutput.name)
logArchOutput = recoWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
unmergedLogArchOutput = recoWorkflow.outputMap["logArchive"][0]["output_fileset"]
logArchOutput.loadData()
unmergedLogArchOutput.loadData()
self.assertEqual(logArchOutput.name, "/TestWorkload/Reco/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/Reco/unmerged-logArchive",
"Error: LogArchive output fileset is wrong.")
alcaSkimWorkflow = Workflow(name = "TestWorkload",
task = "/TestWorkload/Reco/AlcaSkim")
alcaSkimWorkflow.load()
self.assertEqual(len(alcaSkimWorkflow.outputMap.keys()), len(testArguments["AlcaSkims"]) + 1,
"Error: Wrong number of WF outputs in the AlcaSkim WF.")
goldenOutputMods = []
for alcaProd in testArguments["AlcaSkims"]:
goldenOutputMods.append("ALCARECOStream%s" % alcaProd)
for goldenOutputMod in goldenOutputMods:
mergedOutput = alcaSkimWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
#.........这里部分代码省略.........