

Python api.DBCaseRecorder Class Code Examples

This page collects typical usage examples of the Python class openmdao.lib.casehandlers.api.DBCaseRecorder. If you are looking for concrete examples of how DBCaseRecorder is used in practice, the selected class examples below may help.


The following 15 code examples of the DBCaseRecorder class are shown below, sorted by popularity by default.
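Before the examples, here is a minimal standalone sketch of the typical workflow: create a recorder, record a Case, and read the stored cases back through get_iterator(). It is modeled on Examples 1-4 below; the import path for Case (openmdao.main.case) and the default in-memory ':memory:' database are assumptions drawn from those examples rather than from a specific OpenMDAO release.

    from openmdao.lib.casehandlers.api import DBCaseRecorder
    from openmdao.main.case import Case  # assumed import path for Case

    # With no argument the recorder keeps its SQLite database in memory;
    # pass a file path instead to persist cases on disk.
    recorder = DBCaseRecorder()

    case = Case(inputs=[('comp.x', 1.0), ('comp.y', 2.0)],
                outputs=[('comp.z', 3.0)])
    recorder.record(case)

    # Recorded cases can be replayed through a case iterator.
    for case in recorder.get_iterator():
        print case['comp.x'], case['comp.z']

    recorder.close()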

Example 1: test_string

 def test_string(self):
     recorder = DBCaseRecorder()
     case = Case(inputs=[('str', 'Normal String'),
                         ('unicode', u'Unicode String'),
                         ('list', ['Hello', 'world'])])  # Check pickling.
     recorder.record(case)
     for case in recorder.get_iterator():
         self.assertEqual(case['str'], 'Normal String')
         self.assertEqual(case['unicode'], u'Unicode String')
         self.assertEqual(case['list'], ['Hello', 'world'])
Author: akhi28, Project: OpenMDAO-Framework, Lines: 10, Source: test_dbcaserecorder.py

Example 2: test_string

 def test_string(self):
     recorder = DBCaseRecorder()
     case = Case(
         inputs=[("str", "Normal String"), ("unicode", u"Unicode String"), ("list", ["Hello", "world"])]
     )  # Check pickling.
     recorder.record(case)
     for case in recorder.get_iterator():
         self.assertEqual(case["str"], "Normal String")
         self.assertEqual(case["unicode"], u"Unicode String")
         self.assertEqual(case["list"], ["Hello", "world"])
Author: naylor-b, Project: OpenMDAO-Framework, Lines: 10, Source: test_dbcaserecorder.py

Example 3: test_pickle_conversion

 def test_pickle_conversion(self):
     recorder = DBCaseRecorder()
     for i in range(10):
         inputs = [('comp1.x', i), ('comp1.y', i*2.)]
         outputs = [('comp1.z', i*1.5), ('comp2.normal', NormalDistribution(float(i),0.5))]
         recorder.record(Case(inputs=inputs, outputs=outputs, label='case%s'%i))
     iterator = recorder.get_iterator()
     for i,case in enumerate(iterator):
         self.assertTrue(isinstance(case['comp2.normal'], NormalDistribution))
         self.assertEqual(case['comp2.normal'].mu, float(i))
         self.assertEqual(case['comp2.normal'].sigma, 0.5)
         self.assertTrue(isinstance(case['comp1.y'], float))
         self.assertEqual(case['comp1.y'], i*2.)
         self.assertEqual(case['comp1.z'], i*1.5)
Author: akhi28, Project: OpenMDAO-Framework, Lines: 14, Source: test_dbcaserecorder.py

Example 4: test_query

    def test_query(self):
        recorder = DBCaseRecorder()
        for i in range(10):
            inputs = [('comp1.x', i), ('comp1.y', i*2.)]
            outputs = [('comp1.z', i*1.5), ('comp2.normal', NormalDistribution(float(i),0.5))]
            recorder.record(Case(inputs=inputs, outputs=outputs, label='case%s'%i))
        iterator = recorder.get_iterator()
        iterator.selectors = ["value>=0","value<3"]

        count = 0
        for i,case in enumerate(iterator):
            count += 1
            for name,value in case.items():
                self.assertTrue(value >= 0 and value<3)
        self.assertEqual(count, 3)
Author: akhi28, Project: OpenMDAO-Framework, Lines: 15, Source: test_dbcaserecorder.py

Example 5: __init__

 def __init__(self):
     """Creates an Assembly to run DREA and HSRnoise."""
     super(DREA_HSRnoise, self).__init__()
     
     FO1 = Case(inputs=[('point', 1),('dreaprep.Mach',0.28),('alt',2000.0),('dreaprep.PC',100.0),('hsrnoise.phi', 0.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     FO2 = Case(inputs=[('point', 1),('dreaprep.Mach',0.28),('alt',2000.0),('dreaprep.PC', 65.0),('hsrnoise.phi', 0.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     App = Case(inputs=[('point', 2),('dreaprep.Mach',0.20),('alt', 394.0),('dreaprep.PC', 30.0),('hsrnoise.phi', 0.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     SL1 = Case(inputs=[('point', 3),('dreaprep.Mach',0.25),('alt',1000.0),('dreaprep.PC',100.0),('hsrnoise.phi', 0.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     SL2 = Case(inputs=[('point', 3),('dreaprep.Mach',0.25),('alt',1000.0),('dreaprep.PC',100.0),('hsrnoise.phi',30.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     SL3 = Case(inputs=[('point', 3),('dreaprep.Mach',0.25),('alt',1000.0),('dreaprep.PC',100.0),('hsrnoise.phi',60.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     SL4 = Case(inputs=[('point', 3),('dreaprep.Mach',0.25),('alt',1000.0),('dreaprep.PC',100.0),('hsrnoise.phi',90.0)], outputs=[('drea.CFG',0.0),('hsrnoise.thetas',0.0),('hsrnoise.Freq',0.0),('hsrnoise.SPL_corr',0),('hsrnoise.OASPL30',0.0),('hsrnoise.OASPL60',0.0),('hsrnoise.OASPL90',0.0),('hsrnoise.OASPL120',0.0),('hsrnoise.OASPL150',0.0)])
     
     cases = ListCaseIterator([FO1,FO2,App,SL1,SL2,SL3,SL4])
     db_recorder = DBCaseRecorder()
     
     self.add('geo', Geometry())
     
     self.add('dreaprep', DREAprep())
     self.add('drea', DREA())
     self.add('hsrnoise', HSRNOISE())
     self.add('ACDgen', ACDgen())
     
     self.add('analysis',CaseIteratorDriver())
     self.analysis.iterator = cases
     self.analysis.recorders = [db_recorder]
     self.ACDgen.case_data = db_recorder.get_iterator()
     
     # Set up the workflows
     #---------------------------
     #self.analysis.workflow.add(['dreaprep', 'drea', 'hsrnoise'])
     #self.driver.workflow.add(['analysis','ACDgen'])
     
     self.driver.workflow.add(['dreaprep', 'drea', 'hsrnoise'])
             
     # Connections
     #---------------------------
     self.connect('geo',['drea.geo_in','hsrnoise.geo_in'])
     self.connect('alt',['dreaprep.alt','hsrnoise.ALTEVO'])
     self.connect('dreaprep.flow_out','drea.flow_in')
     self.connect('drea.flow_out','hsrnoise.flow_in')
     self.connect('drea.CFG','hsrnoise.CFG')
Author: Kenneth-T-Moore, Project: OpenMDAO-Framework, Lines: 41, Source: DREA_HSRnoise.py

Example 6: test_db_to_dict

    def test_db_to_dict(self):
        tmpdir = tempfile.mkdtemp()
        dfile = os.path.join(tmpdir, 'junk.db')
        recorder = DBCaseRecorder(dfile)

        # create some Cases where some are missing a variable
        outputs = ['comp1.z', 'comp2.z']
        inputs = ['comp1.x', 'comp1.y', 'comp1.y2']
        recorder.register(self, inputs, outputs)
        for i in range(10):
            inputs = [i, i*2, i*3]
            outputs = [i*i, float('NaN')]
            recorder.record(self, inputs, outputs, None, '', '')

        varnames = ['comp1.x', 'comp1.y', 'comp1.y2']
        varinfo = case_db_to_dict(dfile, varnames)

        self.assertEqual(len(varinfo), 3)
        # each var list should have 10 data values in it
        for lst in varinfo.values():
            self.assertEqual(len(lst), 10)

        # now use caseiter_to_dict to grab the same data
        varinfo = caseiter_to_dict(recorder.get_iterator(), varnames)
        # each var list should have 10 data values in it
        for lst in varinfo.values():
            self.assertEqual(len(lst), 10)

        try:
            shutil.rmtree(tmpdir, onerror=onerror)
        except OSError:
            logging.error("problem removing directory %s", tmpdir)
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 32, Source: test_dbcaserecorder.py

Example 7: test_db_to_dict

    def test_db_to_dict(self):
        tmpdir = tempfile.mkdtemp()
        dfile = os.path.join(tmpdir, 'junk.db')
        recorder = DBCaseRecorder(dfile)
        
        # create some Cases where some are missing a variable
        outputs = ['comp1.z', 'comp2.z']
        cases = []
        for i in range(10):
            if i>1:
                msg = ''
            else:
                msg = 'an error occurred'
            if i<5:
                inputs = [('comp1.x', i), ('comp1.y', i*2), ('comp1.y2', i*3)]
            else:
                inputs = [('comp1.x', i), ('comp1.y', i*2)]
            recorder.record(Case(inputs=inputs, outputs=outputs, msg=msg))

        varnames = ['comp1.x','comp1.y','comp1.y2']
        varinfo = case_db_to_dict(dfile, varnames)
        
        self.assertEqual(len(varinfo), 3)
        # each var list should have 3 data values in it (5 cases with the
        # required variables minus 2 with errors)
        for name,lst in varinfo.items():
            self.assertEqual(len(lst), 3)
            
        # now use caseiter_to_dict to grab the same data
        varinfo = caseiter_to_dict(recorder.get_iterator(), varnames)
        # each var list should have 3 data values in it (5 cases with the
        # required variables minus 2 with errors)
        for name,lst in varinfo.items():
            self.assertEqual(len(lst), 3)
        
        try:
            shutil.rmtree(tmpdir)
        except OSError:
            logging.error("problem removing directory %s" % tmpdir)
Author: akhi28, Project: OpenMDAO-Framework, Lines: 39, Source: test_dbcaserecorder.py

Example 8: test_tables_already_exist

    def test_tables_already_exist(self):
        dbdir = tempfile.mkdtemp()
        dbname = os.path.join(dbdir, 'junk_dbfile')

        recorder = DBCaseRecorder(dbname)
        recorder.close()
        recorder = DBCaseRecorder(dbname, append=True)
        recorder.close()
        try:
            recorder = DBCaseRecorder(dbname)
            recorder.close()
        except Exception as err:
            self.assertEqual('table cases already exists', str(err))
        else:
            self.fail('expected Exception')
        try:
            shutil.rmtree(dbdir, onerror=onerror)
        except OSError:
            logging.error("problem removing directory %s", dbdir)
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 19, Source: test_dbcaserecorder.py
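Example 8 above shows that re-opening an existing database file with a plain DBCaseRecorder(dbname) raises an exception ('table cases already exists'), while passing append=True re-opens it cleanly. Below is a minimal hedged sketch of the append pattern; the file location is hypothetical:

    import os
    import tempfile
    from openmdao.lib.casehandlers.api import DBCaseRecorder

    dbfile = os.path.join(tempfile.mkdtemp(), 'cases.db')  # hypothetical location

    recorder = DBCaseRecorder(dbfile)   # first open creates the 'cases' table
    recorder.close()

    # Re-open the same file with append=True to keep adding cases;
    # omitting append here would raise "table cases already exists".
    recorder = DBCaseRecorder(dbfile, append=True)
    recorder.close()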

Example 9: test_string

 def test_string(self):
     recorder = DBCaseRecorder()
     inputs = ['str', 'unicode', 'list']  # Check pickling.
     recorder.register(self, inputs, [])
     inputs = ['Normal String', u'Unicode String', ['Hello', 'world']]
     recorder.record(self, inputs, [], None, '', '')
     for case in recorder.get_iterator():
         self.assertEqual(case['str'], 'Normal String')
         self.assertEqual(case['unicode'], u'Unicode String')
         self.assertEqual(case['list'], ['Hello', 'world'])
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 10, Source: test_dbcaserecorder.py

Example 10: test_pickle_conversion

 def test_pickle_conversion(self):
     recorder = DBCaseRecorder()
     inputs = ['comp1.x', 'comp1.y']
     outputs = ['comp1.z', 'comp2.normal']
     recorder.register(self, inputs, outputs)
     for i in range(10):
         inputs = [i, i*2.]
         outputs = [i*1.5, NormalDistribution(float(i), 0.5)]
         recorder.record(self, inputs, outputs, None, '', '')
     iterator = recorder.get_iterator()
     for i, case in enumerate(iterator):
         self.assertTrue(isinstance(case['comp2.normal'], NormalDistribution))
         self.assertEqual(case['comp2.normal'].mu, float(i))
         self.assertEqual(case['comp2.normal'].sigma, 0.5)
         self.assertTrue(isinstance(case['comp1.y'], float))
         self.assertEqual(case['comp1.y'], i*2.)
         self.assertEqual(case['comp1.z'], i*1.5)
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 17, Source: test_dbcaserecorder.py

Example 11: test_query

    def test_query(self):
        recorder = DBCaseRecorder()
        inputs = ['comp1.x', 'comp1.y']
        outputs = ['comp1.z', 'comp2.normal']
        recorder.register(self, inputs, outputs)
        for i in range(10):
            inputs = [i, i*2.]
            outputs = [i*1.5, NormalDistribution(float(i), 0.5)]
            recorder.record(self, inputs, outputs, None, '', '')
        iterator = recorder.get_iterator()
        iterator.selectors = ["value>=0", "value<3"]

        count = 0
        for i, case in enumerate(iterator):
            count += 1
            for value in case.values():
                self.assertTrue(value >= 0 and value < 3)
        self.assertEqual(count, 3)
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 18, Source: test_dbcaserecorder.py
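Examples 4 and 11 show the iterator's selectors attribute, which applies SQL-style constraints to the stored values before iteration. The sketch below mirrors that usage with a single recorded variable; the exact filtering semantics (values failing a selector are dropped, and cases left with no values are skipped) are inferred from those tests rather than from documented behavior, and the Case import path is again an assumption:

    from openmdao.lib.casehandlers.api import DBCaseRecorder
    from openmdao.main.case import Case  # assumed import path for Case

    recorder = DBCaseRecorder()
    for i in range(10):
        recorder.record(Case(inputs=[('comp.x', float(i))]))

    iterator = recorder.get_iterator()
    # SQL-style constraints on the recorded values, as in Examples 4 and 11.
    iterator.selectors = ["value>=0", "value<3"]

    for case in iterator:
        print case['comp.x']   # expected: 0.0, 1.0, 2.0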

Example 12: configure

    def configure(self):    
        self._tdir = mkdtemp()        
        
        self.comp_name = None
        #check to make sure no more than one component is being referenced
        compnames = set()
        for param in self.parent.get_parameters().values():
            compnames.update(param.get_referenced_compnames())

        if len(compnames) > 1:
            self.parent.raise_exception('The EGO architecture can only be used on one '
                                        'component at a time, but parameters from %s '
                                        'were added to the problem formulation.' %compnames, 
                                        ValueError)
        self.comp_name = compnames.pop()        
        #change name of component to add '_model' to it. 
        #     lets me name the metamodel as the old name
        self.comp= getattr(self.parent,self.comp_name)
        self.comp.name = "%s_model"%self.comp_name
        
        #add in the metamodel
        meta_model = self.parent.add(self.comp_name,MetaModel()) #metamodel now replaces old component with same name
        meta_model.default_surrogate = KrigingSurrogate()
        meta_model.model = self.comp
        
        meta_model_recorder = DBCaseRecorder(os.path.join(self._tdir,'trainer.db'))
        meta_model.recorder = meta_model_recorder
        meta_model.force_execute = True
        
        EI = self.parent.add("EI",ExpectedImprovement())
        self.objective = self.parent.get_objectives().keys()[0]
        EI.criteria = self.objective
        
        pfilter = self.parent.add("filter",ParetoFilter())
        pfilter.criteria = [self.objective]
        pfilter.case_sets = [meta_model_recorder.get_iterator(),]
        pfilter.force_execute = True
        
        #Driver Configuration
        DOE_trainer = self.parent.add("DOE_trainer",DOEdriver())
        DOE_trainer.sequential = True
        DOE_trainer.DOEgenerator = OptLatinHypercube(num_samples=self.initial_DOE_size)
        
        for name,param in self.parent.get_parameters().iteritems(): 
            DOE_trainer.add_parameter(param)

        DOE_trainer.add_event("%s.train_next"%self.comp_name)
        
        DOE_trainer.case_outputs = [self.objective]
        DOE_trainer.recorders = [DBCaseRecorder(':memory:')]
        
        EI_opt = self.parent.add("EI_opt",Genetic())
        EI_opt.opt_type = "maximize"
        EI_opt.population_size = 100
        EI_opt.generations = 10
        #EI_opt.selection_method = "tournament"
        
        for name,param in self.parent.get_parameters().iteritems(): 
            EI_opt.add_parameter(param)
        EI_opt.add_objective("EI.%s"%self.EI_PI)
        
        retrain = self.parent.add("retrain",Driver())
        retrain.recorders = self.data_recorders
        
        retrain.add_event("%s.train_next"%self.comp_name)
        
        iter = self.parent.add("iter",IterateUntil())
        iter.max_iterations = self.sample_iterations
        iter.add_stop_condition('EI.PI <= %s'%self.min_ei_pi)
        
        #Data Connections
        self.parent.connect("filter.pareto_set","EI.best_case")
        self.parent.connect(self.objective,"EI.predicted_value")        
        
        #Iteration Hierarchy
        self.parent.driver.workflow.add(['DOE_trainer', 'iter'])
        #DOE_trainer.workflow.add(self.comp_name)
        
        iter.workflow = SequentialWorkflow()
        iter.workflow.add(['filter', 'EI_opt', 'retrain'])
        
        #EI_opt.workflow.add([self.comp_name,'EI'])
        retrain.workflow.add(self.comp_name)
Author: Kenneth-T-Moore, Project: OpenMDAO-Framework, Lines: 83, Source: ego.py

Example 13: test_close

    def test_close(self):
        # :memory: can be used after close.
        recorder = DBCaseRecorder()
        inps = ['str', 'unicode', 'list']
        recorder.register(self, inps, [])
        inputs = ['Normal String', u'Unicode String', ['Hello', 'world']]
        recorder.record(self, inputs, [], None, '', '')
        recorder.close()
        recorder.record(self, inputs, [], None, '', '')

        # File-based DB recorder can not be used after close.
        tmpdir = tempfile.mkdtemp()
        try:
            dfile = os.path.join(tmpdir, 'junk.db')
            recorder = DBCaseRecorder(dfile)
            recorder.register(self, inps, [])
            recorder.record(self, inputs, [], None, '', '')
            recorder.close()
            code = "recorder.record(self, inputs, [], None, '', '')"
            assert_raises(self, code, globals(), locals(), RuntimeError,
                          'Attempt to record on closed recorder')
        finally:
            try:
                shutil.rmtree(tmpdir, onerror=onerror)
            except OSError:
                logging.error("problem removing directory %s", tmpdir)
Author: FashtimeDotCom, Project: OpenMDAO-Framework, Lines: 26, Source: test_dbcaserecorder.py

Example 14: test_close

    def test_close(self):
        # :memory: can be used after close.
        recorder = DBCaseRecorder()
        case = Case(inputs=[('str', 'Normal String'),
                            ('unicode', u'Unicode String'),
                            ('list', ['Hello', 'world'])])  # Check pickling.
        recorder.record(case)
        recorder.close()
        recorder.record(case)

        # File-based DB recorder can not be used after close.
        tmpdir = tempfile.mkdtemp()
        try:
            dfile = os.path.join(tmpdir, 'junk.db')
            recorder = DBCaseRecorder(dfile)
            recorder.record(case)
            recorder.close()
            assert_raises(self, 'recorder.record(case)',
                          globals(), locals(), RuntimeError,
                          'Attempt to record on closed recorder')
        finally:
            try:
                shutil.rmtree(tmpdir)
            except OSError:
                logging.error("problem removing directory %s" % tmpdir)
Author: akhi28, Project: OpenMDAO-Framework, Lines: 25, Source: test_dbcaserecorder.py

Example 15: test_close

    def test_close(self):
        # :memory: can be used after close.
        recorder = DBCaseRecorder()
        case = Case(
            inputs=[("str", "Normal String"), ("unicode", u"Unicode String"), ("list", ["Hello", "world"])]
        )  # Check pickling.
        recorder.record(case)
        recorder.close()
        recorder.record(case)

        # File-based DB recorder can not be used after close.
        tmpdir = tempfile.mkdtemp()
        try:
            dfile = os.path.join(tmpdir, "junk.db")
            recorder = DBCaseRecorder(dfile)
            recorder.record(case)
            recorder.close()
            assert_raises(
                self, "recorder.record(case)", globals(), locals(), RuntimeError, "Attempt to record on closed recorder"
            )
        finally:
            try:
                shutil.rmtree(tmpdir)
            except OSError:
                logging.error("problem removing directory %s" % tmpdir)
Author: naylor-b, Project: OpenMDAO-Framework, Lines: 25, Source: test_dbcaserecorder.py


Note: The openmdao.lib.casehandlers.api.DBCaseRecorder class examples in this article were compiled by 纯净天空 from open-source code and documentation hosted on GitHub/MSDocs and similar platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please refer to each project's License for terms of distribution and use, and do not reproduce without permission.