本文整理汇总了Python中coverage_model.parameter.ParameterDictionary.dump方法的典型用法代码示例。如果您正苦于以下问题:Python ParameterDictionary.dump方法的具体用法?Python ParameterDictionary.dump怎么用?Python ParameterDictionary.dump使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类coverage_model.parameter.ParameterDictionary
的用法示例。
在下文中一共展示了ParameterDictionary.dump方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _setup_resources
# 需要导入模块: from coverage_model.parameter import ParameterDictionary [as 别名]
# 或者: from coverage_model.parameter.ParameterDictionary import dump [as 别名]
def _setup_resources(self):
    """Create and register the test resources for the RUV external dataset.

    Builds the provider/source/dataset/model objects, registers and wires
    them together through DAMS, then creates a stream definition and a data
    product so the external-dataset agent has a stream to publish on.

    NOTE(review): the original listing lost its indentation and elides the
    tail of this method; the visible statements are reproduced verbatim and
    re-indented.
    """
    # TODO: some or all of this (or some variation) should move to DAMS'
    # Service clients used to build the test resources for the dataset.
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()
    pubsub_cli = PubsubManagementServiceClient()

    # External dataset agent and its instance.
    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)
    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects.

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    # NOTE(review): address looks mangled by the listing's e-mail redaction -- confirm before use.
    dprov.contact.email = '[email protected]ascience.com'

    # Create DataSource (FILE protocol: the handler reads local .ruv files).
    dsrc = DataSource(protocol_type='FILE', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '[email protected]'

    # Create ExternalDataset describing where/how to find the test file.
    ds_name = 'ruv_test_dataset'
    dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
    dset.dataset_description.parameters['base_url'] = 'test_data/ruv/'
    dset.dataset_description.parameters['list_pattern'] = 'RDLi_SEAB_2011_08_24_1600.ruv'
    dset.dataset_description.parameters['date_pattern'] = '%Y %m %d %H %M'
    # Raw string so the regex escapes (\d) are not touched by Python string parsing.
    dset.dataset_description.parameters['date_extraction_pattern'] = r'RDLi_SEAB_([\d]{4})_([\d]{2})_([\d]{2})_([\d]{2})([\d]{2}).ruv'
    dset.dataset_description.parameters['temporal_dimension'] = None
    dset.dataset_description.parameters['zonal_dimension'] = None
    dset.dataset_description.parameters['meridional_dimension'] = None
    dset.dataset_description.parameters['vertical_dimension'] = None
    dset.dataset_description.parameters['variables'] = [
    ]

    # Create DataSourceModel (no real handler for this test fixture).
    dsrc_model = DataSourceModel(name='ruv_model')
    dsrc_model.model = 'RUV'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Create temp streamdef so the data product can create the stream.
    craft = CoverageCraft
    sdom, tdom = craft.create_domains()
    sdom = sdom.dump()
    tdom = tdom.dump()
    parameter_dictionary = craft.create_parameters()
    # ParameterDictionary.dump() -> serializable form passed to create_data_product.
    parameter_dictionary = parameter_dictionary.dump()

    dprod = IonObject(RT.DataProduct,
                      name='ruv_parsed_product',
                      description='parsed ruv product',
                      temporal_domain=tdom,
                      spatial_domain=sdom)
    streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")

    # Generate the data product and associate it to the ExternalDataset.
    dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                               stream_definition_id=streamdef_id,
                                               parameter_dictionary=parameter_dictionary)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)
    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]
    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

    # CBM: Use CF standard_names
    #ttool = TaxyTool()
    #
    #ttool.add_taxonomy_set('data','test data')
    pdict = ParameterDictionary()
    # ... (remainder of this example omitted in the source listing) ...
示例2: _setup_resources
# 需要导入模块: from coverage_model.parameter import ParameterDictionary [as 别名]
# 或者: from coverage_model.parameter.ParameterDictionary import dump [as 别名]
def _setup_resources(self):
    """Create and register the test resources for the USGS (DAP) external dataset.

    Builds the provider/source/dataset/model objects, registers and wires
    them together through DAMS, then creates a stream definition and a data
    product so the external-dataset agent has a stream to publish on.

    NOTE(review): the original listing lost its indentation and elides the
    tail of this method; the visible statements are reproduced verbatim and
    re-indented.
    """
    # TODO: some or all of this (or some variation) should move to DAMS'
    # Service clients used to build the test resources for the dataset.
    dams_cli = DataAcquisitionManagementServiceClient()
    dpms_cli = DataProductManagementServiceClient()
    rr_cli = ResourceRegistryServiceClient()
    pubsub_cli = PubsubManagementServiceClient()

    # External dataset agent and its instance.
    eda = ExternalDatasetAgent()
    eda_id = dams_cli.create_external_dataset_agent(eda)
    eda_inst = ExternalDatasetAgentInstance()
    eda_inst_id = dams_cli.create_external_dataset_agent_instance(eda_inst, external_dataset_agent_id=eda_id)

    # Create and register the necessary resources/objects.

    # Create DataProvider
    dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
    dprov.contact.name = 'Christopher Mueller'
    # NOTE(review): address redacted in the listing -- confirm before use.
    dprov.contact.email = '[email protected]'

    # Create DataSource (DAP protocol: the handler reads an OPeNDAP/netCDF source).
    dsrc = DataSource(protocol_type='DAP', institution=Institution(), contact=ContactInformation())
    dsrc.connection_params['base_data_url'] = ''
    dsrc.contact.name = 'Tim Giguere'
    dsrc.contact.email = '[email protected]'

    # Create ExternalDataset describing the netCDF file and its dimensions/variables.
    ds_name = 'usgs_test_dataset'
    dset = ExternalDataset(name=ds_name, dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
    # The usgs.nc test dataset is a download of the R1 dataset found here:
    # http://thredds-test.oceanobservatories.org/thredds/dodsC/ooiciData/E66B1A74-A684-454A-9ADE-8388C2C634E5.ncml
    dset.dataset_description.parameters['dataset_path'] = 'test_data/usgs.nc'
    dset.dataset_description.parameters['temporal_dimension'] = 'time'
    dset.dataset_description.parameters['zonal_dimension'] = 'lon'
    dset.dataset_description.parameters['meridional_dimension'] = 'lat'
    dset.dataset_description.parameters['vertical_dimension'] = 'z'
    dset.dataset_description.parameters['variables'] = [
        'water_temperature',
        'streamflow',
        'water_temperature_bottom',
        'water_temperature_middle',
        'specific_conductance',
        'data_qualifier',
    ]

    # Create DataSourceModel (no real handler for this test fixture).
    dsrc_model = DataSourceModel(name='dap_model')
    dsrc_model.model = 'DAP'
    dsrc_model.data_handler_module = 'N/A'
    dsrc_model.data_handler_class = 'N/A'

    ## Run everything through DAMS
    ds_id = dams_cli.create_external_dataset(external_dataset=dset)
    ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
    ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
    ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)

    # Register the ExternalDataset
    dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)

    # Or using each method
    dams_cli.assign_data_source_to_external_data_provider(data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id)
    dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
    dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
    dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
    # dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)

    # Create temp streamdef so the data product can create the stream.
    streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")
    craft = CoverageCraft
    sdom, tdom = craft.create_domains()
    sdom = sdom.dump()
    tdom = tdom.dump()
    parameter_dictionary = craft.create_parameters()
    # ParameterDictionary.dump() -> serializable form passed to create_data_product.
    parameter_dictionary = parameter_dictionary.dump()

    dprod = IonObject(RT.DataProduct,
                      name='usgs_parsed_product',
                      description='parsed usgs product',
                      temporal_domain=tdom,
                      spatial_domain=sdom)

    # Generate the data product and associate it to the ExternalDataset.
    dproduct_id = dpms_cli.create_data_product(data_product=dprod,
                                               stream_definition_id=streamdef_id,
                                               parameter_dictionary=parameter_dictionary)
    dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)
    stream_id, assn = rr_cli.find_objects(subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True)
    stream_id = stream_id[0]
    log.info('Created resources: {0}'.format({'ExternalDataset': ds_id, 'ExternalDataProvider': ext_dprov_id, 'DataSource': ext_dsrc_id, 'DataSourceModel': ext_dsrc_model_id, 'DataProducer': dproducer_id, 'DataProduct': dproduct_id, 'Stream': stream_id}))

    # CBM: Use CF standard_names
    # ... (remainder of this example omitted in the source listing) ...
示例3: _setup_resources
# 需要导入模块: from coverage_model.parameter import ParameterDictionary [as 别名]
# 或者: from coverage_model.parameter.ParameterDictionary import dump [as 别名]
#.........这里部分代码省略.........
]
# Create DataSourceModel
dsrc_model = DataSourceModel(name="slocum_model")
dsrc_model.model = "SLOCUM"
dsrc_model.data_handler_module = "N/A"
dsrc_model.data_handler_class = "N/A"
## Run everything through DAMS
ds_id = dams_cli.create_external_dataset(external_dataset=dset)
ext_dprov_id = dams_cli.create_external_data_provider(external_data_provider=dprov)
ext_dsrc_id = dams_cli.create_data_source(data_source=dsrc)
ext_dsrc_model_id = dams_cli.create_data_source_model(dsrc_model)
# Register the ExternalDataset
dproducer_id = dams_cli.register_external_data_set(external_dataset_id=ds_id)
# Or using each method
dams_cli.assign_data_source_to_external_data_provider(
data_source_id=ext_dsrc_id, external_data_provider_id=ext_dprov_id
)
dams_cli.assign_data_source_to_data_model(data_source_id=ext_dsrc_id, data_source_model_id=ext_dsrc_model_id)
dams_cli.assign_external_dataset_to_data_source(external_dataset_id=ds_id, data_source_id=ext_dsrc_id)
dams_cli.assign_external_dataset_to_agent_instance(external_dataset_id=ds_id, agent_instance_id=eda_inst_id)
# dams_cli.assign_external_data_agent_to_agent_instance(external_data_agent_id=self.eda_id, agent_instance_id=self.eda_inst_id)
# create temp streamdef so the data product can create the stream
streamdef_id = pubsub_cli.create_stream_definition(name="temp", description="temp")
# Generate the data product and associate it to the ExternalDataset
craft = CoverageCraft
sdom, tdom = craft.create_domains()
sdom = sdom.dump()
tdom = tdom.dump()
parameter_dictionary = craft.create_parameters()
parameter_dictionary = parameter_dictionary.dump()
dprod = IonObject(
RT.DataProduct,
name="slocum_parsed_product",
description="parsed slocum product",
temporal_domain=tdom,
spatial_domain=sdom,
)
dproduct_id = dpms_cli.create_data_product(
data_product=dprod, stream_definition_id=streamdef_id, parameter_dictionary=parameter_dictionary
)
dams_cli.assign_data_product(input_resource_id=ds_id, data_product_id=dproduct_id)
stream_id, assn = rr_cli.find_objects(
subject=dproduct_id, predicate=PRED.hasStream, object_type=RT.Stream, id_only=True
)
stream_id = stream_id[0]
log.info(
"Created resources: {0}".format(
{
"ExternalDataset": ds_id,
"ExternalDataProvider": ext_dprov_id,
"DataSource": ext_dsrc_id,
"DataSourceModel": ext_dsrc_model_id,
"DataProducer": dproducer_id,
"DataProduct": dproduct_id,