This article collects and summarizes typical usage examples of the Python class interface.services.dm.iuser_notification_service.UserNotificationServiceClient. If you have been wondering what UserNotificationServiceClient is for, how to use it, or where it is used, the curated class examples below may help.
The following presents 14 code examples of the UserNotificationServiceClient class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
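Before the full listings, here is a minimal usage sketch distilled from the setUp methods in the examples below: start a container, deploy the services, construct the client (bound to the container's messaging node, or with no arguments), and call operations such as create_notification. This is a sketch assuming the coi-services/pyon environment; the import paths for IonIntegrationTestCase and NotificationRequest and the placeholder user_id are assumptions, not verbatim source.

from pyon.util.int_test import IonIntegrationTestCase  # assumed import path
from interface.objects import NotificationRequest      # assumed import path
from interface.services.dm.iuser_notification_service import UserNotificationServiceClient

class UserNotificationSmokeTest(IonIntegrationTestCase):
    def setUp(self):
        # start a container and deploy the DM services, as in Example 1
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')
        # the client can be bound to the container's messaging node;
        # several examples below construct it with no arguments instead
        self.unsc = UserNotificationServiceClient(node=self.container.node)

    def test_create_and_delete_notification(self):
        # 'some_user_id' is a hypothetical placeholder; the real tests first
        # create a UserInfo or ActorIdentity resource and use its id here
        user_id = 'some_user_id'
        notification = NotificationRequest(name='notification1',
                                           origin='Some_Resource_Agent_ID1',
                                           event_type='ResourceLifecycleEvent')
        notification_id = self.unsc.create_notification(notification=notification,
                                                        user_id=user_id)
        self.unsc.delete_notification(notification_id)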
Example 1: setUp
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2dm.yml')
self.unsc = UserNotificationServiceClient(node=self.container.node)
self.rrc = ResourceRegistryServiceClient(node=self.container.node)
self.imc = IdentityManagementServiceClient(node=self.container.node)
Example 2: setUp
def setUp(self):
# Start container
super(TestActivateInstrumentIntegration, self).setUp()
config = DotDict()
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
# Now create client to DataProductManagementService
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.usernotificationclient = UserNotificationServiceClient()
# set up listener vars
self._data_greenlets = []
self._no_samples = None
self._samples_received = []
self.event_publisher = EventPublisher()
Example 3: setUp
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
self.process_dispatcher = ProcessDispatcherServiceClient()
self.dataset_management = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
self.data_retriever = DataRetrieverServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
datastore_name = CACHE_DATASTORE_NAME
self.db = self.container.datastore_manager.get_datastore(datastore_name)
self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
self.pids = []
self.exchange_points = []
self.exchange_names = []
#------------------------------------------------------------------------------------------------
# First launch the ingestors
#------------------------------------------------------------------------------------------------
self.exchange_space = 'science_granule_ingestion'
self.exchange_point = 'science_data'
config = DotDict()
config.process.datastore_name = 'datasets'
config.process.queue_name = self.exchange_space
self.exchange_names.append(self.exchange_space)
self.exchange_points.append(self.exchange_point)
pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
log.debug("the ingestion worker process id: %s", pid)
self.pids.append(pid)
self.addCleanup(self.cleaning_up)
Example 4: TestDataProductManagementServiceIntegration
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
self.process_dispatcher = ProcessDispatcherServiceClient()
self.dataset_management = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
self.data_retriever = DataRetrieverServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
datastore_name = CACHE_DATASTORE_NAME
self.db = self.container.datastore_manager.get_datastore(datastore_name)
self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
self.pids = []
self.exchange_points = []
self.exchange_names = []
#------------------------------------------------------------------------------------------------
# First launch the ingestors
#------------------------------------------------------------------------------------------------
self.exchange_space = 'science_granule_ingestion'
self.exchange_point = 'science_data'
config = DotDict()
config.process.datastore_name = 'datasets'
config.process.queue_name = self.exchange_space
self.exchange_names.append(self.exchange_space)
self.exchange_points.append(self.exchange_point)
pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
log.debug("the ingestion worker process id: %s", pid)
self.pids.append(pid)
self.addCleanup(self.cleaning_up)
def cleaning_up(self):
for pid in self.pids:
log.debug("number of pids to be terminated: %s", len(self.pids))
try:
self.process_dispatcher.cancel_process(pid)
log.debug("Terminated the process: %s", pid)
except:
log.debug("could not terminate the process id: %s" % pid)
IngestionManagementIntTest.clean_subscriptions()
for xn in self.exchange_names:
xni = self.container.ex_manager.create_xn_queue(xn)
xni.delete()
for xp in self.exchange_points:
xpi = self.container.ex_manager.create_xp(xp)
xpi.delete()
def get_datastore(self, dataset_id):
dataset = self.dataset_management.read_dataset(dataset_id)
datastore_name = dataset.datastore_name
datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
return datastore
def test_create_data_product(self):
#------------------------------------------------------------------------------------------------
# create a stream definition for the data from the ctd simulator
#------------------------------------------------------------------------------------------------
parameter_dictionary_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary_id)
log.debug("Created stream def id %s" % ctd_stream_def_id)
#------------------------------------------------------------------------------------------------
# test creating a new data product w/o a stream definition
#------------------------------------------------------------------------------------------------
# Generic time-series data domain creation
tdom, sdom = time_series_domain()
#.........remaining code omitted.........
Example 5: TestDataProductManagementServiceIntegration
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.dpsc_cli = DataProductManagementServiceClient()
self.rrclient = ResourceRegistryServiceClient()
self.damsclient = DataAcquisitionManagementServiceClient()
self.pubsubcli = PubsubManagementServiceClient()
self.ingestclient = IngestionManagementServiceClient()
self.process_dispatcher = ProcessDispatcherServiceClient()
self.dataset_management = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
self.data_retriever = DataRetrieverServiceClient()
self.identcli = IdentityManagementServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
self.pids = []
self.exchange_points = []
self.exchange_names = []
#------------------------------------------------------------------------------------------------
# First launch the ingestors
#------------------------------------------------------------------------------------------------
self.exchange_space = 'science_granule_ingestion'
self.exchange_point = 'science_data'
config = DotDict()
config.process.datastore_name = 'datasets'
config.process.queue_name = self.exchange_space
self.exchange_names.append(self.exchange_space)
self.exchange_points.append(self.exchange_point)
pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
log.debug("the ingestion worker process id: %s", pid)
self.pids.append(pid)
self.addCleanup(self.cleaning_up)
def cleaning_up(self):
for pid in self.pids:
log.debug("number of pids to be terminated: %s", len(self.pids))
try:
self.process_dispatcher.cancel_process(pid)
log.debug("Terminated the process: %s", pid)
except:
log.debug("could not terminate the process id: %s" % pid)
IngestionManagementIntTest.clean_subscriptions()
for xn in self.exchange_names:
xni = self.container.ex_manager.create_xn_queue(xn)
xni.delete()
for xp in self.exchange_points:
xpi = self.container.ex_manager.create_xp(xp)
xpi.delete()
def get_datastore(self, dataset_id):
dataset = self.dataset_management.read_dataset(dataset_id)
datastore_name = dataset.datastore_name
datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
return datastore
@attr('EXT')
@attr('PREP')
def test_create_data_product(self):
#------------------------------------------------------------------------------------------------
# create a stream definition for the data from the ctd simulator
#------------------------------------------------------------------------------------------------
parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
log.debug("Created stream def id %s" % ctd_stream_def_id)
#------------------------------------------------------------------------------------------------
# test creating a new data product w/o a stream definition
#------------------------------------------------------------------------------------------------
dp_obj = IonObject(RT.DataProduct,
#.........remaining code omitted.........
Example 6: TestActivateInstrumentIntegration
class TestActivateInstrumentIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
super(TestActivateInstrumentIntegration, self).setUp()
config = DotDict()
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
# Now create client to DataProductManagementService
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.usernotificationclient = UserNotificationServiceClient()
# set up listener vars
self._data_greenlets = []
self._no_samples = None
self._samples_received = []
self.event_publisher = EventPublisher()
def create_logger(self, name, stream_id=''):
# logger process
producer_definition = ProcessDefinition(name=name+'_logger')
producer_definition.executable = {
'module':'ion.processes.data.stream_granule_logger',
'class':'StreamGranuleLogger'
}
logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
configuration = {
'process':{
'stream_id':stream_id,
}
}
pid = self.processdispatchclient.schedule_process(process_definition_id=logger_procdef_id,
configuration=configuration)
return pid
def _create_notification(self, user_name = '', instrument_id='', product_id=''):
#--------------------------------------------------------------------------------------
# Make notification request objects
#--------------------------------------------------------------------------------------
notification_request_1 = NotificationRequest( name= 'notification_1',
origin=instrument_id,
origin_type="instrument",
event_type='ResourceLifecycleEvent')
notification_request_2 = NotificationRequest( name='notification_2',
origin=product_id,
origin_type="data product",
event_type='DetectionEvent')
#--------------------------------------------------------------------------------------
# Create a user and get the user_id
#--------------------------------------------------------------------------------------
user = UserInfo()
user.name = user_name
user.contact.email = '%[email protected]' % user_name
user_id, _ = self.rrclient.create(user)
#--------------------------------------------------------------------------------------
# Create notification
#--------------------------------------------------------------------------------------
self.usernotificationclient.create_notification(notification=notification_request_1, user_id=user_id)
self.usernotificationclient.create_notification(notification=notification_request_2, user_id=user_id)
log.debug( "test_activateInstrumentSample: create_user_notifications user_id %s", str(user_id) )
return user_id
def get_datastore(self, dataset_id):
dataset = self.datasetclient.read_dataset(dataset_id)
datastore_name = dataset.datastore_name
datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
return datastore
def _check_computed_attributes_of_extended_instrument(self, expected_instrument_device_id = '',extended_instrument = None):
# Verify that computed attributes exist for the extended instrument
self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue)
self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)
#.........remaining code omitted.........
Example 7: setUp
def setUp(self):
self._start_container()
# patch the CFG service.user_notification.max_daily_notifications value so we only test 10
original_CFG_max = CFG.get_safe("service.user_notification.max_daily_notifications", 1000)
CFG['service']['user_notification']['max_daily_notifications'] = 10
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.object_store = self.container.object_store
self.resource_registry = self.container.resource_registry
self.user_notification = UserNotificationServiceClient()
self.event_publisher = EventPublisher()
# create UserInfo object (user)
user = UserInfo()
user.name = 'Iceman'
user.contact.email = '[email protected]'
user_id, _ = self.resource_registry.create(user)
self.user = self.resource_registry.read(user_id)
# create NotificationRequest objects (notifications)
# 4 notifications are created:
# REAL_TIME, EMAIL(user default via UserInfo)
# REAL_TIME, EMAIL(in DeliveryConfiguration)
# DISABLED, EMAIL(in DeliveryConfiguration)
# REAL_TIME, SMS(in DeliveryConfiguration)
# REAL_TIME, EMAIL(user default via UserInfo)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='REAL_TIME to default UserInfo email',
type=NotificationTypeEnum.SIMPLE,
origin='Miramar',
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
# store this notification_id to check disabled_by_system status later
self.notification_id = self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# REAL_TIME, EMAIL(in DeliveryConfiguration), 10 notifications/day max
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='REAL_TIME to alternate email, 10 notifications/day max',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# DISABLED, EMAIL(in DeliveryConfiguration)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.DISABLED)
notification_request = IonObject(OT.NotificationRequest,
name='DISABLED to alternate email',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# REAL_TIME, SMS(in DeliveryConfiguration)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.SMS,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='SMS to alternate email',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
Example 8: RealTimeNotificationTestCase
class RealTimeNotificationTestCase(IonIntegrationTestCase):
def setUp(self):
self._start_container()
# patch the CFG service.user_notification.max_daily_notifications value so we only test 10
original_CFG_max = CFG.get_safe("service.user_notification.max_daily_notifications", 1000)
CFG['service']['user_notification']['max_daily_notifications'] = 10
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.object_store = self.container.object_store
self.resource_registry = self.container.resource_registry
self.user_notification = UserNotificationServiceClient()
self.event_publisher = EventPublisher()
# create UserInfo object (user)
user = UserInfo()
user.name = 'Iceman'
user.contact.email = '[email protected]'
user_id, _ = self.resource_registry.create(user)
self.user = self.resource_registry.read(user_id)
# create NotificationRequest objects (notifications)
# 4 notifications are created:
# REAL_TIME, EMAIL(user default via UserInfo)
# REAL_TIME, EMAIL(in DeliveryConfiguration)
# DISABLED, EMAIL(in DeliveryConfiguration)
# REAL_TIME, SMS(in DeliveryConfiguration)
# REAL_TIME, EMAIL(user default via UserInfo)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='REAL_TIME to default UserInfo email',
type=NotificationTypeEnum.SIMPLE,
origin='Miramar',
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
# store this notification_id to check disabled_by_system status later
self.notification_id = self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# REAL_TIME, EMAIL(in DeliveryConfiguration), 10 notifications/day max
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='REAL_TIME to alternate email, 10 notifications/day max',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# DISABLED, EMAIL(in DeliveryConfiguration)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.EMAIL,
frequency=NotificationFrequencyEnum.DISABLED)
notification_request = IonObject(OT.NotificationRequest,
name='DISABLED to alternate email',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
# REAL_TIME, SMS(in DeliveryConfiguration)
delivery_configuration = IonObject(OT.DeliveryConfiguration,
email='[email protected]',
mode=DeliveryModeEnum.SMS,
frequency=NotificationFrequencyEnum.REAL_TIME)
notification_request = IonObject(OT.NotificationRequest,
name='SMS to alternate email',
type=NotificationTypeEnum.SIMPLE,
origin="Miramar",
event_type=OT.ResourceLifecycleEvent,
delivery_configurations=[delivery_configuration])
self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
def test_realtime_notifications(self):
# monkey patch smtplib.SMTP to capture sent emails
original_SMTP = smtplib.SMTP # store original for restoration
class MonkeyPatchSMTP(object):
def __init__(self, address, host):
self.address = address
self.host = host
def login(self,username,password):
self.username = username
self.password = password
def sendmail(self,from_addr, to_addrs, msg):
global outbox
outbox.append((from_addr, to_addrs, msg,time.time()))
return []
def quit(self):
pass
#.........remaining code omitted.........
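The omitted body of this test presumably installs the patch and restores it when done. A minimal sketch of that pattern, assuming a module-level outbox list (the one referenced via global above) and using the original_SMTP reference stored earlier for restoration:

outbox = []  # module-level list that MonkeyPatchSMTP.sendmail appends to

smtplib.SMTP = MonkeyPatchSMTP      # install the fake SMTP client
try:
    # publish events here, then assert on the captured
    # (from_addr, to_addrs, msg, timestamp) tuples accumulated in outbox
    pass
finally:
    smtplib.SMTP = original_SMTP    # restore the real SMTP class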
Example 9: UserNotificationIntTest
class UserNotificationIntTest(IonIntegrationTestCase):
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2dm.yml')
self.unsc = UserNotificationServiceClient(node=self.container.node)
self.rrc = ResourceRegistryServiceClient(node=self.container.node)
self.imc = IdentityManagementServiceClient(node=self.container.node)
@attr('LOCOINT')
@unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
def test_email(self):
proc1 = self.container.proc_manager.procs_by_name['user_notification']
# Create a user and get the user_id
user = UserInfo(name = 'new_user')
user_id, _ = self.rrc.create(user)
# set up....
notification_id = self.unsc.create_email(event_type='ResourceLifecycleEvent',
event_subtype=None,
origin='Some_Resource_Agent_ID1',
origin_type=None,
user_id=user_id,
email='[email protected]',
mode = DeliveryMode.DIGEST,
message_header='message_header',
parser='parser',
period=1)
#------------------------------------------------------------------------------------------------------
# Setup so as to be able to get the message and headers going into the
# subscription callback method of the EmailEventProcessor
#------------------------------------------------------------------------------------------------------
# publish an event for each notification to generate the emails
rle_publisher = EventPublisher("ResourceLifecycleEvent")
rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")
msg_tuple = proc1.event_processors[notification_id].smtp_client.sentmail.get(timeout=4)
self.assertTrue(proc1.event_processors[notification_id].smtp_client.sentmail.empty())
message = msg_tuple[2]
list_lines = message.split("\n")
#-------------------------------------------------------
# parse the message body
#-------------------------------------------------------
message_dict = {}
for line in list_lines:
key_item = line.split(": ")
if key_item[0] == 'Subject':
message_dict['Subject'] = key_item[1] + key_item[2]
else:
try:
message_dict[key_item[0]] = key_item[1]
except IndexError as exc:
# these IndexError exceptions happen only because the message sometimes
# contains successive "\r\n" sequences (i.e. blank lines), so the
# indexing goes out of range. Such lines can safely be ignored, so we
# swallow the exception here.
pass
#-------------------------------------------------------
# make assertions
#-------------------------------------------------------
self.assertEquals(msg_tuple[1], '[email protected]' )
#self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)
#self.assertEquals(message_dict['From'], ION_NOTIFICATION_EMAIL_ADDRESS)
self.assertEquals(message_dict['To'], '[email protected]')
self.assertEquals(message_dict['Event'].rstrip('\r'), 'ResourceLifecycleEvent')
self.assertEquals(message_dict['Originator'].rstrip('\r'), 'Some_Resource_Agent_ID1')
self.assertEquals(message_dict['Description'].rstrip('\r'), 'RLE test event')
@attr('LOCOINT')
@unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
def test_sms(self):
proc1 = self.container.proc_manager.procs_by_name['user_notification']
# Create a user and get the user_id
user = UserInfo(name = 'new_user')
user_id, _ = self.rrc.create(user)
# set up....
notification_id = self.unsc.create_sms(event_type='ResourceLifecycleEvent',
event_subtype=None,
origin='Some_Resource_Agent_ID1',
origin_type=None,
user_id=user_id,
phone = '401-XXX-XXXX',
provider='T-Mobile',
message_header='message_header',
parser='parser',
#.........remaining code omitted.........
Example 10: TestDataProductManagementServiceIntegration
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.dpsc_cli = DataProductManagementServiceClient(node=self.container.node)
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
self.process_dispatcher = ProcessDispatcherServiceClient()
self.dataset_management = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
datastore_name = CACHE_DATASTORE_NAME
self.db = self.container.datastore_manager.get_datastore(datastore_name)
self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
#------------------------------------------------------------------------------------------------
# First launch the ingestors
#------------------------------------------------------------------------------------------------
self.exchange_space = 'science_granule_ingestion'
self.exchange_point = 'science_data'
config = DotDict()
config.process.datastore_name = 'datasets'
config.process.queue_name = self.exchange_space
self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
def get_datastore(self, dataset_id):
dataset = self.dataset_management.read_dataset(dataset_id)
datastore_name = dataset.datastore_name
datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
return datastore
@unittest.skip('OBE')
def test_get_last_update(self):
# Construct temporal and spatial Coordinate Reference System objects
tcrs = CRS([AxisTypeEnum.TIME])
scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])
# Construct temporal and spatial Domain objects
tdom = GridDomain(GridShape('temporal', [0]), tcrs, MutabilityEnum.EXTENSIBLE) # 1d (timeline)
sdom = GridDomain(GridShape('spatial', [0]), scrs, MutabilityEnum.IMMUTABLE) # 1d spatial topology (station/trajectory)
sdom = sdom.dump()
tdom = tdom.dump()
#@TODO: DO NOT DO THIS, WHEN THIS TEST IS REWRITTEN GET RID OF THIS, IT WILL FAIL, thanks -Luke
parameter_dictionary = get_param_dict('ctd_parsed_param_dict')
parameter_dictionary = parameter_dictionary.dump()
dp_obj = IonObject(RT.DataProduct,
name='DP1',
description='some new dp',
temporal_domain = tdom,
spatial_domain = sdom)
data_product_id = self.dpsc_cli.create_data_product(data_product=dp_obj, stream_definition_id=self.stream_def_id, parameter_dictionary=parameter_dictionary)
stream_ids, garbage = self.rrclient.find_objects(data_product_id, PRED.hasStream, id_only=True)
stream_id = stream_ids[0]
fake_lu = LastUpdate()
fake_lu_doc = self.db._ion_object_to_persistence_dict(fake_lu)
self.db.create_doc(fake_lu_doc, object_id=stream_id)
#------------------------------------------
# Now execute
#------------------------------------------
res = self.dpsc_cli.get_last_update(data_product_id=data_product_id)
self.assertTrue(isinstance(res[stream_id], LastUpdate), 'retrieving documents failed')
def test_create_data_product(self):
#------------------------------------------------------------------------------------------------
# create a stream definition for the data from the ctd simulator
#------------------------------------------------------------------------------------------------
parameter_dictionary_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
#.........remaining code omitted.........
Example 11: TestDataProductManagementServiceIntegration
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
#print 'instantiating container'
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.dpsc_cli = DataProductManagementServiceClient()
self.rrclient = ResourceRegistryServiceClient()
self.damsclient = DataAcquisitionManagementServiceClient()
self.pubsubcli = PubsubManagementServiceClient()
self.ingestclient = IngestionManagementServiceClient()
self.process_dispatcher = ProcessDispatcherServiceClient()
self.dataset_management = DatasetManagementServiceClient()
self.unsc = UserNotificationServiceClient()
self.data_retriever = DataRetrieverServiceClient()
#------------------------------------------
# Create the environment
#------------------------------------------
datastore_name = CACHE_DATASTORE_NAME
self.db = self.container.datastore_manager.get_datastore(datastore_name)
self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')
self.process_definitions = {}
ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
ingestion_worker_definition.executable = {
'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
'class' :'ScienceGranuleIngestionWorker'
}
process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
self.process_definitions['ingestion_worker'] = process_definition_id
self.pids = []
self.exchange_points = []
self.exchange_names = []
#------------------------------------------------------------------------------------------------
# First launch the ingestors
#------------------------------------------------------------------------------------------------
self.exchange_space = 'science_granule_ingestion'
self.exchange_point = 'science_data'
config = DotDict()
config.process.datastore_name = 'datasets'
config.process.queue_name = self.exchange_space
self.exchange_names.append(self.exchange_space)
self.exchange_points.append(self.exchange_point)
pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
log.debug("the ingestion worker process id: %s", pid)
self.pids.append(pid)
self.addCleanup(self.cleaning_up)
def cleaning_up(self):
for pid in self.pids:
log.debug("number of pids to be terminated: %s", len(self.pids))
try:
self.process_dispatcher.cancel_process(pid)
log.debug("Terminated the process: %s", pid)
except:
log.debug("could not terminate the process id: %s" % pid)
IngestionManagementIntTest.clean_subscriptions()
for xn in self.exchange_names:
xni = self.container.ex_manager.create_xn_queue(xn)
xni.delete()
for xp in self.exchange_points:
xpi = self.container.ex_manager.create_xp(xp)
xpi.delete()
def get_datastore(self, dataset_id):
dataset = self.dataset_management.read_dataset(dataset_id)
datastore_name = dataset.datastore_name
datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
return datastore
@attr('EXT')
@attr('PREP')
def test_create_data_product(self):
#------------------------------------------------------------------------------------------------
# create a stream definition for the data from the ctd simulator
#------------------------------------------------------------------------------------------------
parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
log.debug("Created stream def id %s" % ctd_stream_def_id)
#------------------------------------------------------------------------------------------------
# test creating a new data product w/o a stream definition
#------------------------------------------------------------------------------------------------
# Generic time-series data domain creation
tdom, sdom = time_series_domain()
#.........remaining code omitted.........
Developer: ateranishi, Project: coi-services, Lines of code: 101, Source file: test_data_product_management_service_integration.py
Example 12: UserNotificationIntTest
class UserNotificationIntTest(IonIntegrationTestCase):
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2dm.yml')
self.unsc = UserNotificationServiceClient(node=self.container.node)
self.rrc = ResourceRegistryServiceClient(node=self.container.node)
self.imc = IdentityManagementServiceClient(node=self.container.node)
def xtest_find_event_types_for_resource(self):
dataset_object = IonObject(RT.DataSet, name="dataset1")
dataset_id, version = self.rrc.create(dataset_object)
events = self.unsc.find_event_types_for_resource(dataset_id)
log.debug("dataset events = " + str(events))
try:
events = self.unsc.find_event_types_for_resource("bogus_id")
self.fail("failed to detect non-existant resource")
except:
pass
def test_create_two_user_notifications(self):
user_identity_object = IonObject(RT.UserIdentity, name="user1")
user_id = self.imc.create_user_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['resource_lifecycle']})
self.unsc.create_notification(notification_object, user_id)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['data']})
self.unsc.create_notification(notification_object, user_id)
def test_delete_user_notifications(self):
user_identity_object = IonObject(RT.UserIdentity, name="user1")
user_id = self.imc.create_user_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['resource_lifecycle']})
notification1_id = self.unsc.create_notification(notification_object1, user_id)
notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['data']})
notification2_id = self.unsc.create_notification(notification_object2, user_id)
self.unsc.delete_notification(notification1_id)
self.unsc.delete_notification(notification2_id)
def test_find_user_notifications(self):
user_identity_object = IonObject(RT.UserIdentity, name="user1")
user_id = self.imc.create_user_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['resource_lifecycle']})
self.unsc.create_notification(notification_object, user_id)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['data']})
self.unsc.create_notification(notification_object, user_id)
notifications = self.unsc.find_notifications_by_user(user_id)
for n in notifications:
log.debug("n = " +str(n))
def test_update_user_notification(self):
user_identity_object = IonObject(RT.UserIdentity, name="user1")
user_id = self.imc.create_user_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['resource_lifecycle']})
notification_id = self.unsc.create_notification(notification_object, user_id)
notification = self.rrc.read(notification_id)
notification.origin_list = ['Some_Resource_Agent_ID5']
self.unsc.update_notification(notification)
def test_send_notification_emails(self):
user_identity_object = IonObject(RT.UserIdentity, name="user1")
user_id = self.imc.create_user_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['resource_lifecycle']})
self.unsc.create_notification(notification_object, user_id)
notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['data']})
self.unsc.create_notification(notification_object, user_id)
rle_publisher = ResourceLifecycleEventPublisher()
rle_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")
de_publisher = DataEventPublisher()
de_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID2', description="DE test event")
gevent.sleep(1)
def test_find_events(self):
#.........remaining code omitted.........
Example 13: UserNotificationIntTest
class UserNotificationIntTest(IonIntegrationTestCase):
def setUp(self):
self._start_container()
self.container.start_rel_from_url('res/deploy/r2dm.yml')
self.unsc = UserNotificationServiceClient(node=self.container.node)
self.rrc = ResourceRegistryServiceClient(node=self.container.node)
self.imc = IdentityManagementServiceClient(node=self.container.node)
def test_find_event_types_for_resource(self):
# create a dataset object in the RR to pass into the UNS method
dataset_object = IonObject(RT.DataSet, name="dataset1")
dataset_id, version = self.rrc.create(dataset_object)
# get the list of event types for the dataset
events = self.unsc.find_event_types_for_resource(dataset_id)
log.debug("dataset events = " + str(events))
if events != ['dataset_supplement_added', 'dataset_change']:
self.fail("failed to return correct list of event types")
# try to pass in an id of a resource that doesn't exist (should fail)
try:
events = self.unsc.find_event_types_for_resource("bogus_id")
self.fail("failed to detect non-existant resource")
except:
pass
def test_create_two_user_notifications(self):
# create user with email address in RR
user_identity_object = IonObject(RT.ActorIdentity, name="user1")
user_id = self.imc.create_actor_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
# create first notification
notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['ResourceLifecycleEvent']})
notification_id1 = self.unsc.create_notification(notification_object1, user_id)
# create second notification
notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['DataEvent']})
notification_id2 = self.unsc.create_notification(notification_object2, user_id)
# read the notifications back and check that they are correct
n1 = self.unsc.read_notification(notification_id1)
if n1.name != notification_object1.name or \
n1.origin_list != notification_object1.origin_list or \
n1.events_list != notification_object1.events_list:
self.fail("notification was not correct")
n2 = self.unsc.read_notification(notification_id2)
if n2.name != notification_object2.name or \
n2.origin_list != notification_object2.origin_list or \
n2.events_list != notification_object2.events_list:
self.fail("notification was not correct")
def test_delete_user_notifications(self):
# create user with email address in RR
user_identity_object = IonObject(RT.ActorIdentity, name="user1")
user_id = self.imc.create_actor_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
# create first notification
notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['ResourceLifecycleEvent']})
notification1_id = self.unsc.create_notification(notification_object1, user_id)
# create second notification
notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
"origin_list":['Some_Resource_Agent_ID2'],
"events_list":['DataEvent']})
notification2_id = self.unsc.create_notification(notification_object2, user_id)
# delete both notifications
self.unsc.delete_notification(notification1_id)
self.unsc.delete_notification(notification2_id)
# check that the notifications are not there
try:
n1 = self.unsc.read_notification(notification1_id)
except:
try:
n2 = self.unsc.read_notification(notification2_id)
except:
return
self.fail("failed to delete notifications")
def test_find_user_notifications(self):
# create user with email address in RR
user_identity_object = IonObject(RT.ActorIdentity, name="user1")
user_id = self.imc.create_actor_identity(user_identity_object)
user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'[email protected]'}})
self.imc.create_user_info(user_id, user_info_object)
# create first notification
notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
"origin_list":['Some_Resource_Agent_ID1'],
"events_list":['ResourceLifecycleEvent']})
#.........remaining code omitted.........
Example 14: TestActivateInstrumentIntegration
class TestActivateInstrumentIntegration(IonIntegrationTestCase):
def setUp(self):
# Start container
super(TestActivateInstrumentIntegration, self).setUp()
config = DotDict()
config.bootstrap.use_es = True
self._start_container()
self.addCleanup(TestActivateInstrumentIntegration.es_cleanup)
self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)
# Now create client to DataProductManagementService
self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
self.dpclient = DataProductManagementServiceClient(node=self.container.node)
self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
self.dataretrieverclient = DataRetrieverServiceClient(node=self.container.node)
self.dataset_management = DatasetManagementServiceClient()
self.usernotificationclient = UserNotificationServiceClient()
# set up listener vars
self._data_greenlets = []
self._no_samples = None
self._samples_received = []
self.event_publisher = EventPublisher()
@staticmethod
def es_cleanup():
es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
es_port = CFG.get_safe('server.elasticsearch.port', '9200')
es = ep.ElasticSearch(
host=es_host,
port=es_port,
timeout=10
)
indexes = STD_INDEXES.keys()
indexes.append('%s_resources_index' % get_sys_name().lower())
indexes.append('%s_events_index' % get_sys_name().lower())
for index in indexes:
IndexManagementService._es_call(es.river_couchdb_delete,index)
IndexManagementService._es_call(es.index_delete,index)
def create_logger(self, name, stream_id=''):
# logger process
producer_definition = ProcessDefinition(name=name+'_logger')
producer_definition.executable = {
'module':'ion.processes.data.stream_granule_logger',
'class':'StreamGranuleLogger'
}
logger_procdef_id = self.processdispatchclient.create_process_definition(process_definition=producer_definition)
configuration = {
'process':{
'stream_id':stream_id,
}
}
pid = self.processdispatchclient.schedule_process(process_definition_id=logger_procdef_id,
configuration=configuration)
return pid
def _create_notification(self, user_name = '', instrument_id='', product_id=''):
#--------------------------------------------------------------------------------------
# Make notification request objects
#--------------------------------------------------------------------------------------
notification_request_1 = NotificationRequest( name= 'notification_1',
origin=instrument_id,
origin_type="instrument",
event_type='ResourceLifecycleEvent')
notification_request_2 = NotificationRequest( name='notification_2',
origin=product_id,
origin_type="data product",
event_type='DetectionEvent')
#--------------------------------------------------------------------------------------
# Create a user and get the user_id
#--------------------------------------------------------------------------------------
user = UserInfo()
user.name = user_name
user.contact.email = '%[email protected]' % user_name
user_id, _ = self.rrclient.create(user)
#--------------------------------------------------------------------------------------
# Create notification
#--------------------------------------------------------------------------------------
#.........remaining code omitted.........