

Python PersistentDataObject.load Method Code Examples

This article collects typical usage examples of the Python method mpx.lib.persistent.PersistentDataObject.load. If you are wondering what PersistentDataObject.load does, how to call it, or where to find it used in real code, the hand-picked samples below should help. You can also explore further usage examples of the containing class, mpx.lib.persistent.PersistentDataObject.


The following presents 15 code examples of the PersistentDataObject.load method, ordered by popularity by default.
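
Before diving into the examples, note the recurring idiom on this page: construct a PersistentDataObject, assign default values to every attribute you want persisted, then call load() so that any previously saved state overwrites those defaults, and call save() whenever the state changes. A minimal sketch of that idiom, using only the API visible in the examples below (the object name and attributes are made up):

from mpx.lib.persistent import PersistentDataObject

settings = PersistentDataObject('example_settings')  # a PDO can also be keyed to an mpx node
settings.threshold = 10      # defaults must exist before load()
settings.units = 'kWh'
settings.load()              # replaces the defaults with previously saved values, if any
settings.threshold += 1      # ...use or modify the state...
settings.save()              # write the new state back to disk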

Example 1: Logger

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class Logger(ServiceNode):

    ##
    # @author Craig Warren
    # @param config
    # @return None
    def configure(self,config):
        ServiceNode.configure(self,config)
    ##
    # @author Craig Warren
    #   starts the logger service
    # @return None
    def start(self):
        ServiceNode.start(self)
        # this will correctly add the msglog as a child
        #  to the logger.
        if 'msglog' not in self.children_names():
            columns = mpx.lib.msglog.get_columns()
            log = Log()
            log.configure({'name':'msglog', 'parent':self})
            for c in columns:
                column = mpx.lib.factory('mpx.service.logger.column')
                config = c.configuration()
                config['parent'] = log
                column.configure(config)
        self._logs = PersistentDataObject(self)
        self._logs.names = []
        self._logs.load()
        for name in self._logs.names:
            if ((not mpx.lib.log.log_exists(name)) and 
                (name not in self.children_names())):
                log = mpx.lib.log.log(name)
                log.destroy()
                del(log)
        self._logs.names = []
        for child in self.children_nodes():
            if not isinstance(child, Alias):
                # Don't manage other managers' logs...
                self._logs.names.append(child.name)
        self._logs.save()

    ##
    # @author Craig Warren
    #   stops the logger service
    # @return None
    def stop(self):
        return ServiceNode.stop(self)

    ##
    # @author Craig Warren
    # @param log_name
    #   the name of the log to return
    # @return Log
    #   returns the log if it can't find the log it
    #   returns None
    def get_log(self,log_name):
        for child in self.children_nodes():
            if child.name == log_name:
                return child
        return None
Developer: mcruse, Project: monotone, Lines: 62, Source: logger.py

Example 2: start

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
 def start(self):
     self.managernode = self.as_node(self.manager)
     self.synclock.acquire()
     try:
         alarmsname = '%s (%s)' % (self.name, 'alarms')
         eventsname = '%s (%s)' % (self.name, 'events')
         self.alarms = PersistentDictionary(alarmsname,
                                            encode=self.encode,
                                            decode=self.decode)
         self.events = PersistentDictionary(eventsname,
                                            encode=self.encode,
                                            decode=self.decode)
         # Migrate PDO data from old style persistence.
         pdodata = PersistentDataObject(self, dmtype=GC_NEVER)
         if os.path.exists(pdodata.filename()):
             msglog.log('broadway', msglog.types.INFO,
                        "Migrating previous alarm and event data")
             pdodata.events = {}
             pdodata.alarms = {}
             pdodata.load()
             migrate(pdodata, self.decode)
             self.rebuildstorage()
             pdodata.destroy()
         del(pdodata)
     finally:
         self.synclock.release()
     self.securitymanager = self.as_node('/services/Security Manager')
     
     register = self.managernode.register_for_type
     self.sub = register(self.handle_event, StateEvent)
     self.running.set()
     super(AlarmConfigurator, self).start()
Developer: mcruse, Project: monotone, Lines: 34, Source: request_handler.py
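
Examples 2, 4 and 7 share the same migration idiom: new state lives in a PersistentDictionary, and on startup the node checks whether a file from an old-style PersistentDataObject still exists; if so, it loads the old object, copies its contents into the dictionary, and destroys the old file. A condensed sketch of that idiom, using only calls that appear in the examples on this page (the helper name, the 'records' attribute, and the argument names are illustrative; the dmtype=GC_NEVER variant shown above is omitted):

import os
from mpx.lib.persistent import PersistentDataObject

def migrate_old_pdo(node, target_dict, attr='records'):
    # node: the owning mpx node; target_dict: the new PersistentDictionary;
    # attr: the attribute name that was stored in the old PDO.
    pdodata = PersistentDataObject(node)
    if os.path.exists(pdodata.filename()):
        setattr(pdodata, attr, {})              # default before load()
        pdodata.load()
        target_dict.update(getattr(pdodata, attr))
        pdodata.destroy()                       # remove the old-format file
    del pdodata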

Example 3: LastAlarm

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class LastAlarm(CompositeNode,EventConsumerMixin):
    def __init__(self):
        self._last_alarm = None
        self._started = 0
        CompositeNode.__init__(self)
        EventConsumerMixin.__init__(self,self._alarm_triggered)
    def configure(self, config):
        CompositeNode.configure(self, config)
    def configuration(self):
        config = CompositeNode.configuration(self)
        return config
    def start(self):
        self._pdo = PersistentDataObject(self)
        self._pdo.last_dictionary = None
        self._pdo.load()
        self._started = 1
        self.parent.event_subscribe(self,AlarmTriggerEvent)
        CompositeNode.start(self)
    def stop(self):
        self._started = 0
        self.parent.cancel(self,AlarmTriggerEvent)
        CompositeNode.stop(self)
    def _alarm_triggered(self, alarm):
        self._last_alarm = alarm
        self._pdo.last_dictionary = alarm.dictionary()
        self._pdo.save()
    def get(self, skipCache=0):
        return self._last_alarm
    def get_dictionary(self):
        return self._pdo.last_dictionary
Developer: mcruse, Project: monotone, Lines: 32, Source: last_alarm.py

Example 4: start

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
 def start(self):
     filename = '%s (%s)' % (self.name, 'triggers')
     self.manager = self.nodespace.as_node(self.manager)
     self._pdo_lock.acquire()
     try:
         if self._triggers is None:
             self._triggers = PersistentDictionary(
                 filename, encode=None, decode=None)
         if not self._triggers:
             pdodata = PersistentDataObject(self, dmtype=GC_NEVER)
             if os.path.exists(pdodata.filename()):
                 msglog.log('broadway', msglog.types.INFO, 
                            "Migrating previous trigger data.")
                 pdodata.triggers = {}
                 pdodata.load()
                 self._triggers.update(pdodata.triggers)
                 pdodata.destroy()
             del(pdodata)
         self._loadtriggers()
         if self.secured:
             self.security_manager = self.as_node("/services/Security Manager")
         else:
             self.security_manager = None
     finally: 
         self._pdo_lock.release()
     return super(TriggersConfigurator, self).start()
Developer: mcruse, Project: monotone, Lines: 28, Source: request_handler.py

Example 5: TriggeredExporter

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class TriggeredExporter(SynchronizedExporter):
    def __init__(self):
        SynchronizedExporter.__init__(self)
        self._sid = None
        self.evt = None #dil - debug
    def handle_log(self,event):
        self.debug_information('Log export triggered.')
        self.evt = event #dil - debug 
        value = event.results()[1]['value']
        if isinstance(value,Exception):
            raise value
        if value: # only export when value is true
            self.debug_information('Going to start export thread.')
            if self._lock.acquire(0):
                try:
                    thread = Thread(name=self.name, target=self.go,
                                    args=(time.time(),))
                    thread.start()
                finally:
                    self._lock.release()
            else:
                msglog.log('broadway',msglog.types.WARN, 
                           ('Last export still active, ' + 
                            'skipping current request.'))
                            
    def configure(self, config):
        set_attribute(self, 'trigger',REQUIRED,config)
        SynchronizedExporter.configure(self, config)
        
    def configuration(self):
        config = SynchronizedExporter.configuration(self)
        get_attribute(self,'trigger',config,str)
        return config
        
    def start(self):
        Exporter.start(self)
        if not self.running:
            self.running = 1
            self.connection = as_node(self.connection_node)
            self._time_keeper = PersistentDataObject(self)
            self._time_keeper.start_time = 0
            self._time_keeper.load()
            self._period = self.parent.parent.period
            self._setup_trigger()
        else: 
            raise EAlreadyRunning
            
    def _setup_trigger(self):
        try:
            self._sid = SM.create_delivered(self, {1:as_node(self.trigger)})
        except (ENotStarted, ENoSuchNode):
            msg = 'TriggeredExporter trigger: %s does not exist - could be nascent' % self.trigger
            msglog.log('broadway',msglog.types.WARN,msg)
            scheduler.seconds_from_now_do(60, self._setup_trigger)
Developer: mcruse, Project: monotone, Lines: 56, Source: triggered_exporter.py

Example 6: __init__

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
 def __init__(self, name, password_file=PASSWD_FILE, group_file=GROUP_FILE, shadow_file=SHADOW_FILE):
     self.__lock = Lock()
     self.__password_file = password_file
     self.__group_file = group_file
     self.__shadow_file = shadow_file
     self.meta = {}
     self.USERS.load()
     if not self.USERS.has_key(self.name()):
         msglog.log("broadway", msglog.types.INFO, ("No profile for user %s found, creating" " new profile" % name))
         self.USERS[self.name()] = str(UUID())
     PersistentDataObject.__init__(self, self.USERS[self.name()])
     PersistentDataObject.load(self)
Developer: , Project: , Lines: 14, Source:

Example 7: start

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
 def start(self):
     try:
         self._pdo_lock.acquire()
         try:
             if self.__running:
                 return
             self.__running = True
             self._trendconfig = PersistentDictionary(filename(self), encode=None, decode=None)
             if not self._trendconfig:
                 pdodata = PersistentDataObject(self, dmtype=GC_NEVER)
                 if os.path.exists(pdodata.filename()):
                     msglog.log("broadway", msglog.types.INFO, "Migrating previous trend data")
                     pdodata.trends = {}
                     pdodata.load()
                     self._trendconfig.update(pdodata.trends)
                 del (pdodata)
         finally:
             self._pdo_lock.release()
         super(TrendManager, self).start()
         self.logger = node.as_internal_node(self.logger_url)
         if self.has_child("trends"):
             self.trends = self.get_child("trends")
         else:
             self.trends = CompositeNode()
             self.trends.configure({"parent": self, "name": "trends"})
             self.trends.start()
         corrupt_trends = []
         for trendname, trenddump in self._trendconfig.items():
             msg = "Loading trend: %s" % trendname
             msglog.log("trendmanager", msglog.types.INFO, msg)
             try:
                 trend = unmarshal(trenddump)
             except:
                 corrupt_trends.append(trendname)
                 msg = "Failed to load trend: %s" % trendname
                 msglog.log("trendmanager", msglog.types.ERR, msg)
                 msglog.exception(prefix="Handled")
         for trendname in corrupt_trends:
             try:
                 msg = "Deleting trend information: %s" % trendname
                 msglog.log("trendmanager", msglog.types.INFO, msg)
                 self._delete_trend_configuration(trendname)
                 if self.trends.has_child(trendname):
                     trend = self.trends.get_child(trendname)
                     trend.prune(force=True)
             except:
                 msglog.exception(prefix="Handled")
     except:
         self.__running = False
         raise
     return
Developer: , Project: , Lines: 53, Source:

Example 8: WritingTransporter

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class WritingTransporter(Transporter):
    def configure(self, config):
        set_attribute(self, 'directory', '/tmp', config)
        set_attribute(self, 'file_prefix', REQUIRED, config)
        set_attribute(self, 'file_suffix', REQUIRED, config)
        set_attribute(self, 'name_scheme', None, config)
        set_attribute(self, 'timestamp_format', '%s', config)
        Transporter.configure(self, config)
        self._last = PersistentDataObject(self)
        self._last.filename = None
        self._last.count = 1
        self._last.load()
    def configuration(self):
        config = Transporter.configuration(self)
        get_attribute(self, 'directory', config)
        get_attribute(self, 'file_prefix', config)
        get_attribute(self, 'file_suffix', config)
        get_attribute(self, 'name_scheme', config)
        get_attribute(self, 'timestamp_format', config)
        return config
    def transport(self, data):
        if type(data) == type(''):
            data = StringIO.StringIO(data)
        filename = self._generate_filename()
        tempname = filename + '.tmp'
        file = open(tempname,'w')
        try:
            write = data.read(1024)
            while write:
                file.write(write)
                write = data.read(1024)
        finally:
            file.close()
        os.chmod(tempname,0444)
        os.rename(tempname,filename)
    def _generate_filename(self):
        filename = self.file_prefix
        append = ''
        if self.name_scheme == 'incremental':
            append = '%s' % self._last.count
        elif self.name_scheme == 'timestamp':
            file_time = self.parent.time_function(self.parent.scheduled_time())
            filename = filename + time.strftime(self.timestamp_format,file_time)
            append = '_%s' % (self._last.count + 1)
            if filename != self._last.filename:
                self._last.count = 0
                append = ''
        self._last.count += 1
        self._last.filename = filename
        return os.path.join(self.directory,filename + append + self.file_suffix)
Developer: mcruse, Project: monotone, Lines: 52, Source: writing_transporter.py
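
The transport() method in example 8 also illustrates a common trick for publishing files atomically: write to a temporary name, make the file read-only, then rename it to its final name, so a reader never sees a half-written file. A standalone sketch of that trick using only the standard library (the path and payload are invented):

import os

def atomic_write(filename, data):
    # Write to '<filename>.tmp', then rename over the final name;
    # on POSIX filesystems the rename step is atomic.
    tempname = filename + '.tmp'
    out = open(tempname, 'w')
    try:
        out.write(data)
    finally:
        out.close()
    os.chmod(tempname, 0o444)      # read-only, mirroring the example above
    os.rename(tempname, filename)

atomic_write('/tmp/export_example.xml', '<data/>')   # illustrative call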

Example 9: test_upgrade

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
    def test_upgrade(self):
        from mpx.upgrade.persistent import persistent_0

        old = persistent_0.PersistentDataObject("upgrade_test")
        old.purpose = "testing"
        old.save()
        old_filename = old._persistent.filename
        del (old.__dict__["_persistent"])
        del (old)
        new = PersistentDataObject("upgrade_test")
        self.failIf(os.path.exists(old_filename), "Upgrade failed to remove old version")
        new.purpose = None
        new.load()
        self.failUnless(new.purpose == "testing", "Upgrade failed to get old value")
        new.destroy()
        del (new)
Developer: , Project: , Lines: 18, Source:
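
Example 9 exercises the upgrade path from the old persistent_0 format; the same structure works for a plain save/load round trip. A hedged sketch of such a test, reusing only the constructor-by-name, save(), load() and destroy() calls shown above (the object name and attribute are invented):

from mpx.lib.persistent import PersistentDataObject

def test_round_trip():
    first = PersistentDataObject('round_trip_test')
    first.purpose = 'testing'
    first.save()
    del first
    second = PersistentDataObject('round_trip_test')
    second.purpose = None        # default before load()
    second.load()
    assert second.purpose == 'testing', 'load() failed to restore the saved value'
    second.destroy()             # clean up the persisted file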

Example 10: SimplePersistentValue

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class SimplePersistentValue(SimpleValue):
    def configure(self, config):
        SimpleValue.configure(self, config)
        self._pdo = PersistentDataObject(self)
        self._pdo.value = None
        self._pdo.conversion = None
        self._pdo.load()
        conversion = _get_name(self.conversion)
        if (self._pdo.value == None or 
            self._pdo.conversion != conversion):
            self._pdo.value = self.value
            self._pdo.conversion = conversion
            self._pdo.save()
        else:
            self.value = self._pdo.value
    def configuration(self):
        self.value = self._pdo.value
        return SimpleValue.configuration(self)
    def set(self,value,asyncOK=1):
        SimpleValue.set(self, value, asyncOK)
        self._pdo.value = self.value
        self._pdo.save()
    def get(self, skipCache=0):
        return self._pdo.value
Developer: mcruse, Project: monotone, Lines: 26, Source: simple_value.py

Example 11: ExportersConfigurator

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class ExportersConfigurator(CompositeNode):
    security = SecurityInformation.from_default()
    secured_by(security)
    def __init__(self, *args):
        self._pdo_lock = Lock()
        self.manager = None
        super(ExportersConfigurator, self).__init__(*args)
    def configure(self, config):
        self.setattr('path', config.get('path','/exportconfig'))
        self.setattr('container', config.get('container','/services/Alarm Exporters'))
        self.secured = as_internal_node("/services").secured
        super(ExportersConfigurator, self).configure(config)
    def configuration(self):
        config = super(ExportersConfigurator, self).configuration()
        config['path'] = self.getattr('path')
        config['container'] = self.getattr('container')
        return config
    def start(self):
        self.container = self.nodespace.as_node(self.container)
        self._pdo_lock.acquire()
        try:
            self._pdo = PersistentDataObject(self, dmtype=GC_NEVER)
            self._pdo.exporters = {}
            self._pdo.load()
            exporterdumps = self._pdo.exporters.values()
        finally: 
            self._pdo_lock.release()
        super(ExportersConfigurator, self).start()
        tstart = time.time()
        for exporterdump in exporterdumps:
            IPickles(cPickle.loads(exporterdump))()
        tend = time.time()
        tlapse = tend - tstart
        msglog.log('broadway', msglog.types.INFO,
                   'Exporter Configurator loaded '
                   '%s exporters in %s seconds.' % (len(exporterdumps), tlapse))
        self.manager = self.container
    def stop(self):
        super(ExportersConfigurator, self).stop()
        self.container = None
    def match(self, path):
        return path.startswith(self.path)
    security.protect('create_node', 'Configure')
    def create_node(self, name, config=()):
        config = dict(config)
        config.setdefault("name", name)
        config.setdefault("parent", self.manager)
        exporter = self.manager.nodespace.create_node(AlarmExporter)
        exporter.configure(config)
        exporter.start()
        self.updatepdo()
        return exporter.name
    security.protect('remove_node', 'Configure')
    def remove_node(self, name):
        exporter = self.manager.get_child(name)
        exporter.prune()
        self.updatepdo()
        return exporter.name
    security.protect('configure_node', 'Configure')
    def configure_node(self, name, config):
        exporter = self.manager.get_child(name)
        exporter.configure(config)
        self.updatepdo()
        return exporter.name
    security.protect('node_configuration', 'View')
    def node_configuration(self, name, extended=False):
        exporter = self.manager.get_child(name)
        return exporter.configuration()
    security.protect('configure_formatter', 'Configure')
    def configure_formatter(self, exporter, config):
        return self.configure_node(exporter, {"formatter": config})
    security.protect('formatter_configuration', 'View')
    def formatter_configuration(self, exporter, extended=False):
        return self.node_configuration(exporter).get("formatter", {})
    security.protect('configure_transporter', 'Configure')
    def configure_transporter(self, exporter, config):
        return self.configure_node(exporter, {"transporter": config})
    security.protect('transporter_configuration', 'View')
    def transporter_configuration(self, exporter, extended=False):
        return self.node_configuration(exporter).get("transporter", {})
    security.protect('trigger_configuration', 'View')
    def trigger_configuration(self, name=None):
        manager = self.nodespace.as_node('/services/Alarm Manager')
        sources = [manager] + manager.get_alarms()
        configuration = dict([(source.url, []) for source in sources])
        if name:
            exporter = self.manager.get_child(name)
            configuration.update(exporter.trigger_configuration())
        configs = []
        for source,events in configuration.items():
            configs.append({"source": source, "events": events})
        return configs
    security.protect('configure_triggers', 'Configure')
    def configure_triggers(self, name, triggers=()):
        configuration = {}
        for config in triggers:
            configuration[config["source"]] = config["events"]
        exporter = self.manager.get_child(name)
        exporter.configure_triggers(configuration)
        self.updatepdo()
#......... remainder of this code omitted .........
Developer: mcruse, Project: monotone, Lines: 103, Source: request_handler.py
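
Example 11 keeps the full configuration of every exporter as a pickled blob inside a single PDO attribute (a dict keyed by exporter name) and rebuilds the exporters from those blobs in start(). A reduced sketch of that storage pattern using cPickle directly (the PDO name and payload are illustrative; example 11 itself pickles IPickles adapters rather than plain dicts):

import cPickle
from mpx.lib.persistent import PersistentDataObject

store = PersistentDataObject('exporter_config_sketch')
store.exporters = {}                                               # default before load()
store.load()
store.exporters['ftp_exporter'] = cPickle.dumps({'period': 900})   # save a config blob
store.save()
for name, blob in store.exporters.items():                         # restore on startup
    config = cPickle.loads(blob)
    print name, config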

Example 12: __load

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
 def __load(self):
     result = PersistentDataObject.load(self)
     self.__snapshot(self.loaded())
     return result
Developer: mcruse, Project: monotone, Lines: 6, Source: trap_exporter.py

Example 13: EnergywiseManager

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class EnergywiseManager(CompositeNode):
    def __init__(self):
        CompositeNode.__init__(self)
        self._pdo_lock = Lock()
        self._pdo = None
        self.__running = False
        self.debug = 0
        return

    def configure(self,config):
        if self.debug:
            msglog.log('EnergywiseManager:', msglog.types.INFO,
                       'Inside configure' )
        CompositeNode.configure(self, config)
        set_attribute(self, 'debug', 0, config, int)
        return

    def configuration(self):
        config = CompositeNode.configuration(self)
        get_attribute(self, 'debug', config, str)
        return config

  

   # def configure_trend_in_switches(self, start_node, frequency):
    #    for child in start_node.children_nodes():
     #       if child.children_nodes():
      #          self.configure_trend_in_switches(child, frequency)
       #     else:
                # reached upto leaf, each energywise switch has trends as child
        #        child.new_trend(frequency)
        #return

    def delete_trend_configuration(self, trend_domain):
        self._pdo_lock.acquire()
        try:
            if self._pdo.trends.has_key(trend_domain): 
                # stop logging as well
                del self._pdo.trends[trend_domain]
            self._pdo.save()
        finally:
            self._pdo_lock.release()
        return
    def delete_trends(self, trendList):
        if self.debug:
            msglog.log('EnergywiseManager:', msglog.types.INFO, 
                       'Inside delete_trends' )

        for domain in trendList.split(':'):
            if domain:
                domain_node = as_node(domain)
                domain_node.delete_trend()
                self.delete_trend_configuration(domain)
        return
         
    def start(self):
        if self.__running:
            return
        if self.debug:
            msglog.log('EnergywiseManager :', msglog.types.INFO, 'Inside start' )
        CompositeNode.start(self)
#        start_node = as_node('/services/EnergywiseManager/')
#        self.configure_trend_in_switches(start_node, 60)
        self.__running = True
        self._pdo_lock.acquire()
        self._pdo = PersistentDataObject(self, dmtype=GC_NEVER)
        self._pdo.trends = {}
        self._pdo.load()
        self._pdo_lock.release()
        if self.has_child('trends'):
            self.trends = self.get_child('trends')
        else:
            self.trends = CompositeNode()
            self.trends.configure({'parent':self, 'name':'trends'})
            self.trends.start()
        # start trending for saved domains
        for domain,freq in self._pdo.trends.items():
            try:
                start_node = as_node(domain)
               # self.configure_trend_in_switches( start_node,freq )
                start_node.new_trend(freq)
            except:
                self.delete_trend_configuration(domain)
        return

    def get_trends(self):
        return self._pdo.trends.items()

    def add_trend_configuration(self, trend_period, trend_domain):
        self._pdo_lock.acquire()
        self._pdo.trends[trend_domain] = trend_period
        self._pdo.save()
        self._pdo_lock.release()
        return
    def save_trends(self, trend_list):
        # Traverse through _pdo.items and check if new domain is either subset
        # of any configured or superset. 
        # If subset return with msg already covered and dont save this
        # If superset then configure new ones and delete subset from 
        # _pdo.items
#......... remainder of this code omitted .........
Developer: mcruse, Project: monotone, Lines: 103, Source: energywise_manager.py

Example 14: CloudManager

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class CloudManager(CompositeNode):
    implements(ICloudManager)
    security = SecurityInformation.from_default()
    secured_by(security)
    def __init__(self, *args):
        super(CloudManager, self).__init__(*args)
        self.dispatcher = Dispatcher('Cloud Manager:Dispatcher')
        register_utility(self, ICloudManager, 'Cloud Manager')
        self.peer = Event.LOCALORIGIN
        self.formation = []
        self._scheduled = None
        self.unreachable = {}
        self.subscription = None
        if((as_node('/services/network/https_server')).is_enabled()):
            self.secure_http = True
        else:
            self.secure_http = False
        self.channel_monitor = ChannelMonitor()
        self.channel_monitor.trigger = CallbackTrigger(self.channel_monitor)
    def stop(self):
        if self.subscription:
            self.remove_listener(self.subscription)
        if self.channel_monitor.is_running():
            self.channel_monitor.stop_monitor()
        self.subscription = None
        super(CloudManager, self).stop()

    def is_event_valid(self,cloudevent):
        portal = self.nformation.get_portal()
        topic=cloudevent.topics[0]
        if(topic == 'EventResend' ):
            if( (portal != None ) and utils.same_host(cloudevent.origin,portal) ):
                return(True)
        elif(topic == 'Alarm Manager' ):
            #if (self.is_peer_in_formation(cloudevent.origin) == True):
            return(True)
        elif(topic == 'CloudFormation' ):
            return(True)

        return(False)


    def handle_remote_event(self, data):
        cloudevent = IPickles(cPickle.loads(data))()
        self.message('Handling remote event from : %s topic=%s ' %(cloudevent.origin,cloudevent.topics))
        cloudevent.set_data(data)
        if(self.is_event_valid(cloudevent) == False ):
            self.message('Dropping the remote event from : %s topic=%s ' 
                           %(cloudevent.origin,cloudevent.topics),msglog.types.WARN)
            return
            
        self.dispatcher.dispatch(cloudevent, cloudevent.topics)
        if(not ('CloudFormation' in cloudevent.topics) ):
            return
        '''
        Don't propagate an event if we are the portal.
        '''
        if((cloudevent.portal != None )and (utils.same_host(self.peer,cloudevent.portal)) ):
            self.message('Not Propagating remote event, since I am getting it as a portal:')
            return
        
        
        self.propogate(cloudevent)

    def send_event_to_portal(self,event,topic,target):
        cloudevent = CloudEvent(
            self, self.peer, [target], self.nformation.get_portal(),topic, event)

        protocol = "https" if self.secure_http else "http"
        notifier = CloudNotifier(self.channel_monitor,target, protocol,'/cloud', self.debug)
        notifier.cloudevent = cloudevent
        if not cloudevent.has_data():
            cloudevent.set_data(cPickle.dumps(IPickles(cloudevent)))
        clouddata = cloudevent.get_data()
        notifier.notify(clouddata, self.handle_send_failure_portal,self.handle_send_success_portal)

    def handle_send_failure_portal(self, notifier):
        cloudevent = notifier.cloudevent
        target_peer = notifier.peer
        self.message('Unable to send alarm events to portal=%s ' % (target_peer),msglog.types.WARN)

    def handle_send_success_portal(self,notifier):
        cloudevent = notifier.cloudevent
        target_peer = notifier.peer
        self.message('Succesfully sent alarm events to portal=%s ' % (target_peer))



    def handle_local_event(self, event, topics = []):
        cloudevent = CloudEvent(
            self, self.peer, self.target_formation, self.nformation.get_portal(),topics, event)
        self.propogate(cloudevent)
    
    def is_peer_in_formation(self,peer,formation=None):
        if not formation:
            formation = self.get_formation()
        peer_ip=utils.get_ip_addr(peer)
        return any(utils.same_host(peer_ip, ip) for ip in formation)
    
    def is_host_the_portal(self,host):
#......... remainder of this code omitted .........
Developer: mcruse, Project: monotone, Lines: 103, Source: manager.py

Example 15: XMLFormatter

# Required import: from mpx.lib.persistent import PersistentDataObject [as alias]
# Or: from mpx.lib.persistent.PersistentDataObject import load [as alias]
class XMLFormatter(Formatter):
    MIME_TYPE='text/xml'
    def __init__(self):
        Formatter.__init__(self)
        self._channels = {} # {name:{uom:,meastype:,Delta:,Totalized:,key:}}
        self._exception_log = None
        # NEVER CREATE A PDO BEFORE THE NODE IS INSERTED IN THE NODE TREE!
        self._PDO = None
        return
    ##
    # @param config
    # @key timestamp_format the timestamp format string, e.g. %Y-%m-%dT%H:%M:%S.
    # @key info the information that will be placed in the info attribute of the 
    # data tag
    def configure(self, config):
        Formatter.configure(self, config)
        set_attribute(self, 'debug_lvl', 0, config, int)
        set_attribute(self, 'timestamp_format', '%Y-%m-%dT%H:%M:%S', config)
        set_attribute(self, 'pretty_format',0,config,as_boolean)
        set_attribute(self, 'location_info','DefaultLocationInfo',config)
        set_attribute(self, 'location_key','DefaultLocationKey',config)
        set_attribute(self, 'panel_info','DefaultPanelInfo',config)
        set_attribute(self, 'panel_key','DefaultPanelKey',config)
        set_attribute(self, 'capture_period',24.0,config,float) # capture period preceding data transmission time (hrs)
        set_attribute(self, 'exception_log_url','/services/logger/fsg_exception_log',config)
    ##
    # @returns returns the configuratation
    def configuration(self):
        config = Formatter.configuration(self)
        get_attribute(self, 'debug_lvl', config, int)
        get_attribute(self, 'timestamp_format', config)
        get_attribute(self, 'pretty_format',config,str)
        get_attribute(self, 'location_info',config)
        get_attribute(self, 'location_key',config)
        get_attribute(self, 'panel_info',config)
        get_attribute(self, 'panel_key',config)
        get_attribute(self, 'capture_period',config,float) # capture period preceding data transmission time (hrs)
        get_attribute(self, 'exception_log_url',config)
        return config
    
    def start(self):
        self._PDO = PersistentDataObject(self,dmtype=GC_NEVER)
        self._PDO.exception_log_last_time = 0.0
        self._PDO.load()
        # Scan subtree of grandparent logger for channel (column) 'fsg_attrs'
        # nodes containing info required for FSG Demo, so that we don't have
        # to do the scan every time format() is called:
        self._channels = {}
        columns_node = self.parent.parent.parent.get_child('columns')
        column_nodes = columns_node.children_nodes()
        for column_node in column_nodes:
            if column_node.name == 'timestamp':
                continue
            assert isinstance(column_node, ChannelAttrsColumn) \
                   or isinstance(column_node, ChannelAttrsDeltaColumn), \
                   'Column %s should be class ChannelAttrsColumn, but is class %s' \
                   % (column_node.name, column_node.__class__.__name__)
            self._channels[column_node.name] = {
                'channel_node':column_node,'values':[]

                }
            
        self._exception_log = None
        try:
            self._exception_log = as_node(self.exception_log_url)
        except ENoSuchName:
            pass
        return
    ##
    # cancel():
    # Called by exporter if attempted transport fails, to clear out pre-formatted
    # data waiting in self._channels value dicts. Else, the pre-formatted data
    # in self._channels is still present at next attempt, and will cause transport
    # of multiple copies of same data:
    #
    def cancel(self):
        for channel_dict in self._channels.values():
            channel_dict['values'] = []
        return
    ##
    # @param data list of dictionary values to be converted in to XML format.
    # @param pretty_format 0,1 optional parameter to return pretty xml, xml that has
    # carriage returns in it
    # @default 0
    # @note timestamp MUST be one of the dictionary keys.
    # @throws EIncompatibleFormat if timestamp is not a key in a dictionary entry.
    def format(self, data, pretty_format=None):
        # Organize all log data (list of time-based dicts) into a dict of 
        # point-based lists. (Dict of lists could get REALLY large; may 
        # need to do only one point at a time...
        # self._channels:K=col_name,V=col_dict
        # col_dict:K='column_node':,'values':list_of_2tuples
        # list_of_2tuples: [(timestamp,value),]
        # Only want records for preceding self.capture_period-hr period:
        end_time = time.time()
        start_time = self.parent.last_time() # ASSUME that parent is a periodic exporter...
        # Comment out line below, in favor of line above, because FSG tends to
        # disable their FTP server (effectively) for days at a time, but still
        # want all the data gathered during those blackout periods to go to the
        # FTP server when the server reappears with respect to the Mediator. 
#......... remainder of this code omitted .........
Developer: mcruse, Project: monotone, Lines: 103, Source: xml_formatter.py


Note: The mpx.lib.persistent.PersistentDataObject.load examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Refer to each project's license before distributing or using the code; do not reproduce without permission.