This article collects typical usage examples of the Python method shinken.macroresolver.MacroResolver.init. If you are wondering what MacroResolver.init does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, shinken.macroresolver.MacroResolver.
Eleven code examples of MacroResolver.init are shown below, sorted by popularity by default.
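As a quick orientation before the examples: MacroResolver is used as a shared, initialize-once helper. You create an instance, call init() with a fully loaded Config object, and then use it to expand Shinken macros, typically in service check commands. The sketch below is not taken from the examples themselves; it assumes conf is a Config that has already gone through the loading steps shown in Examples 2, 3, 5 and 6, and the resolve_command call mirrors Example 11.

from shinken.macroresolver import MacroResolver

# 'conf' is assumed to be a fully loaded Shinken Config object
# (see the setup_with_file examples below for how one is built).
mac_resol = MacroResolver()
mac_resol.init(conf)  # must be called once before resolving macros

# Expand the macros of every service check command, as in Example 11.
for serv in conf.services:
    data = serv.get_data_for_checks()
    command_line = mac_resol.resolve_command(serv.check_command, data)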
Example 1: hook_late_configuration
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def hook_late_configuration(self, arb):
    """ Read config and fill database """
    mac_resol = MacroResolver()
    mac_resol.init(arb.conf)
    for serv in arb.conf.services:
        if serv.check_command.command.module_type == 'snmp_booster':
            try:
                # Serialize service
                dict_serv = dict_serialize(serv,
                                           mac_resol,
                                           self.datasource)
            except Exception as exp:
                logger.error("[SnmpBooster] [code 0907] [%s,%s] "
                             "%s" % (serv.host.get_name(),
                                     serv.get_name(),
                                     str(exp)))
                continue
            # We want to make a diff between arbiter insert and poller insert. Some backend may need it.
            try:
                self.db_client.update_service_init(dict_serv['host'],
                                                   dict_serv['service'],
                                                   dict_serv)
            except Exception as exp:
                logger.error("[SnmpBooster] [code 0909] [%s,%s] "
                             "%s" % (dict_serv['host'],
                                     dict_serv['service'],
                                     str(exp)))
                continue
    logger.info("[SnmpBooster] [code 0908] Done parsing")
    # Disconnect from database
    self.db_client.disconnect()
Example 2: setup_with_file
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_with_file(self, path):
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.old_properties_names_to_new()
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    print "conf.services has %d elements" % len(self.conf.services)
    self.conf.create_reversed_list()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    self.confs = self.conf.cut_into_parts()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)
    scheddaemon = Shinken(None, False, False, False, None)
    self.sched = Scheduler(scheddaemon)
    scheddaemon.sched = self.sched
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.schedule()
Example 3: __init__
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def __init__(self, path, macros, overwrite, runners, output_dir=None, dbmod='', db_direct_insert=False):
    # i am arbiter-like
    self.log = logger
    self.overwrite = overwrite
    self.runners = runners
    self.output_dir = output_dir
    self.dbmod = dbmod
    self.db_direct_insert = db_direct_insert
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    # Add macros on the end of the buf so they will
    # overwrite the resource.cfg ones
    for (m, v) in macros:
        buf += '\n$%s$=%s\n' % (m, v)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.is_correct()
    self.discoveryrules = self.conf.discoveryrules
    self.discoveryruns = self.conf.discoveryruns
    m = MacroResolver()
    m.init(self.conf)
    # Hash = name, and in it (key, value)
    self.disco_data = {}
    # Hash = name, and in it rules that apply
    self.disco_matches = {}
    self.init_database()
Example 4: get_mr
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def get_mr(self):
    mr = MacroResolver()
    mr.init(self.conf)
    return mr
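A note on why get_mr can simply build a fresh MacroResolver on every call: in the Shinken source the class appears to follow the Borg (shared-state) pattern, so each new instance sees the state set by an earlier init() call. A hypothetical check of that assumption:

mr1 = MacroResolver()
mr1.init(self.conf)
mr2 = MacroResolver()
# Assumed Borg behaviour: both instances share one attribute dictionary,
# so mr2 is already initialized even though init() was never called on it.
assert mr1.__dict__ is mr2.__dict__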
Example 5: setup_with_file
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_with_file(self, path):
    time_hacker.set_my_time()
    self.print_header()
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    # If we got one arbiter defined here (before default) we should be in a case where
    # the tester want to load/test a module, so we simulate an arbiter daemon
    # and the modules loading phase. As it has its own modulesmanager, should
    # not impact scheduler modules ones, especially we are asking for arbiter type :)
    if len(self.conf.arbiters) == 1:
        arbdaemon = Arbiter([''], [''], False, False, None, None)
        # only load if the module_dir is reallyexisting, so was set explicitly
        # in the test configuration
        if os.path.exists(getattr(self.conf, 'modules_dir', '')):
            arbdaemon.modules_dir = self.conf.modules_dir
        arbdaemon.load_modules_manager()
        # we request the instances without them being *started*
        # (for those that are concerned ("external" modules):
        # we will *start* these instances after we have been daemonized (if requested)
        me = None
        for arb in self.conf.arbiters:
            me = arb
            arbdaemon.modules_manager.set_modules(arb.modules)
        arbdaemon.do_load_modules()
        arbdaemon.load_modules_configuration_objects(raw_objects)
    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    #import pdb;pdb.set_trace()
    self.conf.linkify_templates()
    #import pdb;pdb.set_trace()
    self.conf.apply_inheritance()
    #import pdb;pdb.set_trace()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.compute_hash()
    #print "conf.services has %d elements" % len(self.conf.services)
    self.conf.override_properties()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.set_initial_state()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        self.conf.dump()
        return
    self.conf.clean()
    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)
    scheddaemon = Shinken(None, False, False, False, None, None)
    self.scheddaemon = scheddaemon
    self.sched = scheddaemon.sched
    scheddaemon.modules_dir = modules_dir
    scheddaemon.load_modules_manager()
    # Remember to clean the logs we just created before launching tests
    self.clear_logs()
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.conf.accept_passive_unknown_check_results = False
    self.sched.schedule()
Example 6: setup_with_file
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_with_file(self, path):
    time_hacker.set_my_time()
    self.print_header()
    # i am arbiter-like
    self.broks = {}
    self.me = None
    self.log = logger
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    buf = self.conf.read_config(self.config_files)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    self.conf.instance_id = 0
    self.conf.instance_name = 'test'
    # Hack push_flavor, that is set by the dispatcher
    self.conf.push_flavor = 0
    self.conf.load_triggers()
    #import pdb;pdb.set_trace()
    self.conf.linkify_templates()
    #import pdb;pdb.set_trace()
    self.conf.apply_inheritance()
    #import pdb;pdb.set_trace()
    self.conf.explode()
    #print "Aconf.services has %d elements" % len(self.conf.services)
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.remove_templates()
    self.conf.compute_hash()
    #print "conf.services has %d elements" % len(self.conf.services)
    self.conf.override_properties()
    self.conf.linkify()
    self.conf.apply_dependencies()
    self.conf.explode_global_conf()
    self.conf.propagate_timezone_option()
    self.conf.create_business_rules()
    self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    if not self.conf.conf_is_correct:
        print "The conf is not correct, I stop here"
        self.conf.dump()
        return
    self.conf.clean()
    self.confs = self.conf.cut_into_parts()
    self.conf.prepare_for_sending()
    self.conf.show_errors()
    self.dispatcher = Dispatcher(self.conf, self.me)
    scheddaemon = Shinken(None, False, False, False, None, None)
    self.sched = Scheduler(scheddaemon)
    scheddaemon.sched = self.sched
    scheddaemon.modules_dir = modules_dir
    scheddaemon.load_modules_manager()
    # Remember to clean the logs we just created before launching tests
    self.clear_logs()
    m = MacroResolver()
    m.init(self.conf)
    self.sched.load_conf(self.conf, in_test=True)
    e = ExternalCommandManager(self.conf, 'applyer')
    self.sched.external_command = e
    e.load_scheduler(self.sched)
    e2 = ExternalCommandManager(self.conf, 'dispatcher')
    e2.load_arbiter(self)
    self.external_command_dispatcher = e2
    self.sched.conf.accept_passive_unknown_check_results = False
    self.sched.schedule()
Example 7: setup_new_conf
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_new_conf(self):
    pk = self.new_conf
    conf_raw = pk['conf']
    override_conf = pk['override_conf']
    modules = pk['modules']
    satellites = pk['satellites']
    instance_name = pk['instance_name']
    push_flavor = pk['push_flavor']
    skip_initial_broks = pk['skip_initial_broks']
    accept_passive_unknown_check_results = pk['accept_passive_unknown_check_results']
    # horay, we got a name, we can set it in our stats objects
    statsmgr.register(instance_name, 'scheduler')
    t0 = time.time()
    conf = cPickle.loads(conf_raw)
    logger.debug("Conf received at %d. Unserialized in %d secs" % (t0, time.time() - t0))
    self.new_conf = None
    # Tag the conf with our data
    self.conf = conf
    self.conf.push_flavor = push_flavor
    self.conf.instance_name = instance_name
    self.conf.skip_initial_broks = skip_initial_broks
    self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results
    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    #self.pollers = self.app.pollers
    if self.conf.human_timestamp_log:
        logger.set_human_format()
    # Now We create our pollers
    for pol_id in satellites['pollers']:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites['pollers'][pol_id]
        self.pollers[pol_id] = p
        if p['name'] in override_conf['satellitemap']:
            p = dict(p)  # make a copy
            p.update(override_conf['satellitemap'][p['name']])
        proto = 'http'
        if p['use_ssl']:
            proto = 'https'
        uri = '%s://%s:%s/' % (proto, p['address'], p['port'])
        self.pollers[pol_id]['uri'] = uri
        self.pollers[pol_id]['last_connection'] = 0
    # First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        #print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)
    if self.conf.use_timezone != '':
        logger.debug("Setting our timezone to %s" % str(self.conf.use_timezone))
        os.environ['TZ'] = self.conf.use_timezone
        time.tzset()
    if len(self.modules) != 0:
        logger.debug("I've got %s modules" % str(self.modules))
    # TODO: if scheduler had previous modules instanciated it must clean them!
    self.modules_manager.set_modules(self.modules)
    self.do_load_modules()
    # give it an interface
    # But first remove previous interface if exists
    if self.ichecks is not None:
        logger.debug("Deconnecting previous Check Interface")
        self.http_daemon.unregister(self.ichecks)
    # Now create and connect it
    self.ichecks = IChecks(self.sched)
    self.http_daemon.register(self.ichecks)
    logger.debug("The Scheduler Interface uri is: %s" % self.uri)
    # Same for Broks
    if self.ibroks is not None:
        logger.debug("Deconnecting previous Broks Interface")
        self.http_daemon.unregister(self.ibroks)
    # Create and connect it
    self.ibroks = IBroks(self.sched)
    self.http_daemon.register(self.ibroks)
    logger.info("Loading configuration.")
    self.conf.explode_global_conf()
    # we give sched it's conf
    self.sched.reset()
    self.sched.load_conf(self.conf)
    self.sched.load_satellites(self.pollers, self.reactionners)
    # We must update our Config dict macro with good value
    # from the config parameters
    self.sched.conf.fill_resource_macros_names_macros()
    #......... the rest of this method is omitted here .........
Example 8: setup_new_conf
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_new_conf(self):
    #self.use_ssl = self.app.use_ssl
    (conf, override_conf, modules, satellites) = self.new_conf
    self.new_conf = None
    # In fact it make the scheduler just DIE as a bad guy.
    # Must manage it better or not manage it at all!
    #if self.cur_conf and self.cur_conf.magic_hash == conf.magic_hash:
    #    print("I received a conf with same hash than me, I skip it.")
    #    return
    self.conf = conf
    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    #self.pollers = self.app.pollers
    # Now We create our pollers
    for pol_id in satellites['pollers']:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites['pollers'][pol_id]
        self.pollers[pol_id] = p
        uri = pyro.create_uri(p['address'], p['port'], 'Schedulers', self.use_ssl)
        self.pollers[pol_id]['uri'] = uri
        self.pollers[pol_id]['last_connexion'] = 0
        print "Got a poller", p
    #First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)
    if self.conf.use_timezone != 'NOTSET':
        print "Setting our timezone to", self.conf.use_timezone
        os.environ['TZ'] = self.conf.use_timezone
        time.tzset()
    print "I've got modules", self.modules
    # TODO: if scheduler had previous modules instanciated it must clean them !
    self.modules_manager.set_modules(self.modules)
    self.do_load_modules()
    # And start external ones too
    self.modules_manager.start_external_instances()
    # give it an interface
    # But first remove previous interface if exists
    if self.ichecks is not None:
        print "Deconnecting previous Check Interface from pyro_daemon"
        self.pyro_daemon.unregister(self.ichecks)
    #Now create and connect it
    self.ichecks = IChecks(self.sched)
    self.uri = self.pyro_daemon.register(self.ichecks, "Checks")
    print "The Checks Interface uri is:", self.uri
    #Same for Broks
    if self.ibroks is not None:
        print "Deconnecting previous Broks Interface from pyro_daemon"
        self.pyro_daemon.unregister(self.ibroks)
    #Create and connect it
    self.ibroks = IBroks(self.sched)
    self.uri2 = self.pyro_daemon.register(self.ibroks, "Broks")
    print "The Broks Interface uri is:", self.uri2
    print("Loading configuration..")
    self.conf.explode_global_conf()
    #we give sched it's conf
    self.sched.reset()
    self.sched.load_conf(self.conf)
    self.sched.load_satellites(self.pollers, self.reactionners)
    #We must update our Config dict macro with good value
    #from the config parameters
    self.sched.conf.fill_resource_macros_names_macros()
    #print "DBG: got macors", self.sched.conf.macros
    #Creating the Macroresolver Class & unique instance
    m = MacroResolver()
    m.init(self.conf)
    #self.conf.dump()
    #self.conf.quick_debug()
    #Now create the external commander
    #it's a applyer : it role is not to dispatch commands,
    #but to apply them
    e = ExternalCommandManager(self.conf, 'applyer')
    #Scheduler need to know about external command to
    #activate it if necessery
    self.sched.load_external_command(e)
    #External command need the sched because he can raise checks
    e.load_scheduler(self.sched)
Example 9: setup_new_conf
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def setup_new_conf(self):
    pk = self.new_conf
    conf_raw = pk["conf"]
    override_conf = pk["override_conf"]
    modules = pk["modules"]
    satellites = pk["satellites"]
    instance_name = pk["instance_name"]
    push_flavor = pk["push_flavor"]
    skip_initial_broks = pk["skip_initial_broks"]
    accept_passive_unknown_check_results = pk["accept_passive_unknown_check_results"]
    api_key = pk["api_key"]
    secret = pk["secret"]
    http_proxy = pk["http_proxy"]
    statsd_host = pk["statsd_host"]
    statsd_port = pk["statsd_port"]
    statsd_prefix = pk["statsd_prefix"]
    statsd_enabled = pk["statsd_enabled"]
    statsd_interval = pk["statsd_interval"]
    statsd_types = pk["statsd_types"]
    statsd_pattern = pk["statsd_pattern"]
    # horay, we got a name, we can set it in our stats objects
    statsmgr.register(
        self.sched,
        instance_name,
        "scheduler",
        api_key=api_key,
        secret=secret,
        http_proxy=http_proxy,
        statsd_host=statsd_host,
        statsd_port=statsd_port,
        statsd_prefix=statsd_prefix,
        statsd_enabled=statsd_enabled,
        statsd_interval=statsd_interval,
        statsd_types=statsd_types,
        statsd_pattern=statsd_pattern,
    )
    t0 = time.time()
    conf = cPickle.loads(conf_raw)
    logger.debug("Conf received at %d. Unserialized in %d secs", t0, time.time() - t0)
    self.new_conf = None
    # Tag the conf with our data
    self.conf = conf
    self.conf.push_flavor = push_flavor
    self.conf.instance_name = instance_name
    self.conf.skip_initial_broks = skip_initial_broks
    self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results
    self.cur_conf = conf
    self.override_conf = override_conf
    self.modules = modules
    self.satellites = satellites
    # self.pollers = self.app.pollers
    if self.conf.human_timestamp_log:
        logger.set_human_format()
    # Now We create our pollers
    for pol_id in satellites["pollers"]:
        # Must look if we already have it
        already_got = pol_id in self.pollers
        p = satellites["pollers"][pol_id]
        self.pollers[pol_id] = p
        if p["name"] in override_conf["satellitemap"]:
            p = dict(p)  # make a copy
            p.update(override_conf["satellitemap"][p["name"]])
        proto = "http"
        if p["use_ssl"]:
            proto = "https"
        uri = "%s://%s:%s/" % (proto, p["address"], p["port"])
        self.pollers[pol_id]["uri"] = uri
        self.pollers[pol_id]["last_connection"] = 0
    # Now We create our reactionners
    for reac_id in satellites["reactionners"]:
        # Must look if we already have it
        already_got = reac_id in self.reactionners
        reac = satellites["reactionners"][reac_id]
        self.reactionners[reac_id] = reac
        if reac["name"] in override_conf["satellitemap"]:
            reac = dict(reac)  # make a copy
            reac.update(override_conf["satellitemap"][reac["name"]])
        proto = "http"
        if p["use_ssl"]:
            proto = "https"
        uri = "%s://%s:%s/" % (proto, reac["address"], reac["port"])
        self.reactionners[reac_id]["uri"] = uri
        self.reactionners[reac_id]["last_connection"] = 0
    # First mix conf and override_conf to have our definitive conf
    for prop in self.override_conf:
        # print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
        val = self.override_conf[prop]
        setattr(self.conf, prop, val)
    #......... the rest of this method is omitted here .........
Example 10: __init__
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def __init__(self, path, output_dir, macros, overright, runners):
    # i am arbiter-like
    self.log = logger
    self.overright = overright
    self.runners = runners
    self.output_dir = output_dir
    self.log.load_obj(self)
    self.config_files = [path]
    self.conf = Config()
    self.conf.read_config(self.config_files)
    buf = self.conf.read_config(self.config_files)
    # Add macros on the end of the buf so they will
    # overright the resource.cfg ones
    for (m, v) in macros:
        buf += '\n$%s$=%s\n' % (m, v)
    raw_objects = self.conf.read_config_buf(buf)
    self.conf.create_objects_for_type(raw_objects, 'arbiter')
    self.conf.create_objects_for_type(raw_objects, 'module')
    self.conf.early_arbiter_linking()
    self.conf.create_objects(raw_objects)
    #self.conf.instance_id = 0
    #self.conf.instance_name = ''
    self.conf.linkify_templates()
    self.conf.apply_inheritance()
    self.conf.explode()
    self.conf.create_reversed_list()
    self.conf.remove_twins()
    self.conf.apply_implicit_inheritance()
    self.conf.fill_default()
    self.conf.clean_useless()
    self.conf.pythonize()
    self.conf.linkify()
    self.conf.apply_dependancies()
    #self.conf.explode_global_conf()
    #self.conf.propagate_timezone_option()
    #self.conf.create_business_rules()
    #self.conf.create_business_rules_dependencies()
    self.conf.is_correct()
    self.discoveryrules = self.conf.discoveryrules
    self.discoveryruns = self.conf.discoveryruns
    #self.confs = self.conf.cut_into_parts()
    #self.dispatcher = Dispatcher(self.conf, self.me)
    #scheddaemon = Shinken(None, False, False, False, None)
    #self.sched = Scheduler(scheddaemon)
    #scheddaemon.sched = self.sched
    m = MacroResolver()
    m.init(self.conf)
    #self.sched.load_conf(self.conf)
    #e = ExternalCommandManager(self.conf, 'applyer')
    #self.sched.external_command = e
    #e.load_scheduler(self.sched)
    #e2 = ExternalCommandManager(self.conf, 'dispatcher')
    #e2.load_arbiter(self)
    #self.external_command_dispatcher = e2
    #self.sched.schedule()
    # Hash = name, and in it (key, value)
    self.disco_data = {}
    # Hash = name, and in it rules that apply
    self.disco_matches = {}
Example 11: hook_late_configuration
# Required import: from shinken.macroresolver import MacroResolver [as alias]
# Or: from shinken.macroresolver.MacroResolver import init [as alias]
def hook_late_configuration(self, arb):
    """ Read config and fill memcached """
    for serv in arb.conf.services:
        if serv.check_command.command.module_type == 'snmp_booster':
            chk = serv.check_command.command
            mac_resol = MacroResolver()
            mac_resol.init(arb.conf)
            data = serv.get_data_for_checks()
            command_line = mac_resol.resolve_command(serv.check_command,
                                                     data)
            # Clean command
            clean_command = shlex.split(command_line.encode('utf8',
                                                            'ignore'))
            # If the command doesn't seem good
            if len(clean_command) <= 1:
                logger.error("[SnmpBooster] Bad command "
                             "detected: %s" % chk.command)
                continue
            # we do not want the first member, check_snmp thing
            args = parse_args(clean_command[1:])
            (host, community, version,
             triggergroup, dstemplate, instance, instance_name) = args
            # Get key from memcached
            obj_key = str(host)
            # looking for old datas
            obj = self.memcached.get(obj_key)
            # Don't force check on first launch
            try:
                if not obj is None:
                    # Host found
                    new_obj = SNMPHost(host, community, version)
                    if not obj == new_obj:
                        # Update host
                        obj.community = new_obj.community
                        obj.version = new_obj.version
                    new_serv = SNMPService(serv, obj, triggergroup,
                                           dstemplate, instance,
                                           instance_name,
                                           serv.service_description)
                    new_serv.set_oids(self.datasource)
                    new_serv.set_triggers(self.datasource)
                    obj.update_service(new_serv)
                    obj.frequences[serv.check_interval].forced = False
                    self.memcached.set(obj_key, obj, time=604800)
                else:
                    # No old datas for this host
                    new_obj = SNMPHost(host, community, version)
                    new_serv = SNMPService(serv, new_obj, triggergroup,
                                           dstemplate, instance,
                                           instance_name,
                                           serv.service_description)
                    new_serv.set_oids(self.datasource)
                    new_serv.set_triggers(self.datasource)
                    new_obj.update_service(new_serv)
                    # Save new host in memcache
                    self.memcached.set(obj_key, new_obj, time=604800)
            except Exception, e:
                message = ("[SnmpBooster] Error adding : "
                           "Host %s - Service %s - Error related "
                           "to: %s" % (obj_key,
                                       serv.service_description,
                                       str(e)))
                logger.error(message)