This article collects typical usage examples of the shinken.macroresolver.MacroResolver class in Python. If you are wondering what the MacroResolver class is for, how to use it, or what real-world usage looks like, the selected examples below should help.
The following shows 15 code examples of the MacroResolver class, sorted by popularity by default.
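Before the individual examples, here is a minimal usage sketch distilled from them. It assumes an already-loaded Shinken Config object; MacroResolver keeps shared state, so it is initialised once with the configuration and can then be instantiated wherever macros need to be expanded. The helper names and the macro template are illustrative assumptions, not taken from any single example below.

from shinken.macroresolver import MacroResolver

def init_resolver(conf):
    """Initialise the shared resolver once, after the Config has been
    loaded and linkified (see the setup code in the examples below)."""
    m = MacroResolver()
    m.init(conf)
    return m

def expand_status_line(obj, template="$HOSTNAME$,$SERVICEDESC$,$STATUS$"):
    """Expand simple macros against a host or service object; any later
    MacroResolver() instance shares the state set up by init_resolver()."""
    m = MacroResolver()
    data = obj.get_data_for_checks()
    return m.resolve_simple_macros_in_string(template, data)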
Example 1: hook_late_configuration
def hook_late_configuration(self, arb):
""" Read config and fill database """
mac_resol = MacroResolver()
mac_resol.init(arb.conf)
for serv in arb.conf.services:
if serv.check_command.command.module_type == 'snmp_booster':
try:
# Serialize service
dict_serv = dict_serialize(serv,
mac_resol,
self.datasource)
except Exception as exp:
logger.error("[SnmpBooster] [code 0907] [%s,%s] "
"%s" % (serv.host.get_name(),
serv.get_name(),
str(exp)))
continue
# We want to make a diff between arbiter insert and poller insert. Some backend may need it.
try:
self.db_client.update_service_init(dict_serv['host'],
dict_serv['service'],
dict_serv)
except Exception as exp:
logger.error("[SnmpBooster] [code 0909] [%s,%s] "
"%s" % (dict_serv['host'],
dict_serv['service'],
str(exp)))
continue
logger.info("[SnmpBooster] [code 0908] Done parsing")
# Disconnect from database
self.db_client.disconnect()
Example 2: setup_with_file
def setup_with_file(self, path):
# i am arbiter-like
self.broks = {}
self.me = None
self.log = logger
self.log.load_obj(self)
self.config_files = [path]
self.conf = Config()
buf = self.conf.read_config(self.config_files)
raw_objects = self.conf.read_config_buf(buf)
self.conf.create_objects_for_type(raw_objects, 'arbiter')
self.conf.create_objects_for_type(raw_objects, 'module')
self.conf.early_arbiter_linking()
self.conf.create_objects(raw_objects)
self.conf.old_properties_names_to_new()
self.conf.instance_id = 0
self.conf.instance_name = 'test'
# Hack push_flavor, that is set by the dispatcher
self.conf.push_flavor = 0
self.conf.linkify_templates()
self.conf.apply_inheritance()
self.conf.explode()
print "Aconf.services has %d elements" % len(self.conf.services)
self.conf.create_reversed_list()
self.conf.remove_twins()
self.conf.apply_implicit_inheritance()
self.conf.fill_default()
self.conf.remove_templates()
print "conf.services has %d elements" % len(self.conf.services)
self.conf.create_reversed_list()
self.conf.pythonize()
self.conf.linkify()
self.conf.apply_dependencies()
self.conf.explode_global_conf()
self.conf.propagate_timezone_option()
self.conf.create_business_rules()
self.conf.create_business_rules_dependencies()
self.conf.is_correct()
self.confs = self.conf.cut_into_parts()
self.conf.show_errors()
self.dispatcher = Dispatcher(self.conf, self.me)
scheddaemon = Shinken(None, False, False, False, None)
self.sched = Scheduler(scheddaemon)
scheddaemon.sched = self.sched
m = MacroResolver()
m.init(self.conf)
self.sched.load_conf(self.conf)
e = ExternalCommandManager(self.conf, 'applyer')
self.sched.external_command = e
e.load_scheduler(self.sched)
e2 = ExternalCommandManager(self.conf, 'dispatcher')
e2.load_arbiter(self)
self.external_command_dispatcher = e2
self.sched.schedule()
Example 3: get_obsessive_compulsive_processor_command
def get_obsessive_compulsive_processor_command(self):
cls = self.__class__
if not cls.obsess_over or not self.obsess_over_host:
return
m = MacroResolver()
data = self.get_data_for_event_handler()
cmd = m.resolve_command(cls.ochp_command, data)
e = EventHandler(cmd, timeout=cls.ochp_timeout)
# ok we can put it in our temp action queue
self.actions.append(e)
Example 4: __init__
def __init__(self, path, macros, overwrite, runners, output_dir=None, dbmod='', db_direct_insert=False):
# i am arbiter-like
self.log = logger
self.overwrite = overwrite
self.runners = runners
self.output_dir = output_dir
self.dbmod = dbmod
self.db_direct_insert = db_direct_insert
self.log.load_obj(self)
self.config_files = [path]
self.conf = Config()
buf = self.conf.read_config(self.config_files)
# Add macros at the end of the buf so they will
# overwrite the resource.cfg ones
for (m, v) in macros:
buf += '\n$%s$=%s\n' % (m, v)
raw_objects = self.conf.read_config_buf(buf)
self.conf.create_objects_for_type(raw_objects, 'arbiter')
self.conf.create_objects_for_type(raw_objects, 'module')
self.conf.early_arbiter_linking()
self.conf.create_objects(raw_objects)
self.conf.linkify_templates()
self.conf.apply_inheritance()
self.conf.explode()
self.conf.create_reversed_list()
self.conf.remove_twins()
self.conf.apply_implicit_inheritance()
self.conf.fill_default()
self.conf.remove_templates()
self.conf.pythonize()
self.conf.linkify()
self.conf.apply_dependencies()
self.conf.is_correct()
self.discoveryrules = self.conf.discoveryrules
self.discoveryruns = self.conf.discoveryruns
m = MacroResolver()
m.init(self.conf)
# Hash = name, and in it (key, value)
self.disco_data = {}
# Hash = name, and in it rules that apply
self.disco_matches = {}
self.init_database()
Example 5: init
def init(self):
print "Initialisation of the livestatus broker"
#to_queue is where we get broks from Broker
#self.to_q = self.properties['to_queue']
#from_queue is where we push back objects like
#external commands to the broker
#self.from_q = self.properties['from_queue']
self.prepare_log_db()
self.prepare_pnp_path()
self.livestatus = LiveStatus(self.configs, self.hostname_lookup_table, self.servicename_lookup_table, self.hosts, self.services, self.contacts, self.hostgroups, self.servicegroups, self.contactgroups, self.timeperiods, self.commands, self.schedulers, self.pollers, self.reactionners, self.brokers, self.dbconn, self.pnp_path, self.from_q)
m = MacroResolver()
m.output_macros = ['HOSTOUTPUT', 'HOSTPERFDATA', 'HOSTACKAUTHOR', 'HOSTACKCOMMENT', 'SERVICEOUTPUT', 'SERVICEPERFDATA', 'SERVICEACKAUTHOR', 'SERVICEACKCOMMENT']
Example 6: init
def init(self):
print "Initialisation of the thrift broker"
# to_queue is where we get broks from Broker
# self.to_q = self.properties['to_queue']
# from_queue is where we push back objects like
# external commands to the broker
# self.from_q = self.properties['from_queue']
# db has to be opened in the manage_brok thread
self.prepare_log_db()
self.prepare_pnp_path()
self.thrift = Thrift_status(
self.configs,
self.hosts,
self.services,
self.contacts,
self.hostgroups,
self.servicegroups,
self.contactgroups,
self.timeperiods,
self.commands,
self.schedulers,
self.pollers,
self.reactionners,
self.brokers,
self.dbconn,
self.pnp_path,
self.from_q,
)
m = MacroResolver()
m.output_macros = [
"HOSTOUTPUT",
"HOSTPERFDATA",
"HOSTACKAUTHOR",
"HOSTACKCOMMENT",
"SERVICEOUTPUT",
"SERVICEPERFDATA",
"SERVICEACKAUTHOR",
"SERVICEACKCOMMENT",
]
Example 7: test_bprule_expand_template_macros
def test_bprule_expand_template_macros(self):
svc_cor = self.sched.services.find_srv_by_name_and_hostname("dummy", "formatted_bp_rule_output")
self.assert_(svc_cor.got_business_rule is True)
self.assert_(svc_cor.business_rule is not None)
svc1 = self.sched.services.find_srv_by_name_and_hostname("test_host_01", "srv1")
svc2 = self.sched.services.find_srv_by_name_and_hostname("test_host_02", "srv2")
svc3 = self.sched.services.find_srv_by_name_and_hostname("test_host_03", "srv3")
hst4 = self.sched.hosts.find_by_name("test_host_04")
for i in range(2):
self.scheduler_loop(
1,
[
[svc1, 0, "OK test_host_01/srv1"],
[svc2, 1, "WARNING test_host_02/srv2"],
[svc3, 2, "CRITICAL test_host_03/srv3"],
[hst4, 2, "DOWN test_host_04"],
],
)
time.sleep(61)
self.sched.manage_internal_checks()
self.sched.consume_results()
# Performs checks
m = MacroResolver()
template = "$STATUS$,$SHORTSTATUS$,$HOSTNAME$,$SERVICEDESC$,$FULLNAME$"
data = svc1.get_data_for_checks()
output = m.resolve_simple_macros_in_string(template, data)
self.assert_(output == "OK,O,test_host_01,srv1,test_host_01/srv1")
data = svc2.get_data_for_checks()
output = m.resolve_simple_macros_in_string(template, data)
self.assert_(output == "WARNING,W,test_host_02,srv2,test_host_02/srv2")
data = svc3.get_data_for_checks()
output = m.resolve_simple_macros_in_string(template, data)
self.assert_(output == "CRITICAL,C,test_host_03,srv3,test_host_03/srv3")
data = hst4.get_data_for_checks()
output = m.resolve_simple_macros_in_string(template, data)
self.assert_(output == "DOWN,D,test_host_04,,test_host_04")
data = svc_cor.get_data_for_checks()
output = m.resolve_simple_macros_in_string(template, data)
self.assert_(output == "CRITICAL,C,dummy,formatted_bp_rule_output,dummy/formatted_bp_rule_output")
Example 8: setup_new_conf
#......... part of the code omitted here .........
self.conf.accept_passive_unknown_check_results = accept_passive_unknown_check_results
self.cur_conf = conf
self.override_conf = override_conf
self.modules = modules
self.satellites = satellites
#self.pollers = self.app.pollers
if self.conf.human_timestamp_log:
logger.set_human_format()
# Now We create our pollers
for pol_id in satellites['pollers']:
# Must look if we already have it
already_got = pol_id in self.pollers
p = satellites['pollers'][pol_id]
self.pollers[pol_id] = p
if p['name'] in override_conf['satellitemap']:
p = dict(p) # make a copy
p.update(override_conf['satellitemap'][p['name']])
proto = 'http'
if p['use_ssl']:
proto = 'https'
uri = '%s://%s:%s/' % (proto, p['address'], p['port'])
self.pollers[pol_id]['uri'] = uri
self.pollers[pol_id]['last_connection'] = 0
# First mix conf and override_conf to have our definitive conf
for prop in self.override_conf:
#print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
val = self.override_conf[prop]
setattr(self.conf, prop, val)
if self.conf.use_timezone != '':
logger.debug("Setting our timezone to %s" % str(self.conf.use_timezone))
os.environ['TZ'] = self.conf.use_timezone
time.tzset()
if len(self.modules) != 0:
logger.debug("I've got %s modules" % str(self.modules))
# TODO: if the scheduler had previous modules instantiated, it must clean them!
self.modules_manager.set_modules(self.modules)
self.do_load_modules()
# give it an interface
# But first remove previous interface if exists
if self.ichecks is not None:
logger.debug("Deconnecting previous Check Interface")
self.http_daemon.unregister(self.ichecks)
# Now create and connect it
self.ichecks = IChecks(self.sched)
self.http_daemon.register(self.ichecks)
logger.debug("The Scheduler Interface uri is: %s" % self.uri)
# Same for Broks
if self.ibroks is not None:
logger.debug("Deconnecting previous Broks Interface")
self.http_daemon.unregister(self.ibroks)
# Create and connect it
self.ibroks = IBroks(self.sched)
self.http_daemon.register(self.ibroks)
logger.info("Loading configuration.")
self.conf.explode_global_conf()
# we give sched its conf
self.sched.reset()
self.sched.load_conf(self.conf)
self.sched.load_satellites(self.pollers, self.reactionners)
# We must update our Config dict macro with good value
# from the config parameters
self.sched.conf.fill_resource_macros_names_macros()
#print "DBG: got macros", self.sched.conf.macros
# Creating the Macroresolver Class & unique instance
m = MacroResolver()
m.init(self.conf)
#self.conf.dump()
#self.conf.quick_debug()
# Now create the external commander
# it's an applyer: its role is not to dispatch commands,
# but to apply them
e = ExternalCommandManager(self.conf, 'applyer')
# Scheduler needs to know about the external command to
# activate it if necessary
self.sched.load_external_command(e)
# External command needs the sched because it can raise checks
e.load_scheduler(self.sched)
# We clear our schedulers managed (it's us :) )
# and set ourself in it
self.schedulers = {self.conf.instance_id: self.sched}
Example 9: launch
def launch(self, ctx=[], timeout=300):
m = MacroResolver()
cmd = m.resolve_command(self.discoveryrun_command, ctx)
self.current_launch = EventHandler(cmd, timeout=timeout)
self.current_launch.execute()
Example 10: get_mr
def get_mr(self):
mr = MacroResolver()
mr.init(self.conf)
return mr
Example 11: setup_with_file
def setup_with_file(self, path):
time_hacker.set_my_time()
self.print_header()
# i am arbiter-like
self.broks = {}
self.me = None
self.log = logger
self.log.load_obj(self)
self.config_files = [path]
self.conf = Config()
buf = self.conf.read_config(self.config_files)
raw_objects = self.conf.read_config_buf(buf)
self.conf.create_objects_for_type(raw_objects, 'arbiter')
self.conf.create_objects_for_type(raw_objects, 'module')
self.conf.early_arbiter_linking()
# If we got one arbiter defined here (before default) we should be in a case where
# the tester wants to load/test a module, so we simulate an arbiter daemon
# and the modules loading phase. As it has its own modulesmanager, this should
# not impact the scheduler modules, especially since we are asking for the arbiter type :)
if len(self.conf.arbiters) == 1:
arbdaemon = Arbiter([''],[''], False, False, None, None)
# only load if the modules_dir really exists, i.e. it was set explicitly
# in the test configuration
if os.path.exists(getattr(self.conf, 'modules_dir', '')):
arbdaemon.modules_dir = self.conf.modules_dir
arbdaemon.load_modules_manager()
# we request the instances without them being *started*
# (for those that are concerned ("external" modules):
# we will *start* these instances after we have been daemonized (if requested)
me = None
for arb in self.conf.arbiters:
me = arb
arbdaemon.modules_manager.set_modules(arb.modules)
arbdaemon.do_load_modules()
arbdaemon.load_modules_configuration_objects(raw_objects)
self.conf.create_objects(raw_objects)
self.conf.instance_id = 0
self.conf.instance_name = 'test'
# Hack push_flavor, that is set by the dispatcher
self.conf.push_flavor = 0
self.conf.load_triggers()
#import pdb;pdb.set_trace()
self.conf.linkify_templates()
#import pdb;pdb.set_trace()
self.conf.apply_inheritance()
#import pdb;pdb.set_trace()
self.conf.explode()
#print "Aconf.services has %d elements" % len(self.conf.services)
self.conf.apply_implicit_inheritance()
self.conf.fill_default()
self.conf.remove_templates()
self.conf.compute_hash()
#print "conf.services has %d elements" % len(self.conf.services)
self.conf.override_properties()
self.conf.linkify()
self.conf.apply_dependencies()
self.conf.set_initial_state()
self.conf.explode_global_conf()
self.conf.propagate_timezone_option()
self.conf.create_business_rules()
self.conf.create_business_rules_dependencies()
self.conf.is_correct()
if not self.conf.conf_is_correct:
print "The conf is not correct, I stop here"
self.conf.dump()
return
self.conf.clean()
self.confs = self.conf.cut_into_parts()
self.conf.prepare_for_sending()
self.conf.show_errors()
self.dispatcher = Dispatcher(self.conf, self.me)
scheddaemon = Shinken(None, False, False, False, None, None)
self.scheddaemon = scheddaemon
self.sched = scheddaemon.sched
scheddaemon.modules_dir = modules_dir
scheddaemon.load_modules_manager()
# Remember to clean the logs we just created before launching tests
self.clear_logs()
m = MacroResolver()
m.init(self.conf)
self.sched.load_conf(self.conf, in_test=True)
e = ExternalCommandManager(self.conf, 'applyer')
self.sched.external_command = e
e.load_scheduler(self.sched)
e2 = ExternalCommandManager(self.conf, 'dispatcher')
e2.load_arbiter(self)
self.external_command_dispatcher = e2
self.sched.conf.accept_passive_unknown_check_results = False
self.sched.schedule()
Example 12: launch
def launch(self):
m = MacroResolver()
data = []
cmd = m.resolve_command(self.discoveryrun_command, data)
self.current_launch = EventHandler(cmd, timeout=300)
self.current_launch.execute()
Example 13: setup_with_file
def setup_with_file(self, path):
time_hacker.set_my_time()
self.print_header()
# i am arbiter-like
self.broks = {}
self.me = None
self.log = logger
self.log.load_obj(self)
self.config_files = [path]
self.conf = Config()
buf = self.conf.read_config(self.config_files)
raw_objects = self.conf.read_config_buf(buf)
self.conf.create_objects_for_type(raw_objects, 'arbiter')
self.conf.create_objects_for_type(raw_objects, 'module')
self.conf.early_arbiter_linking()
self.conf.create_objects(raw_objects)
self.conf.instance_id = 0
self.conf.instance_name = 'test'
# Hack push_flavor, that is set by the dispatcher
self.conf.push_flavor = 0
self.conf.load_triggers()
#import pdb;pdb.set_trace()
self.conf.linkify_templates()
#import pdb;pdb.set_trace()
self.conf.apply_inheritance()
#import pdb;pdb.set_trace()
self.conf.explode()
#print "Aconf.services has %d elements" % len(self.conf.services)
self.conf.apply_implicit_inheritance()
self.conf.fill_default()
self.conf.remove_templates()
self.conf.compute_hash()
#print "conf.services has %d elements" % len(self.conf.services)
self.conf.override_properties()
self.conf.linkify()
self.conf.apply_dependencies()
self.conf.explode_global_conf()
self.conf.propagate_timezone_option()
self.conf.create_business_rules()
self.conf.create_business_rules_dependencies()
self.conf.is_correct()
if not self.conf.conf_is_correct:
print "The conf is not correct, I stop here"
self.conf.dump()
return
self.conf.clean()
self.confs = self.conf.cut_into_parts()
self.conf.prepare_for_sending()
self.conf.show_errors()
self.dispatcher = Dispatcher(self.conf, self.me)
scheddaemon = Shinken(None, False, False, False, None, None)
self.sched = Scheduler(scheddaemon)
scheddaemon.sched = self.sched
scheddaemon.modules_dir = modules_dir
scheddaemon.load_modules_manager()
# Remember to clean the logs we just created before launching tests
self.clear_logs()
m = MacroResolver()
m.init(self.conf)
self.sched.load_conf(self.conf, in_test=True)
e = ExternalCommandManager(self.conf, 'applyer')
self.sched.external_command = e
e.load_scheduler(self.sched)
e2 = ExternalCommandManager(self.conf, 'dispatcher')
e2.load_arbiter(self)
self.external_command_dispatcher = e2
self.sched.conf.accept_passive_unknown_check_results = False
self.sched.schedule()
Example 14: hook_late_configuration
def hook_late_configuration(self, arb):
""" Read config and fill memcached """
for serv in arb.conf.services:
if serv.check_command.command.module_type == 'snmp_booster':
chk = serv.check_command.command
mac_resol = MacroResolver()
mac_resol.init(arb.conf)
data = serv.get_data_for_checks()
command_line = mac_resol.resolve_command(serv.check_command,
data)
# Clean command
clean_command = shlex.split(command_line.encode('utf8',
'ignore'))
# If the command doesn't seem good
if len(clean_command) <= 1:
logger.error("[SnmpBooster] Bad command "
"detected: %s" % chk.command)
continue
# we do not want the first member, the check_snmp thing
args = parse_args(clean_command[1:])
(host, community, version,
triggergroup, dstemplate, instance, instance_name) = args
# Get key from memcached
obj_key = str(host)
# looking for old data
obj = self.memcached.get(obj_key)
# Don't force check on first launch
try:
if not obj is None:
# Host found
new_obj = SNMPHost(host, community, version)
if not obj == new_obj:
# Update host
obj.community = new_obj.community
obj.version = new_obj.version
new_serv = SNMPService(serv, obj, triggergroup,
dstemplate, instance,
instance_name,
serv.service_description)
new_serv.set_oids(self.datasource)
new_serv.set_triggers(self.datasource)
obj.update_service(new_serv)
obj.frequences[serv.check_interval].forced = False
self.memcached.set(obj_key, obj, time=604800)
else:
# No old data for this host
new_obj = SNMPHost(host, community, version)
new_serv = SNMPService(serv, new_obj, triggergroup,
dstemplate, instance,
instance_name,
serv.service_description)
new_serv.set_oids(self.datasource)
new_serv.set_triggers(self.datasource)
new_obj.update_service(new_serv)
# Save new host in memcache
self.memcached.set(obj_key, new_obj, time=604800)
except Exception, e:
message = ("[SnmpBooster] Error adding : "
"Host %s - Service %s - Error related "
"to: %s" % (obj_key,
serv.service_description,
str(e)))
logger.error(message)
Example 15: setup_new_conf
def setup_new_conf(self):
#self.use_ssl = self.app.use_ssl
(conf, override_conf, modules, satellites) = self.new_conf
self.new_conf = None
# In fact it makes the scheduler just DIE as a bad guy.
# Must manage it better or not manage it at all!
#if self.cur_conf and self.cur_conf.magic_hash == conf.magic_hash:
# print("I received a conf with same hash than me, I skip it.")
# return
self.conf = conf
self.cur_conf = conf
self.override_conf = override_conf
self.modules = modules
self.satellites = satellites
#self.pollers = self.app.pollers
# Now We create our pollers
for pol_id in satellites['pollers']:
# Must look if we already have it
already_got = pol_id in self.pollers
p = satellites['pollers'][pol_id]
self.pollers[pol_id] = p
uri = pyro.create_uri(p['address'], p['port'], 'Schedulers', self.use_ssl)
self.pollers[pol_id]['uri'] = uri
self.pollers[pol_id]['last_connexion'] = 0
print "Got a poller", p
#First mix conf and override_conf to have our definitive conf
for prop in self.override_conf:
print "Overriding the property %s with value %s" % (prop, self.override_conf[prop])
val = self.override_conf[prop]
setattr(self.conf, prop, val)
if self.conf.use_timezone != 'NOTSET':
print "Setting our timezone to", self.conf.use_timezone
os.environ['TZ'] = self.conf.use_timezone
time.tzset()
print "I've got modules", self.modules
# TODO: if the scheduler had previous modules instantiated, it must clean them!
self.modules_manager.set_modules(self.modules)
self.do_load_modules()
# And start external ones too
self.modules_manager.start_external_instances()
# give it an interface
# But first remove previous interface if exists
if self.ichecks is not None:
print "Deconnecting previous Check Interface from pyro_daemon"
self.pyro_daemon.unregister(self.ichecks)
#Now create and connect it
self.ichecks = IChecks(self.sched)
self.uri = self.pyro_daemon.register(self.ichecks, "Checks")
print "The Checks Interface uri is:", self.uri
#Same for Broks
if self.ibroks is not None:
print "Deconnecting previous Broks Interface from pyro_daemon"
self.pyro_daemon.unregister(self.ibroks)
#Create and connect it
self.ibroks = IBroks(self.sched)
self.uri2 = self.pyro_daemon.register(self.ibroks, "Broks")
print "The Broks Interface uri is:", self.uri2
print("Loading configuration..")
self.conf.explode_global_conf()
#we give sched its conf
self.sched.reset()
self.sched.load_conf(self.conf)
self.sched.load_satellites(self.pollers, self.reactionners)
#We must update our Config dict macro with good value
#from the config parameters
self.sched.conf.fill_resource_macros_names_macros()
#print "DBG: got macors", self.sched.conf.macros
#Creating the Macroresolver Class & unique instance
m = MacroResolver()
m.init(self.conf)
#self.conf.dump()
#self.conf.quick_debug()
#Now create the external commander
#it's an applyer: its role is not to dispatch commands,
#but to apply them
e = ExternalCommandManager(self.conf, 'applyer')
#Scheduler needs to know about the external command to
#activate it if necessary
self.sched.load_external_command(e)
#External command needs the sched because it can raise checks
e.load_scheduler(self.sched)