This article collects typical usage examples of the Python method freenas.dispatcher.client.Client.emit_event. If you are wondering what Client.emit_event does or how to use it, the curated code samples below should help. You can also read more about its containing class, freenas.dispatcher.client.Client.
The following shows 5 code examples of Client.emit_event, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
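Before the full examples, here is a minimal sketch of the pattern they all share: create a Client, connect to the dispatcher, authenticate as a service, and publish an event with emit_event. The socket address, service name, event name and payload below are placeholders for illustration, not values taken from the examples.

from freenas.dispatcher.client import Client

client = Client()
client.connect('unix:')                     # or an IP address, as in Example 1
client.login_service('my-service')          # hypothetical service name
client.emit_event('my.example.event', {     # hypothetical event name and payload
    'key': 'value',
})
client.disconnect()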
Example 1: main
# Required import: from freenas.dispatcher.client import Client [as alias]
# Or: from freenas.dispatcher.client.Client import emit_event [as alias]
# the snippet also relies on the standard library 'os' module
import os

def main(name, *args):
    connection = Client()
    connection.connect('127.0.0.1')
    connection.login_service('ups')
    connection.emit_event('service.ups.signal', {
        'name': name,
        'type': os.environ['NOTIFYTYPE'],
    })
    connection.disconnect()
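This function is written as a notification hook: the UPS name arrives as an argument and the notification type is read from the NOTIFYTYPE environment variable. A quick way to exercise it by hand is to fake that environment; both values below are made up for illustration.

import os

os.environ['NOTIFYTYPE'] = 'ONBATT'   # hypothetical notification type
main('ups0')                          # hypothetical UPS name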
Example 2: Context
# Required import: from freenas.dispatcher.client import Client [as alias]
# Or: from freenas.dispatcher.client.Client import emit_event [as alias]
#......... part of the code omitted here .........
    def init_scheduler(self):
        store = MongoDBJobStore(database='freenas', collection='calendar_tasks', client=self.datastore.client)
        self.scheduler = BackgroundScheduler(jobstores={'default': store}, timezone=pytz.utc)
        self.scheduler.start()

    def register_schemas(self):
        self.client.register_schema('calendar-task', {
            'type': 'object',
            'additionalProperties': False,
            'properties': {
                'id': {'type': 'string'},
                'name': {'type': 'string'},
                'args': {'type': 'array'},
                'description': {'type': 'string'},
                'enabled': {'type': 'boolean'},
                'hidden': {'type': 'boolean'},
                'protected': {'type': 'boolean'},
                'status': {'$ref': 'calendar-task-status'},
                'schedule': {
                    'type': 'object',
                    'additionalProperties': False,
                    'properties': {
                        'coalesce': {'type': ['boolean', 'integer', 'null']},
                        'year': {'type': ['string', 'integer', 'null']},
                        'month': {'type': ['string', 'integer', 'null']},
                        'day': {'type': ['string', 'integer', 'null']},
                        'week': {'type': ['string', 'integer', 'null']},
                        'day_of_week': {'type': ['string', 'integer', 'null']},
                        'hour': {'type': ['string', 'integer', 'null']},
                        'minute': {'type': ['string', 'integer', 'null']},
                        'second': {'type': ['string', 'integer', 'null']},
                        'timezone': {'type': ['string', 'null']}
                    }
                }
            }
        })

        self.client.register_schema('calendar-task-status', {
            'type': 'object',
            'properties': {
                'next_run_time': {'type': 'string'},
                'last_run_status': {'type': 'string'},
                'current_run_status': {'type': ['string', 'null']},
                'current_run_progress': {'type': ['object', 'null']}
            }
        })

    def connect(self):
        while True:
            try:
                self.client.connect('unix:')
                self.client.login_service('schedulerd')
                self.client.enable_server()
                self.client.register_service('scheduler.management', ManagementService(self))
                self.client.register_service('scheduler.debug', DebugService())
                self.client.resume_service('scheduler.management')
                self.client.resume_service('scheduler.debug')
                return
            except (OSError, RpcException) as err:
                self.logger.warning('Cannot connect to dispatcher: {0}, retrying in 1 second'.format(str(err)))
                time.sleep(1)

    def run_job(self, *args, **kwargs):
        tid = self.client.submit_task(*args)
        self.active_tasks[kwargs['id']] = tid
        self.client.call_sync('task.wait', tid, timeout=None)
        result = self.client.call_sync('task.status', tid)
        if result['state'] != 'FINISHED':
            try:
                self.client.call_sync('alerts.emit', {
                    'name': 'scheduler.task.failed',
                    'severity': 'CRITICAL',
                    'description': 'Task {0} has failed: {1}'.format(kwargs['name'], result['error']['message']),
                })
            except RpcException as e:
                self.logger.error('Failed to emit alert', exc_info=True)

        del self.active_tasks[kwargs['id']]
        self.datastore.insert('schedulerd.runs', {
            'job_id': kwargs['id'],
            'task_id': result['id']
        })

    def emit_event(self, name, params):
        self.client.emit_event(name, params)

    def main(self):
        parser = argparse.ArgumentParser()
        parser.add_argument('-c', metavar='CONFIG', default=DEFAULT_CONFIGFILE, help='Middleware config file')
        parser.add_argument('-f', action='store_true', default=False, help='Run in foreground')
        args = parser.parse_args()

        configure_logging('/var/log/schedulerd.log', 'DEBUG')
        setproctitle.setproctitle('schedulerd')

        self.config = args.c
        self.init_datastore()
        self.init_scheduler()
        self.init_dispatcher()
        self.register_schemas()
        self.client.wait_forever()
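For reference, a document that would validate against the 'calendar-task' schema registered above could look like the following; every value is illustrative rather than taken from a real system.

example_task = {
    'id': 'c0ffee00-0000-0000-0000-000000000000',    # hypothetical task id
    'name': 'volume.scrub',                          # hypothetical task name
    'args': ['tank'],
    'description': 'Weekly scrub of the tank pool',
    'enabled': True,
    'hidden': False,
    'protected': False,
    'schedule': {
        'coalesce': True,
        'day_of_week': 'sun',
        'hour': 3,
        'minute': 0,
        'timezone': 'UTC'
    }
}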
Example 3: Main
# Required import: from freenas.dispatcher.client import Client [as alias]
# Or: from freenas.dispatcher.client.Client import emit_event [as alias]
#......... part of the code omitted here .........
    def init_dispatcher(self):
        def on_error(reason, **kwargs):
            if reason in (ClientError.CONNECTION_CLOSED, ClientError.LOGOUT):
                self.logger.warning('Connection to dispatcher lost')
                self.connect()

        self.client = Client()
        self.client.on_error(on_error)
        self.connect()

    def connect(self):
        while True:
            try:
                self.client.connect('unix:')
                self.client.login_service('etcd')
                self.client.enable_server()
                self.client.register_service('etcd.generation', FileGenerationService(self))
                self.client.register_service('etcd.management', ManagementService(self))
                self.client.register_service('etcd.debug', DebugService())
                self.client.resume_service('etcd.generation')
                self.client.resume_service('etcd.management')
                self.client.resume_service('etcd.debug')
                return
            except (OSError, RpcException) as err:
                self.logger.warning('Cannot connect to dispatcher: {0}, retrying in 1 second'.format(str(err)))
                time.sleep(1)

    def init_renderers(self):
        for name, impl in TEMPLATE_RENDERERS.items():
            self.renderers[name] = impl(self)

    def parse_config(self, filename):
        try:
            f = open(filename, 'r')
            self.config = json.load(f)
            f.close()
        except IOError as err:
            self.logger.error('Cannot read config file: %s', str(err))
            sys.exit(1)
        except ValueError:
            self.logger.error('Config file has unreadable format (not valid JSON)')
            sys.exit(1)

        self.plugin_dirs = self.config['etcd']['plugin-dirs']

    def scan_plugins(self):
        for i in self.plugin_dirs:
            self.scan_plugin_dir(i)

    def scan_plugin_dir(self, dir):
        self.logger.debug('Scanning plugin directory %s', dir)
        for root, dirs, files in os.walk(dir):
            for name in files:
                abspath = os.path.join(root, name)
                path = os.path.relpath(abspath, dir)
                name, ext = os.path.splitext(path)
                if name in self.managed_files.keys():
                    continue

                if ext in TEMPLATE_RENDERERS.keys():
                    self.managed_files[name] = abspath
                    self.logger.info('Adding managed file %s [%s]', name, ext)

    def generate_file(self, file_path):
        if file_path not in self.managed_files.keys():
            raise RpcException(errno.ENOENT, 'No such file')

        template_path = self.managed_files[file_path]
        name, ext = os.path.splitext(template_path)
        if ext not in self.renderers.keys():
            raise RuntimeError("Can't find renderer for {0}".format(file_path))

        renderer = self.renderers[ext]
        try:
            return renderer.render_template(template_path)
        except Exception as e:
            self.logger.warn('Cannot generate file {0}: {1}'.format(file_path, str(e)))
            return "# FILE GENERATION FAILED: {0}\n".format(str(e))

    def emit_event(self, name, params):
        self.client.emit_event(name, params)

    def main(self):
        parser = argparse.ArgumentParser()
        parser.add_argument('-c', metavar='CONFIG', default=DEFAULT_CONFIGFILE, help='Middleware config file')
        parser.add_argument('-f', action='store_true', default=False, help='Run in foreground')
        parser.add_argument('mountpoint', metavar='MOUNTPOINT', default='/etc', help='/etc mount point')
        args = parser.parse_args()

        configure_logging('/var/log/etcd.log', 'DEBUG')
        setproctitle.setproctitle('etcd')

        self.root = args.mountpoint
        self.configfile = args.c
        self.parse_config(args.c)
        self.scan_plugins()
        self.init_renderers()
        self.init_datastore()
        self.init_dispatcher()
        self.client.wait_forever()
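generate_file above only assumes that each renderer was constructed with the Context and exposes a render_template(path) method, so a minimal renderer that could be plugged into TEMPLATE_RENDERERS might look like this. The class name, the '.tmpl' extension and the use of Python's string.Template are assumptions for illustration, not part of the original etcd code.

import string

class SimpleTemplateRenderer(object):
    def __init__(self, context):
        self.context = context

    def render_template(self, path):
        # Substitute ${...} placeholders from the parsed etcd config
        with open(path, 'r') as f:
            return string.Template(f.read()).safe_substitute(self.context.config)

# Hypothetical registration; the real TEMPLATE_RENDERERS mapping is defined elsewhere:
# TEMPLATE_RENDERERS['.tmpl'] = SimpleTemplateRenderer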
Example 4: Context
# Required import: from freenas.dispatcher.client import Client [as alias]
# Or: from freenas.dispatcher.client.Client import emit_event [as alias]
#......... part of the code omitted here .........
        if ev.filter == select.KQ_FILTER_PROC:
            job = self.job_by_pid(ev.ident)
            if job:
                job.pid_event(ev)
                continue

            if ev.fflags & select.KQ_NOTE_CHILD:
                if ev.fflags & select.KQ_NOTE_EXIT:
                    continue

                pjob = self.job_by_pid(ev.data)
                if not pjob:
                    self.untrack_pid(ev.ident)
                    continue

                # Stop tracking at session ID boundary
                try:
                    if pjob.pgid != os.getpgid(ev.ident):
                        self.untrack_pid(ev.ident)
                        continue
                except ProcessLookupError:
                    continue

                with self.lock:
                    job = Job(self)
                    job.load_anonymous(pjob, ev.ident)
                    self.jobs[job.id] = job
                    self.logger.info('Added job {0}'.format(job.label))

    def track_pid(self, pid):
        ev = select.kevent(
            pid,
            select.KQ_FILTER_PROC,
            select.KQ_EV_ADD | select.KQ_EV_ENABLE,
            select.KQ_NOTE_EXIT | select.KQ_NOTE_EXEC | select.KQ_NOTE_FORK | select.KQ_NOTE_TRACK,
            0, 0
        )
        self.kq.control([ev], 0)

    def untrack_pid(self, pid):
        ev = select.kevent(
            pid,
            select.KQ_FILTER_PROC,
            select.KQ_EV_DELETE,
            0, 0, 0
        )
        with contextlib.suppress(FileNotFoundError):
            self.kq.control([ev], 0)

    def emit_event(self, name, args):
        self.server.broadcast_event(name, args)
        if self.client and self.client.connected:
            self.client.emit_event(name, args)

    def connect(self):
        while True:
            try:
                self.client.connect('unix:')
                self.client.login_service('serviced')
                self.client.enable_server(self.rpc)
                self.client.resume_service('serviced.job')
                self.client.resume_service('serviced.management')
                return
            except (OSError, RpcException) as err:
                self.logger.warning('Cannot connect to dispatcher: {0}, retrying in 1 second'.format(str(err)))
                time.sleep(1)

    def bootstrap(self):
        def doit():
            with self.lock:
                job = Job(self)
                job.load({
                    'Label': 'org.freenas.serviced.bootstrap',
                    'ProgramArguments': BOOTSTRAP_JOB,
                    'OneShot': True,
                    'RunAtLoad': True,
                })
                self.jobs[job.id] = job

        Thread(target=doit).start()

    def shutdown(self):
        self.client.disconnect()
        self.server.close()
        sys.exit(0)

    def main(self):
        parser = argparse.ArgumentParser()
        parser.add_argument('-s', metavar='SOCKET', default=DEFAULT_SOCKET_ADDRESS, help='Socket address to listen on')
        args = parser.parse_args()

        configure_logging('/var/log/serviced.log', 'DEBUG', file=True)
        bsd.setproctitle('serviced')
        self.logger.info('Started')
        self.init_server(args.s)
        self.bootstrap()
        self.event_loop()
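The kqueue bookkeeping in track_pid/untrack_pid is self-contained enough to sketch how a caller might use it: after spawning a child process, registering its PID lets the event-loop fragment at the top of this example turn fork/exec/exit notifications into Job updates. The program path and the 'context' variable below are purely illustrative.

import subprocess

proc = subprocess.Popen(['/usr/local/sbin/some-daemon'])   # hypothetical program
context.track_pid(proc.pid)     # start receiving NOTE_EXIT/NOTE_EXEC/NOTE_FORK events
# ... later, once the job is no longer of interest:
context.untrack_pid(proc.pid)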
Example 5: Context
# Required import: from freenas.dispatcher.client import Client [as alias]
# Or: from freenas.dispatcher.client.Client import emit_event [as alias]
class Context(object):
    def __init__(self):
        self.logger = logging.getLogger('schedulerd')
        self.config = None
        self.datastore = None
        self.configstore = None
        self.client = None
        self.scheduler = None
        self.active_tasks = {}

    def init_datastore(self):
        try:
            self.datastore = get_datastore(self.config)
        except DatastoreException as err:
            self.logger.error('Cannot initialize datastore: %s', str(err))
            sys.exit(1)

        self.configstore = ConfigStore(self.datastore)

    def init_dispatcher(self):
        def on_error(reason, **kwargs):
            if reason in (ClientError.CONNECTION_CLOSED, ClientError.LOGOUT):
                self.logger.warning('Connection to dispatcher lost')
                self.connect()

        self.client = Client()
        self.client.on_error(on_error)
        self.connect()

    def init_scheduler(self):
        store = FreeNASJobStore()
        self.scheduler = BackgroundScheduler(jobstores={'default': store, 'temp': MemoryJobStore()}, timezone=pytz.utc)
        self.scheduler.start()

    def connect(self):
        while True:
            try:
                self.client.connect('unix:')
                self.client.login_service('schedulerd')
                self.client.enable_server()
                self.client.register_service('scheduler.management', ManagementService(self))
                self.client.register_service('scheduler.debug', DebugService())
                self.client.resume_service('scheduler.management')
                self.client.resume_service('scheduler.debug')
                return
            except (OSError, RpcException) as err:
                self.logger.warning('Cannot connect to dispatcher: {0}, retrying in 1 second'.format(str(err)))
                time.sleep(1)

    def run_job(self, *args, **kwargs):
        tid = self.client.call_sync('task.submit_with_env', args[0], args[1:], {
            'RUN_AS_USER': 'root',
            'CALENDAR_TASK_NAME': kwargs.get('name')
        })

        self.active_tasks[kwargs['id']] = tid
        self.client.call_sync('task.wait', tid, timeout=None)
        result = self.client.call_sync('task.status', tid)
        if result['state'] != 'FINISHED':
            try:
                self.client.call_sync('alert.emit', {
                    'name': 'scheduler.task.failed',
                    'severity': 'CRITICAL',
                    'description': 'Task {0} has failed: {1}'.format(
                        kwargs.get('name', tid),
                        result['error']['message']
                    ),
                })
            except RpcException as e:
                self.logger.error('Failed to emit alert', exc_info=True)

        del self.active_tasks[kwargs['id']]
        self.datastore.insert('schedulerd.runs', {
            'job_id': kwargs['id'],
            'task_id': result['id']
        })

    def emit_event(self, name, params):
        self.client.emit_event(name, params)

    def checkin(self):
        checkin()

    def main(self):
        parser = argparse.ArgumentParser()
        parser.add_argument('-c', metavar='CONFIG', default=DEFAULT_CONFIGFILE, help='Middleware config file')
        parser.add_argument('-f', action='store_true', default=False, help='Run in foreground')
        args = parser.parse_args()

        configure_logging('/var/log/schedulerd.log', 'DEBUG')
        setproctitle('schedulerd')

        self.config = args.c
        self.init_datastore()
        self.init_scheduler()
        self.init_dispatcher()
        self.checkin()
        self.client.wait_forever()
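The example never shows how run_job reaches the scheduler, so the snippet below is a hedged sketch of wiring a calendar task up with APScheduler's standard add_job/CronTrigger API; the task name, arguments, id and schedule are invented for illustration.

import pytz
from apscheduler.triggers.cron import CronTrigger

# 'context' is assumed to be an already initialized Context instance
context.scheduler.add_job(
    context.run_job,
    trigger=CronTrigger(day_of_week='sun', hour=3, minute=0, timezone=pytz.utc),
    args=['volume.scrub', 'tank'],                             # task name followed by its arguments
    kwargs={'id': 'example-task-id', 'name': 'volume.scrub'},
    id='example-task-id'
)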