

Python daemon.pidfile code examples

This article collects typical usage examples of the daemon.pidfile module (part of the python-daemon package). If you are unsure what daemon.pidfile provides or how it is used in practice, the curated code examples below may help. You can also explore further usage examples from the daemon package it belongs to.


Seven code examples of daemon.pidfile are shown below, ordered roughly by popularity.
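Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: wrap a long-running function in daemon.DaemonContext and pass a PID lock file from daemon.pidfile as its pidfile argument. The file path and the worker function below are illustrative placeholders, not taken from any of the projects that follow.

import time

import daemon
import daemon.pidfile


def run_worker():
    # Placeholder for the long-running job the daemon performs.
    while True:
        time.sleep(60)


if __name__ == '__main__':
    # The PID lock file records the daemon's PID and prevents a second
    # instance from starting while the lock is held; the path is illustrative.
    lock = daemon.pidfile.TimeoutPIDLockFile('/tmp/example_daemon.pid', -1)
    with daemon.DaemonContext(pidfile=lock):
        run_worker()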

Example 1: kerberos

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def kerberos(args):
    """Start a kerberos ticket renewer"""
    print(settings.HEADER)

    if args.daemon:
        pid, stdout, stderr, _ = setup_locations(
            "kerberos", args.pid, args.stdout, args.stderr, args.log_file
        )
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            stdout=stdout,
            stderr=stderr,
        )

        with ctx:
            krb.run(principal=args.principal, keytab=args.keytab)

        stdout.close()
        stderr.close()
    else:
        krb.run(principal=args.principal, keytab=args.keytab) 
Developer: apache, Project: airflow, Lines of code: 26, Source file: kerberos_command.py
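A note on TimeoutPIDLockFile(pid, -1) above: the second argument is the lock's acquire timeout, and a non-positive value is intended to make acquisition fail straight away when another process already holds the lock, so a second --daemon start exits quickly instead of blocking. The same lock file can also be inspected without acquiring it; a small sketch, with an illustrative path in place of the one setup_locations() computes:

from daemon.pidfile import TimeoutPIDLockFile

# Illustrative path; Airflow derives the real one via setup_locations().
lock = TimeoutPIDLockFile('/tmp/example-kerberos.pid', -1)
if lock.is_locked():
    print('an instance appears to hold the lock, pid %s' % lock.read_pid())
else:
    print('no PID lock file present')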

Example 2: start_queue_monitor

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def start_queue_monitor(poll_frequency, max_workers, user, group, directory):
    pw = pwd.getpwnam(user)
    gid = pw.pw_gid if group is None else grp.getgrnam(group).gr_gid
    # This will capture stderr from this process as well as all child
    # energyPATHWAYS processes. Normally it will be empty, but it can
    # help capture model startup problems that would otherwise be hard to see.
    err = open('/var/log/queue_monitor/qm_stderr_%s.log' % start_time, 'w+')

    with daemon.DaemonContext(
        files_preserve=[logging.root.handlers[0].stream.fileno()],
        pidfile=daemon.pidfile.PIDLockFile('/var/run/queue_monitor/queue_monitor.pid'),
        uid=pw.pw_uid,
        gid=gid,
        working_directory=directory,
        stderr=err
    ):
        logger.info('My process id is %i' % os.getpid())
        qm = QueueMonitor(poll_frequency, max_workers)
        qm.start() 
Developer: energyPATHWAYS, Project: EnergyPATHWAYS, Lines of code: 21, Source file: queue_monitor.py
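The files_preserve argument in this example matters because DaemonContext closes open file descriptors when it detaches, which would silently break any logging handler opened beforehand. A minimal sketch of the same idea, with illustrative paths:

import logging
import os

import daemon
import daemon.pidfile


def run_jobs():
    # Placeholder work: prove that logging still works inside the daemon.
    logging.info('daemon running with pid %s', os.getpid())


# Configure file logging *before* daemonizing, then tell DaemonContext
# not to close that file descriptor.
logging.basicConfig(filename='/tmp/example_worker.log', level=logging.INFO)
log_stream = logging.root.handlers[0].stream

with daemon.DaemonContext(
        files_preserve=[log_stream.fileno()],
        pidfile=daemon.pidfile.PIDLockFile('/tmp/example_worker.pid')):
    run_jobs()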

Example 3: stop_endpoint

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def stop_endpoint(name: str = typer.Argument("default", autocompletion=complete_endpoint_name)):
    """ Stops an endpoint using the pidfile

    """

    endpoint_dir = os.path.join(State.FUNCX_DIR, name)
    pid_file = os.path.join(endpoint_dir, "daemon.pid")

    if os.path.exists(pid_file):
        logger.debug(f"{name} has a daemon.pid file")
        pid = None
        with open(pid_file, 'r') as f:
            pid = int(f.read())
        # Attempt terminating
        try:
            logger.debug("Signalling process: {}".format(pid))
            os.kill(pid, signal.SIGTERM)
            time.sleep(0.1)
            os.kill(pid, signal.SIGKILL)
            time.sleep(0.1)
            # Wait to confirm that the pid file disappears
            if not os.path.exists(pid_file):
                logger.info("Endpoint <{}> is now stopped".format(name))

        except OSError:
            logger.warning("Endpoint {} could not be terminated".format(name))
            logger.warning("Attempting Endpoint {} cleanup".format(name))
            os.remove(pid_file)
            sys.exit(-1)
    else:
        logger.info("Endpoint <{}> is not active.".format(name)) 
Developer: funcx-faas, Project: funcX, Lines of code: 33, Source file: endpoint.py
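Example 3 only consumes the daemon.pid file; the start side of the endpoint is what writes it. A purely hypothetical start counterpart (not funcX's actual implementation, with ~/.funcx standing in for State.FUNCX_DIR) that would produce a compatible daemon.pid looks like this:

import os
import time

import daemon
import daemon.pidfile


def run_endpoint_loop():
    # Placeholder for the endpoint's real main loop.
    while True:
        time.sleep(60)


def start_endpoint(name='default'):
    # Hypothetical sketch: run the endpoint as a daemon and record its PID
    # in <endpoint_dir>/daemon.pid so stop_endpoint() can find it later.
    endpoint_dir = os.path.join(os.path.expanduser('~/.funcx'), name)
    os.makedirs(endpoint_dir, exist_ok=True)
    pid_file = os.path.join(endpoint_dir, 'daemon.pid')

    with daemon.DaemonContext(
            working_directory=endpoint_dir,
            pidfile=daemon.pidfile.PIDLockFile(pid_file)):
        run_endpoint_loop()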

Example 4: scheduler

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def scheduler(args):
    """Starts Airflow Scheduler"""
    print(settings.HEADER)
    job = SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler",
                                                        args.pid,
                                                        args.stdout,
                                                        args.stderr,
                                                        args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            job.run()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run() 
Developer: apache, Project: airflow, Lines of code: 37, Source file: scheduler_command.py
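Examples 1, 4, and 5 all rely on Airflow's setup_locations() to resolve the pid, stdout, stderr, and log paths before building the DaemonContext; its implementation is not shown here. For experimenting with these snippets outside Airflow, a rough, purely hypothetical stand-in could look like this:

import os


def setup_locations_stub(process, pid=None, stdout=None, stderr=None, log=None):
    # Hypothetical stand-in, not Airflow's code: fall back to per-process
    # default paths under a base directory when none are supplied.
    base = os.path.expanduser('~/daemon-demo')
    os.makedirs(base, exist_ok=True)
    pid = pid or os.path.join(base, '%s.pid' % process)
    stdout = stdout or os.path.join(base, '%s.out' % process)
    stderr = stderr or os.path.join(base, '%s.err' % process)
    log = log or os.path.join(base, '%s.log' % process)
    return pid, stdout, stderr, log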

Example 5: flower

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def flower(args):
    """Starts Flower, Celery monitoring tool"""
    options = [
        conf.get('celery', 'BROKER_URL'),
        f"--address={args.hostname}",
        f"--port={args.port}",
    ]

    if args.broker_api:
        options.append(f"--broker-api={args.broker_api}")

    if args.url_prefix:
        options.append(f"--url-prefix={args.url_prefix}")

    if args.basic_auth:
        options.append(f"--basic-auth={args.basic_auth}")

    if args.flower_conf:
        options.append(f"--conf={args.flower_conf}")

    flower_cmd = FlowerCommand()

    if args.daemon:
        pidfile, stdout, stderr, _ = setup_locations(
            process="flower",
            pid=args.pid,
            stdout=args.stdout,
            stderr=args.stderr,
            log=args.log_file,
        )
        with open(stdout, "w+") as stdout, open(stderr, "w+") as stderr:
            ctx = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pidfile, -1),
                stdout=stdout,
                stderr=stderr,
            )
            with ctx:
                flower_cmd.execute_from_commandline(argv=options)
    else:
        flower_cmd.execute_from_commandline(argv=options) 
Developer: apache, Project: airflow, Lines of code: 42, Source file: celery_command.py

Example 6: main

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def main():
    parser = argparse.ArgumentParser(description=needl.__description__)
    parser.add_argument('--datadir', default=os.getcwd() + '/data', help='Data directory')
    parser.add_argument('-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument('-v', '--verbose', action='store_true', help='Increase logging')
    parser.add_argument('--logfile', type=argparse.FileType('a'), default=sys.stdout, help='Log to this file. Default is stdout.')
    parser.add_argument('--pidfile', default='/tmp/needl.pid', help='Save process PID to this file. Default is /tmp/needl.pid. Only valid when running as a daemon.')
    args = parser.parse_args()

    if args.daemon and args.logfile is sys.stdout:
        args.logfile = open('/tmp/needl.log', 'a')

    needl.init(args)
    daemonize(args.logfile, args.pidfile) if args.daemon else start() 
Developer: eth0izzle, Project: Needl, Lines of code: 16, Source file: needl.py

Example 7: daemonize

# Required import: import daemon [as alias]
# Or: from daemon import pidfile [as alias]
def daemonize(logfile, pidfile):
    needl.log.info('Daemonizing and logging to %s', logfile)

    with daemon.DaemonContext(working_directory=os.getcwd(),
                              stderr=logfile,
                              umask=0o002,
                              pidfile=daemon.pidfile.PIDLockFile(pidfile)) as dc:

        start() 
Developer: eth0izzle, Project: Needl, Lines of code: 11, Source file: needl.py
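As a follow-up to examples 6 and 7: once daemonize() is running, the PID recorded by PIDLockFile can be checked from outside the daemon. A small sketch, assuming the default --pidfile path from example 6 (/tmp/needl.pid):

import os

pid_path = '/tmp/needl.pid'

if os.path.exists(pid_path):
    with open(pid_path) as f:
        pid = int(f.read().strip())
    try:
        os.kill(pid, 0)   # signal 0 only tests whether the process exists
        print('needl daemon appears to be running (pid %d)' % pid)
    except ProcessLookupError:
        print('stale PID file: process %d is no longer running' % pid)
else:
    print('needl daemon is not running')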


Note: The daemon.pidfile examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright in the source code remains with the original authors, and redistribution and use are subject to each project's license. Do not reproduce this article without permission.