本文整理汇总了Python中utils.setup_logging函数的典型用法代码示例。如果您正苦于以下问题:Python setup_logging函数的具体用法?Python setup_logging怎么用?Python setup_logging使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了setup_logging函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: main
def main():
    """Fetch and build once, then run the setup/test cycle for each DB backend."""
    _mkdirs(SRCDIR, INSTALLDIR)
    setup_logging()
    fetch_and_build()
    backends = ('sqlite3', 'mysql')
    for backend in backends:
        # Start each backend from an empty install tree.
        shell('rm -rf {}/*'.format(INSTALLDIR))
        setup_and_test(backend)
示例2: __init__
def __init__(self, name):
    """Set up logging, verify SMBus availability, and bind the Arduino socket.

    Args:
        name: identifier for this socket instance (unused here; presumably
              consumed elsewhere in the enclosing class -- TODO confirm).

    Exits the process if smbus cannot be imported or the bind fails.
    """
    logger = logging.getLogger('log')
    setup_logging()
    try:
        import smbus
    except ImportError:
        logger.critical('[Arduino Socket]: SMBUS not configured properly!')
        sys.exit(1)
    arduino_device = None  # Global arduino_device variable
    states = None
    # Define the socket parameters
    HOST = ''
    PORT = 7893
    connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind socket to local host and port
    try:
        connection.bind((HOST, PORT))
    except socket.error as msg:
        # 'as' replaces the Python-2-only comma syntax. errno/strerror
        # attributes exist on Python 2.6+ and 3 (tuple indexing of the
        # exception does not work on Python 3's OSError).
        logger.critical('[Arduino Socket]: Bind failed. Error Code: '
                        + str(msg.errno) + ' Message ' + msg.strerror)
        sys.exit()
示例3: start_tracker
def start_tracker():
    """Start the Torrent Tracker.

    Parses command-line options, prepares the pytt directories and logging,
    then runs the tracker app until interrupted. Exits 0 on Ctrl+C and -1
    on any other error, closing the DB in both cases.
    """
    # parse commandline options
    parser = OptionParser()
    parser.add_option('-p', '--port', help='Tracker Port', default=0)
    parser.add_option('-b', '--background', action='store_true', default=False,
                      help='Start in background')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='Debug mode')
    (options, args) = parser.parse_args()
    # setup directories
    utils.create_pytt_dirs()
    # setup logging
    utils.setup_logging(options.debug)
    try:
        # Start the torrent tracker; the default port 0 is falsy, so fall
        # back to the configured 'tracker.port' when --port was not given.
        run_app(int(options.port) or utils.get_config().getint('tracker',
                                                               'port'))
    except KeyboardInterrupt:
        logging.info('Tracker Stopped.')
        utils.close_db()
        sys.exit(0)
    except Exception as ex:
        # 'as' form: the 'except Exception, ex' comma syntax is Python 2 only.
        logging.fatal('%s' % str(ex))
        utils.close_db()
        sys.exit(-1)
示例4: main
def main():
    """Parse per-ticker CSV files from --input_dir into --output_dir.

    Tickers are read one per line from --ticker_file. Known-bad tickers in
    SKIPPED_TICKERS are skipped, as are missing inputs and (unless
    --overwrite is given) already-existing outputs.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--overwrite', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    # enumerate instead of range(len(...)) -- same 1-based progress output.
    for i, ticker in enumerate(tickers):
        if ticker in SKIPPED_TICKERS:
            logging.warning('%d/%d: skipped %s' % (i+1, len(tickers), ticker))
            continue
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        input_path = '%s/%s.csv' % (args.input_dir, ticker)
        output_path = '%s/%s.csv' % (args.output_dir, ticker)
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        if path.isfile(output_path) and not args.overwrite:
            logging.warning('Output file exists and not overwritable: %s'
                            % output_path)
            continue
        parse(input_path, output_path)
示例5: run
def run():
    """
    Main loop. Run this TA forever.

    Reads modinput configuration from stdin, configures per-file logging
    using the first stanza's loglevel, then runs the TAManager until a
    shutdown signal arrives. Returns 0 immediately when no stanza exists.
    """
    try:
        meta_configs, stanza_configs = conf.parse_modinput_configs(
            sys.stdin.read())
    except Exception as ex:
        # str(ex) instead of ex.message: BaseException.message was removed
        # in Python 3 (and deprecated since Python 2.6).
        _LOGGER.error("Failed to setup config for manager TA: %s", str(ex))
        _LOGGER.error(traceback.format_exc())
        raise
    if not stanza_configs:
        _LOGGER.info("No config, exiting...")
        return 0
    if stanza_configs:
        loglevel = stanza_configs[0].get("loglevel", "INFO")
        _LOGGER.info("Setup logging level=%s", loglevel)
        for log_file in all_logs:
            utils.setup_logging(log_file, loglevel, True)
    ta_manager = tm.TAManager(meta_configs, stanza_configs[0])
    _setup_signal_handler(ta_manager)
    ta_manager.run()
示例6: main
def main():
    """Validate per-ticker price CSVs, optionally resuming from --from_ticker."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--from_ticker', default='')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        lines = fp.read().splitlines()
    # Resume support: keep tickers lexicographically >= --from_ticker
    # (the default '' keeps everything).
    tickers = [line for line in lines if line >= args.from_ticker]
    logging.info('Processing %d tickers' % len(tickers))
    for i, ticker in enumerate(tickers):
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        # '^' appears in index tickers (e.g. ^GSPC) but not in file names.
        input_path = '%s/%s.csv' % (args.input_dir, ticker.replace('^', '_'))
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        validate(input_path)
示例7: __init__
def __init__(self, name, port, pin, scale_factor, zero_point):
    """Configure the servo GPIO pin and bind the control socket.

    Args:
        name: logging identifier passed to setup_logging.
        port: TCP port to listen on.
        pin: board-numbered GPIO pin driving the servo.
        scale_factor: servo calibration constant (stored, not used here).
        zero_point: servo calibration constant (stored, not used here).

    Exits the process if RPi.GPIO cannot be imported or the bind fails.
    """
    logger = logging.getLogger('log')
    setup_logging(name)
    try:
        import RPi.GPIO as GPIO
    except ImportError:
        logger.critical('[Servo Socket]: GPIO not configured properly!')
        sys.exit(1)
    self.port = port
    self.pin = pin
    self.scale_factor = scale_factor
    self.zero_point = zero_point
    # Configure the servo
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(self.pin, GPIO.OUT)
    # Define the socket parameters
    HOST = ''
    PORT = self.port
    connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind socket to local host and port
    try:
        connection.bind((HOST, PORT))
    except socket.error as msg:
        # 'as' replaces the Python-2-only comma syntax. errno/strerror
        # attributes exist on Python 2.6+ and 3 (tuple indexing of the
        # exception does not work on Python 3's OSError).
        logger.critical('[Servo Socket]: Bind failed. Error Code: '
                        + str(msg.errno) + ' Message ' + msg.strerror)
        sys.exit()
示例8: main
def main():
    """Write per-ticker tangible-book-to-price values to --output_path.

    output = (total_assets - intangible_assets - total_liabilities)
             / outstanding_shares / price
    Only tickers present in all of the assets, liabilities, price and
    shares maps are emitted; intangible assets default to 0 when missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--total_assets_path', required=True)
    parser.add_argument('--intangible_assets_path', required=True)
    parser.add_argument('--total_liabilities_path', required=True)
    parser.add_argument('--prices_path', required=True)
    parser.add_argument('--outstanding_shares_path', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    ta_map = utils.read_map(args.total_assets_path)
    tl_map = utils.read_map(args.total_liabilities_path)
    p_map = utils.read_map(args.prices_path)
    s_map = utils.read_map(args.outstanding_shares_path)
    # Intersection of dict key views: tickers covered by every required input.
    tickers = ta_map.keys() & tl_map.keys() & p_map.keys() & s_map.keys()
    # intangible assets are 0 by default
    ia_map = {t: 0.0 for t in tickers}
    ia_map.update(utils.read_map(args.intangible_assets_path))
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(tickers):
            output = ((ta_map[ticker] - ia_map[ticker] - tl_map[ticker])
                      / s_map[ticker] / p_map[ticker])
            print('%s %f' % (ticker, output), file=fp)
示例9: main
def main():
    """Sample per-ticker price CSVs from --input_dir into --output_dir.

    Skips missing inputs and, unless --overwrite is given, outputs that
    already exist.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--overwrite', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    # Sanity check: sampling in place would clobber the inputs.
    assert args.input_dir != args.output_dir
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    for i, ticker in enumerate(tickers):
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        # '^' appears in index tickers (e.g. ^GSPC) but not in file names;
        # compute the sanitized name once for both paths.
        file_name = '%s.csv' % ticker.replace('^', '_')
        input_path = '%s/%s' % (args.input_dir, file_name)
        if not path.isfile(input_path):
            logging.warning('Input file is missing: %s' % input_path)
            continue
        output_path = '%s/%s' % (args.output_dir, file_name)
        if path.isfile(output_path) and not args.overwrite:
            logging.warning('Output file exists and not overwritable: %s'
                            % output_path)
            continue
        sample(input_path, output_path)
示例10: main
def main():
    """Fetch and build once, then run the setup/test matrix of DB x install mode."""
    _mkdirs(SRCDIR, INSTALLDIR)
    setup_logging()
    fetch_and_build()
    for backend in ('sqlite3', 'mysql'):
        if backend == 'mysql':
            # mysql needs a root password configured before the first test run.
            shell('mysqladmin -u root password %s' % MYSQL_ROOT_PASSWD)
        for mode in ('prompt', 'auto'):
            # Each combination starts from an empty install tree.
            shell('rm -rf {}/*'.format(INSTALLDIR))
            setup_and_test(backend, mode)
示例11: setup
def setup():
    """Initialize module state: shelves, log file, and in-memory warning capture."""
    # Module-level state shared with the rest of the script.
    global copied, uploaded, last_scanned, warnings
    copied = open_shelf("copied.db")
    uploaded = open_shelf("uploaded.db")
    last_scanned = []
    log_path = os.path.join(PROJECT_PATH, "smugsync.log")
    utils.setup_logging(log_path)
    # Mirror WARNING-and-above records into an in-memory buffer so they can
    # be inspected/reported after a run, in addition to the log file.
    # NOTE(review): StringIO.StringIO is Python 2 only; io.StringIO on Python 3.
    warnings = StringIO.StringIO()
    handler = logging.StreamHandler(warnings)
    handler.setLevel(logging.WARNING)
    # Attach to the root logger so all loggers feed the warning buffer.
    logging.getLogger("").addHandler(handler)
示例12: main
def main():
    """Write each ticker's average monthly dollar volume over the k months
    ending at --yyyy_mm to --output_path as 'ticker value' lines.

    Tickers without a full k months of data are skipped with a warning.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--price_dir', required=True)
    parser.add_argument('--yyyy_mm', required=True)
    parser.add_argument('--k', default='12')
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    k = int(args.k)
    assert k > 0
    volume_map = dict()
    for i, ticker in enumerate(tickers):
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        # '^' appears in index tickers (e.g. ^GSPC) but not in file names.
        input_path = '%s/%s.csv' % (args.price_dir, ticker.replace('^', '_'))
        if not path.isfile(input_path):
            logging.warning('Input file is missing: %s' % input_path)
            continue
        with open(input_path, 'r') as fp:
            lines = fp.read().splitlines()
        vmap = dict()  # month (yyyy-mm) -> accumulated dollar volume
        assert len(lines) > 0
        # Skip the header row. The break below assumes rows are sorted
        # newest-first -- TODO confirm against the data files.
        for row in lines[1:]:
            date, _open, _high, _low, _close, volume, adj_close = row.split(',')
            month = date[:7]
            if args.yyyy_mm < month: continue  # month after the target window
            if distance(args.yyyy_mm, month) >= k: break  # past the window
            # volume * adjusted close -- presumably dollar volume; confirm.
            dollars = float(volume) * float(adj_close)
            vmap[month] = vmap.get(month, 0.0) + dollars
        assert len(vmap) <= k
        if len(vmap) < k:
            logging.warning('Could not find enough data for %s' % ticker)
            continue
        volume_map[ticker] = sum(vmap.values()) / len(vmap)
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(volume_map.keys()):
            print('%s %f' % (ticker, volume_map[ticker]), file=fp)
示例13: start
def start():
    """Start the scheduler.

    Spawns the scheduler subprocess, then starts the AMQP consumer -- in a
    Thread normally, but in a Process when settings.DEBUG is set.
    """
    setup_logging(logging.DEBUG if settings.DEBUG is True else logging.INFO)
    # Queue connecting the AMQP consumer to the scheduler subprocess.
    queue = Queue()
    # Start scheduler subprocess; pass our pid so it can watch the parent.
    Process(target=scheduler_process, args=(queue, os.getpid())).start()
    # To support Ctrl+C in debug mode, run the AMQP loop in a separate
    # process there instead of a thread.
    if not settings.DEBUG:
        Thread(target=amqp_thread, args=(queue, )).start()
    else:
        Process(target=amqp_thread, args=(queue, )).start()
示例14: main
def main():
    """Compute each ticker's k-month excess return versus the market.

    Requires sampled prices for both --yyyy_mm and the month k steps
    earlier, for the stock and for the market series; writes
    'ticker excess' lines to --output_path.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--price_sample_dir', required=True)
    parser.add_argument('--market_sample_path', required=True)
    parser.add_argument('--yyyy_mm', required=True)
    parser.add_argument('--k', required=True)
    parser.add_argument('--output_path', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    k = int(args.k)
    assert k > 0
    market_samples = read_samples(args.market_sample_path)
    curr_date = args.yyyy_mm
    prev_date = compute_date(curr_date, k)
    logging.info('current date = %s, previous date = %s' % (curr_date, prev_date))
    assert curr_date in market_samples
    assert prev_date in market_samples
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    excess_map = dict()
    for i, ticker in enumerate(tickers):
        assert ticker.find('^') == -1  # ^GSPC should not be in tickers.
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        stock_sample_path = '%s/%s.csv' % (args.price_sample_dir, ticker)
        if not path.isfile(stock_sample_path):
            logging.warning('Input file does not exist: %s' % stock_sample_path)
            continue
        stock_samples = read_samples(stock_sample_path)
        if (curr_date not in stock_samples
            or prev_date not in stock_samples):
            logging.warning('Insufficient data for %s' % ticker)
            continue
        excess = compute_excess(
            stock_samples[prev_date], stock_samples[curr_date],
            market_samples[prev_date], market_samples[curr_date])
        excess_map[ticker] = excess
    with open(args.output_path, 'w') as fp:
        for ticker in sorted(excess_map.keys()):
            print('%s %f' % (ticker, excess_map[ticker]), file=fp)
示例15: main
def main():
    """Validate per-ticker report CSVs of the given --report_type.

    Tallies how many tickers carry optional metrics and consecutive
    quarters, and logs the set of keys common to every validated file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ticker_file', required=True)
    parser.add_argument('--from_ticker', default='')
    parser.add_argument('--report_type', required=True)
    parser.add_argument('--input_dir', required=True)
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()
    utils.setup_logging(args.verbose)
    rt = args.report_type
    assert rt in TYPE_MAP, (
        'report_type must be one of %s' % TYPE_MAP.keys())
    (req_map, opt_map, add_map, skip_map) = TYPE_MAP[rt]
    # Tickers are listed one per line.
    with open(args.ticker_file, 'r') as fp:
        tickers = fp.read().splitlines()
    logging.info('Processing %d tickers' % len(tickers))
    total, opts, quarterly = 0, 0, 0
    common_keys = None  # intersection of key sets over all validated files
    for i, ticker in enumerate(tickers):
        # Resume support (--from_ticker) plus a known-bad ticker blacklist.
        if ticker < args.from_ticker or ticker in SKIPPED_TICKERS:
            logging.info('%d/%d: skipped %s' % (i+1, len(tickers), ticker))
            continue
        logging.info('%d/%d: %s' % (i+1, len(tickers), ticker))
        input_path = '%s/%s.csv' % (args.input_dir, ticker)
        if not path.isfile(input_path):
            logging.warning('Input file does not exist: %s' % input_path)
            continue
        keys, has_opt, is_quarterly = validate(
            input_path, ticker, req_map, opt_map, add_map, skip_map)
        if common_keys is None:
            common_keys = keys
        else:
            common_keys &= keys
        if has_opt:
            opts += 1
        if is_quarterly:
            quarterly += 1
        total += 1
    logging.info('%d out of %d have optional metrics' % (opts, total))
    logging.info('%d out of %d are consecutive quarters' % (quarterly, total))
    logging.info('Common keys: %s' % common_keys)