This article collects typical usage examples of the Python method process.logging.Logger.info. If you are unsure how to use Logger.info, what it is for, or what calling it looks like in practice, the curated code examples below may help. You can also browse further usage examples of the containing class, process.logging.Logger.
The following shows 15 code examples of the Logger.info method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
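All of the examples below import the Logger class under the alias log and call info directly on it, passing a pre-formatted message string. Here is a minimal sketch of that pattern; the file path in the message is purely illustrative, and the assumption that info can be called on the class itself (rather than on an instance) is taken from the snippets that follow.
# Minimal usage sketch (assumptions noted above: process.logging is the
# project-local logging module used by these examples, and log.info()
# accepts an already-formatted string).
from process.logging import Logger as log

log.info("Loaded config from {path}.".format(path="/etc/example.yaml"))
log.info("Running query #%s" % 1)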
Example 1: load_config
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def load_config(app_name):
    global config

    search_filenames = [
        os.path.expanduser("~/.fundraising/%s.yaml" % app_name),
        os.path.expanduser("~/.%s.yaml" % app_name),
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/config.yaml" % app_name,
        "/etc/fundraising/%s.yaml" % app_name,
        "/etc/%s.yaml" % app_name,
        # FIXME: relative path fail
        os.path.dirname(__file__) + "/../%s/%s.yaml" % (app_name, app_name,)
    ]
    # TODO: if getops.get(--config/-f): search_filenames.append

    for filename in search_filenames:
        if not os.path.exists(filename):
            continue

        config = DictAsAttrDict(load_yaml(file(filename, 'r')))
        log.info("Loaded config from {path}.".format(path=filename))

        config.app_name = app_name
        return

    raise Exception("No config found, searched " + ", ".join(search_filenames))
Example 2: write_gdoc_results
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def write_gdoc_results(doc=None, results=[]):
    log.info("Writing test results to {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)

    for result in results:
        props = {}
        props.update(result['criteria'])
        props.update(result['results'])
        doc.append_row(props)
Example 3: get
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def get(self, filename, dest_path):
    try:
        self.client.get(filename, dest_path)
    except:
        if os.path.exists(dest_path):
            log.info("Removing corrupted download: {path}".format(path=dest_path))
            os.unlink(dest_path)
        raise
Example 4: __init__
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def __init__(self, name):
    self.name = name

    sql = "INSERT INTO donor_autoreview_job SET name = %s"
    dbc = db.get_db(config.drupal_schema)
    dbc.execute(sql, (name, ))
    self.id = dbc.last_insert_id()

    log.info("This job has ID %d" % self.id)
Example 5: rotate_files
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def rotate_files():
    # Clean up after ourselves
    if config.days_to_keep_files:
        now = time.time()
        for f in os.listdir(config.working_path):
            path = os.path.join(config.working_path, f)
            if os.stat(path).st_mtime < (now - config.days_to_keep_files * 86400):
                if os.path.isfile(path):
                    log.info("Removing old file %s" % path)
                    os.remove(path)
Example 6: run_queries
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def run_queries(db, queries):
    """
    Build silverpop_export database from CiviCRM.
    """
    i = 1
    for query in queries:
        no_prefix = query[query.index("\n") + 1:]
        info = (i, no_prefix[:80])
        log.info("Running query #%s: %s" % info)
        db.execute(query)
        i += 1
Example 7: export_data
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def export_data(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting full data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export_view')
    exportq.columns.append('*')
    run_export_query(
        db=db,
        query=exportq,
        output=output_path,
        sort_by_index="ContactID"
    )
Example 8: connect
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def connect(self):
    log.info("Connecting to {host}".format(host=config.sftp.host))
    transport = paramiko.Transport((config.sftp.host, 22))
    params = {
        'username': config.sftp.username,
    }
    if hasattr(config.sftp, 'host_key'):
        params['hostkey'] = make_key(config.sftp.host_key)
    if hasattr(config.sftp, 'password'):
        params['password'] = config.sftp.password
    if hasattr(config.sftp, 'private_key'):
        params['pkey'] = make_key(config.sftp.private_key)

    transport.connect(**params)
    self.client = paramiko.SFTPClient.from_transport(transport)
Example 9: export_unsubscribes
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def export_unsubscribes(output_path=None):
    db = DbConnection(**config.silverpop_db)

    log.info("Starting unsubscribe data export")
    exportq = DbQuery()
    exportq.tables.append('silverpop_export')
    exportq.columns.append('contact_id')
    exportq.columns.append('email')
    exportq.where.append('opted_out=1')
    run_export_query(
        db=db,
        query=exportq,
        output=output_path,
        sort_by_index="contact_id"
    )
Example 10: addMatch
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def addMatch(job_id, oldId, newId, action, match):
    log.info("Found a match: {old} -> {new} : {match}".format(old=oldId, new=newId, match=match))
    db.get_db(config.drupal_schema).execute("""
        INSERT INTO donor_review_queue
        SET
            job_id = %(job_id)s,
            old_id = %(old_id)s,
            new_id = %(new_id)s,
            action_id = %(action_id)s,
            match_description = %(match)s
        """, {
        'job_id': job_id,
        'old_id': oldId,
        'new_id': newId,
        'action_id': action.id,
        'match': match,
    })
Example 11: export_and_upload
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def export_and_upload():
    log.info("Begin Silverpop Export")
    make_sure_path_exists(config.working_path)

    updatefile = os.path.join(
        config.working_path,
        'DatabaseUpdate-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )
    unsubfile = os.path.join(
        config.working_path,
        'Unsubscribes-' + time.strftime("%Y%m%d%H%M%S") + '.csv'
    )

    export_data(output_path=updatefile)
    export_unsubscribes(output_path=unsubfile)
    upload([updatefile, unsubfile])
    rotate_files()

    log.info("End Silverpop Export")
Example 12: update_gdoc_results
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def update_gdoc_results(doc=None, results=[]):
    log.info("Updating results in {url}".format(url=doc))
    doc = Spreadsheet(doc=doc)
    existing = list(doc.get_all_rows())

    def find_matching_cases(criteria):
        matching = []

        def fuzzy_compare_row(row, criteria):
            if not row:
                return False
            if criteria['banner'] == row['banner'] and criteria['campaign'] == row['campaign'] and criteria['start'] == row['start']:
                return True

        for n, row in enumerate(existing, 1):
            if fuzzy_compare_row(row, criteria):
                matching.append(n)
        return matching

    for result in results:
        if not result:
            continue
        matching = find_matching_cases(result['criteria'])
        props = {}
        props.update(result['results'])
        props.update(result['criteria'])
        if len(matching) == 0:
            doc.append_row(props)
        else:
            if len(matching) > 1:
                log.warn("more than one result row {match} matches criteria: {criteria}".format(match=matching, criteria=result['criteria']))
            index = matching[-1]
            log.debug("updating row {rownum} with {banner}".format(rownum=index, banner=result['criteria']['banner']))
            doc.update_row(props, index=index)
Example 13: parse_line
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def parse_line(self, row):
    if row['Billing Address Line1']:
        addr_prefix = 'Billing Address '
    else:
        addr_prefix = 'Shipping Address '

    out = {
        'gateway_txn_id': row['Transaction ID'],
        'date': row['Transaction Initiation Date'],
        'settled_date': row['Transaction Completion Date'],
        'gross': float(row['Gross Transaction Amount']) / 100.0,
        'currency': row['Gross Transaction Currency'],
        'gateway_status': row['Transactional Status'],
        'gateway': 'paypal',
        'note': row['Transaction Note'],
        'email': row['Payer\'s Account ID'],
        'street_address': row[addr_prefix + 'Line1'],
        'supplemental_address_1': row[addr_prefix + 'Line2'],
        'city': row[addr_prefix + 'City'],
        'state_province': row[addr_prefix + 'State'],
        'postal_code': row[addr_prefix + 'Zip'],
        'country': row[addr_prefix + 'Country'],
    }

    if row['Fee Amount']:
        out['fee'] = float(row['Fee Amount']) / 100.0
        if row['Fee Currency'] and row['Gross Transaction Currency'] != row['Fee Currency']:
            raise RuntimeError("Failed to import because multiple currencies for one transaction is not handled.")

    if 'First Name' in row:
        out['first_name'] = row['First Name']
    if 'Last Name' in row:
        out['last_name'] = row['Last Name']
    if 'Payment Source' in row:
        out['payment_method'] = row['Payment Source']
    if 'Card Type' in row:
        out['payment_submethod'] = row['Card Type']
    if row['PayPal Reference ID Type'] == 'SUB':
        out['subscr_id'] = row['PayPal Reference ID']

    event_type = row['Transaction Event Code'][0:3]
    queue = None

    if event_type in ('T00', 'T03', 'T05', 'T07', 'T22'):
        if row['Transaction Event Code'] == 'T0002':
            queue = 'recurring'
            out = self.normalize_recurring(out)
        elif row['Transaction Debit or Credit'] == 'DR':
            # sic: double-space is coming from the upstream
            log.info("-Debit\t{id}\t{date}\tPayment to".format(id=out['gateway_txn_id'], date=out['date']))
            # This payment is from us! Do not send to the CRM.
            return
        else:
            queue = 'donations'
    elif event_type in ('T11', 'T12'):
        out['gateway_refund_id'] = out['gateway_txn_id']
        out['gross_currency'] = out['currency']
        if row['PayPal Reference ID Type'] == 'TXN':
            out['gateway_parent_id'] = row['PayPal Reference ID']
        if row['Transaction Event Code'] == 'T1106':
            out['type'] = 'reversal'
        elif row['Transaction Event Code'] == 'T1107':
            out['type'] = 'refund'
        elif row['Transaction Event Code'] == 'T1201':
            out['type'] = 'chargeback'
        else:
            log.info("-Unknown\t{id}\t{date}\t(Refundish type {type})".format(id=out['gateway_txn_id'], date=out['date'], type=row['Transaction Event Code']))
            return
        queue = 'refund'

    if not queue:
        log.info("-Unknown\t{id}\t{date}\t(Type {type})".format(id=out['gateway_txn_id'], date=out['date'], type=event_type))
        return

    if self.crm.transaction_exists(gateway_txn_id=out['gateway_txn_id'], gateway='paypal'):
        log.info("-Duplicate\t{id}\t{date}\t{type}".format(id=out['gateway_txn_id'], date=row['Transaction Initiation Date'], type=queue))
        return

    if 'last_name' not in out and queue != 'refund':
        out['first_name'], out['last_name'] = self.fetch_donor_name(out['gateway_txn_id'])

    if config.no_thankyou:
        out['thankyou_date'] = 0

    log.info("+Sending\t{id}\t{date}\t{type}".format(id=out['gateway_txn_id'], date=row['Transaction Initiation Date'], type=queue))
    self.send(queue, out)
Example 14: run_queries
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
    # (tail of a preceding function, presumably load_queries(), truncated in this excerpt)
    f.close()
    return queries


def run_queries(db, queries):
    i = 1
    for query in queries:
        log.info("Running query #%s" % i)
        db.execute(query)
        i += 1


if __name__ == '__main__':
    global config

    log.info("Begin Silverpop Update")
    lock.begin()

    log.info("Loading update query set")
    update_queries = load_queries('update_table.sql')

    db = DbConnection(**config.silverpop_db)

    log.info("Starting update query run")
    run_queries(db, update_queries)

    export.export_and_upload()

    lock.end()
    log.info("End Silverpop Export")
Example 15: buildQuery
# Required import: from process.logging import Logger [as alias]
# Or: from process.logging.Logger import info [as alias]
def buildQuery(self):
    query = super(PagedGroup, self).buildQuery()

    log.info("Limiting batch contact retrieval to {num} records.".format(num=self.pagesize))
    query.limit = self.pagesize
    query.offset = self.offset

    return query