This article collects typical usage examples of the Log.note method from the Python module pyLibrary.debugs.logs. If you are wondering how to call Log.note, what it is for, or simply want to see it used in practice, the curated examples here should help. You can also browse further usage examples for its containing class, pyLibrary.debugs.logs.Log.
Fifteen code examples of the Log.note method are shown below, sorted by popularity by default.
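Before the examples, here is a minimal sketch of the calling pattern they all share. The no-argument Log.start() call and the template values are illustrative assumptions, not taken from any example below:

# Minimal sketch: Log.note() takes a moustache-style template plus keyword
# parameters that are substituted into the message when it is emitted.
from pyLibrary.debugs.logs import Log

Log.start()  # assumed default start-up; the real examples pass their own settings
Log.note("loaded {{num}} records from {{name}}", num=42, name="example")

The doubled-brace placeholders keep the message template constant while the parameters stay structured, which is the pattern every example below follows.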
Example 1: main
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def main():
    try:
        settings = startup.read_settings()
        Log.start(settings.debug)
        constants.set(settings.constants)

        with startup.SingleInstance(flavor_id=settings.args.filename):
            with aws.s3.Bucket(settings.destination) as bucket:
                if settings.param.debug:
                    if settings.source.durable:
                        Log.error("Can not run in debug mode with a durable queue")
                    synch = SynchState(bucket.get_key(SYNCHRONIZATION_KEY, must_exist=False))
                else:
                    synch = SynchState(bucket.get_key(SYNCHRONIZATION_KEY, must_exist=False))
                    if settings.source.durable:
                        synch.startup()

                queue = PersistentQueue(settings.param.queue_file)
                if queue:
                    last_item = queue[len(queue) - 1]
                    synch.source_key = last_item._meta.count + 1

                with pulse.Consumer(settings=settings.source, target=None, target_queue=queue, start=synch.source_key):
                    Thread.run("pulse log loop", log_loop, settings, synch, queue, bucket)
                    Thread.wait_for_shutdown_signal(allow_exit=True)
                    Log.warning("starting shutdown")

                queue.close()
                Log.note("write shutdown state to S3")
                synch.shutdown()
    except Exception, e:
        Log.error("Problem with etl", e)
Example 2: extend
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def extend(self, records):
    """
    records - MUST HAVE FORM OF
    [{"value":value}, ... {"value":value}] OR
    [{"json":json}, ... {"json":json}]
    OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
    """
    lines = []
    try:
        for r in records:
            id = r.get("id")
            if id == None:
                id = Random.hex(40)

            if "json" in r:
                json = r["json"]
            elif "value" in r:
                json = convert.value2json(r["value"])
            else:
                json = None
                Log.error("Expecting every record given to have \"value\" or \"json\" property")

            lines.append('{"index":{"_id": ' + convert.value2json(id) + '}}')
            lines.append(json)
        del records

        if not lines:
            return

        try:
            data_bytes = "\n".join(lines) + "\n"
            data_bytes = data_bytes.encode("utf8")
        except Exception, e:
            Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e)

        response = self.cluster._post(
            self.path + "/_bulk",
            data=data_bytes,
            headers={"Content-Type": "text"},
            timeout=self.settings.timeout
        )
        items = response["items"]

        for i, item in enumerate(items):
            if self.cluster.version.startswith("0.90."):
                if not item.index.ok:
                    Log.error(
                        "{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1]
                    )
            elif self.cluster.version.startswith("1.4."):
                if item.index.status not in [200, 201]:
                    Log.error(
                        "{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1]
                    )
            else:
                Log.error("version not supported {{version}}", version=self.cluster.version)

        if self.debug:
            Log.note("{{num}} documents added", num=len(items))
Example 3: commit
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def commit(self):
    with self.lock:
        if self.closed:
            Log.error("Queue is closed, commit not allowed")

        try:
            self._add_pending({"add": {"status.start": self.start}})
            for i in range(self.db.status.start, self.start):
                self._add_pending({"remove": str(i)})

            if self.db.status.end - self.start < 10 or Random.range(0, 1000) == 0:  # FORCE RE-WRITE TO LIMIT FILE SIZE
                # SIMPLY RE-WRITE FILE
                if DEBUG:
                    Log.note(
                        "Re-write {{num_keys}} keys to persistent queue",
                        num_keys=self.db.status.end - self.start
                    )
                    for k in self.db.keys():
                        if k == "status" or int(k) >= self.db.status.start:
                            continue
                        Log.error("Not expecting {{key}}", key=k)
                self._commit()
                self.file.write(convert.value2json({"add": self.db}) + "\n")
            else:
                self._commit()
        except Exception, e:
            raise e
Example 4: close
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def close(self):
    self.please_stop.go()
    with self.lock:
        if self.db is None:
            return

        self.add(Thread.STOP)

        if self.db.status.end == self.start:
            if DEBUG:
                Log.note("persistent queue clear and closed")
            self.file.delete()
        else:
            if DEBUG:
                Log.note("persistent queue closed with {{num}} items left", num=len(self))
            try:
                self._add_pending({"add": {"status.start": self.start}})
                for i in range(self.db.status.start, self.start):
                    self._add_pending({"remove": str(i)})
                self.file.write(
                    convert.value2json({"add": self.db})
                    + "\n"
                    + ("\n".join(convert.value2json(p) for p in self.pending))
                    + "\n"
                )
                self._apply_pending()
            except Exception, e:
                raise e
        self.db = None
Example 5: get_columns
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def get_columns(self, table_name, column_name=None, force=False):
    """
    RETURN METADATA COLUMNS
    """
    try:
        # LAST TIME WE GOT INFO FOR THIS TABLE
        short_name = join_field(split_field(table_name)[0:1])
        table = self.get_table(short_name)[0]

        if not table:
            table = Table(
                name=short_name,
                url=None,
                query_path=None,
                timestamp=Date.now()
            )
            with self.meta.tables.locker:
                self.meta.tables.add(table)
            self._get_columns(table=short_name)
        elif force or table.timestamp == None or table.timestamp < Date.now() - MAX_COLUMN_METADATA_AGE:
            table.timestamp = Date.now()
            self._get_columns(table=short_name)

        with self.meta.columns.locker:
            columns = self.meta.columns.find(table_name, column_name)
        if columns:
            columns = jx.sort(columns, "name")
            # AT LEAST WAIT FOR THE COLUMNS TO UPDATE
            while len(self.todo) and not all(columns.get("last_updated")):
                Log.note("waiting for columns to update {{columns|json}}", columns=[c.table + "." + c.es_column for c in columns if not c.last_updated])
                Thread.sleep(seconds=1)
            return columns
    except Exception, e:
        Log.error("Not expected", cause=e)
Example 6: pop
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def pop(self, timeout=None):
    """
    :param timeout: OPTIONAL DURATION
    :return: None, IF timeout PASSES
    """
    with self.lock:
        while not self.please_stop:
            if self.db.status.end > self.start:
                value = self.db[str(self.start)]
                self.start += 1
                return value

            if timeout is not None:
                try:
                    self.lock.wait(timeout=timeout)
                    if self.db.status.end <= self.start:
                        return None
                except Exception:
                    pass
            else:
                try:
                    self.lock.wait()
                except Exception:
                    pass
        if DEBUG:
            Log.note("persistent queue already stopped")
        return Thread.STOP
Example 7: main
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def main():
    """
    CLEAR OUT KEYS FROM BUCKET BY RANGE, OR BY FILE
    """
    settings = startup.read_settings(defs=[
        {
            "name": ["--bucket"],
            "help": "bucket to scan",
            "type": str,
            "dest": "bucket",
            "required": True
        }
    ])
    Log.start(settings.debug)

    source = Connection(settings.aws).get_bucket(settings.args.bucket)

    for k in qb.sort(source.keys()):
        try:
            data = source.read_bytes(k)
            if convert.ascii2unicode(data).find("2e2834fa7ecd8d3bb1ad49ec981fdb89eb4df95e18") >= 0:
                Log.note("Found at {{key}}", key=k)
        except Exception, e:
            Log.warning("Problem with {{key}}", key=k, cause=e)
        finally:
            pass  # body truncated in the original listing
Example 8: get_revision
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def get_revision(self, revision, locale=None):
    """
    EXPECTING INCOMPLETE revision
    RETURNS revision
    """
    rev = revision.changeset.id
    if not rev:
        return Null
    elif rev == "None":
        return Null
    elif revision.branch.name == None:
        return Null
    locale = coalesce(locale, revision.branch.locale, DEFAULT_LOCALE)

    doc = self._get_from_elasticsearch(revision, locale=locale)
    if doc:
        Log.note(
            "Got hg ({{branch}}, {{locale}}, {{revision}}) from ES",
            branch=doc.branch.name,
            locale=locale,
            revision=doc.changeset.id
        )
        return doc

    output = self._load_all_in_push(revision, locale=locale)
    return output
Example 9: monitor
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def monitor(self, please_stop):
    please_stop.on_go(lambda: self.todo.add(Thread.STOP))
    while not please_stop:
        try:
            if not self.todo:
                with self.columns.locker:
                    old_columns = filter(
                        lambda c: (c.last_updated == None or c.last_updated < Date.now() - TOO_OLD) and c.type not in ["object", "nested"],
                        self.columns
                    )
                    if old_columns:
                        self.todo.extend(old_columns)
                        # TEST CONSISTENCY
                        for c, d in product(list(self.todo.queue), list(self.todo.queue)):
                            if c.abs_name == d.abs_name and c.table == d.table and c != d:
                                Log.error("")
                    else:
                        Log.note("no more metadata to update")

            column = self.todo.pop(timeout=10*MINUTE)
            if column:
                if column.type in ["object", "nested"]:
                    continue
                elif column.last_updated >= Date.now() - TOO_OLD:
                    continue
                try:
                    self._update_cardinality(column)
                    Log.note("updated {{column.name}}", column=column)
                except Exception, e:
                    Log.warning("problem getting cardinality for {{column.name}}", column=column, cause=e)
        except Exception, e:
            Log.warning("problem in cardinality monitor", cause=e)
Example 10: pretty_json
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def pretty_json(value):
    try:
        if scrub(value) is None:
            return "null"
        elif isinstance(value, basestring):
            if isinstance(value, str):
                value = utf82unicode(value)
            try:
                return quote(value)
            except Exception, e:
                from pyLibrary.debugs.logs import Log

                try:
                    Log.note("try explicit convert of string with length {{length}}", length=len(value))
                    acc = [u"\""]
                    for c in value:
                        try:
                            try:
                                c2 = ESCAPE_DCT[c]
                            except Exception, h:
                                c2 = c
                            c3 = unicode(c2)
                            acc.append(c3)
                        except BaseException, g:
                            pass
                            # Log.warning("odd character {{ord}} found in string. Ignored.", ord=ord(c), cause=g)
                    acc.append(u"\"")
                    output = u"".join(acc)
                    Log.note("return value of length {{length}}", length=len(output))
                    return output
Example 11: safe_size
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def safe_size(source):
    """
    READ THE source UP TO SOME LIMIT, THEN COPY TO A FILE IF TOO BIG
    RETURN A str() OR A FileString()
    """
    if source is None:
        return None

    total_bytes = 0
    bytes = []
    b = source.read(MIN_READ_SIZE)
    while b:
        total_bytes += len(b)
        bytes.append(b)
        if total_bytes > MAX_STRING_SIZE:
            try:
                data = FileString(TemporaryFile())
                for bb in bytes:
                    data.write(bb)
                del bytes
                del bb
                b = source.read(MIN_READ_SIZE)
                while b:
                    total_bytes += len(b)
                    data.write(b)
                    b = source.read(MIN_READ_SIZE)
                data.seek(0)

                Log.note("Using file of size {{length}} instead of str()", length=total_bytes)

                return data
            except Exception, e:
                Log.error("Could not write file > {{num}} bytes", num=total_bytes, cause=e)
        b = source.read(MIN_READ_SIZE)
Example 12: verify_blobber_file
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def verify_blobber_file(line_number, name, url):
    """
    :param line_number: for debugging
    :param name: for debugging
    :param url: TO BE READ
    :return: RETURNS BYTES **NOT** UNICODE
    """
    if name in ["emulator-5554.log", "qemu.log"] or any(map(name.endswith, [".png", ".html"])):
        return None, 0

    with Timer("Read {{name}}: {{url}}", {"name": name, "url": url}, debug=DEBUG):
        response = http.get(url)
        try:
            logs = response.all_lines
        except Exception, e:
            if name.endswith("_raw.log"):
                Log.error(
                    "Line {{line}}: {{name}} = {{url}} is NOT structured log",
                    line=line_number,
                    name=name,
                    url=url,
                    cause=e
                )
            if DEBUG:
                Log.note(
                    "Line {{line}}: {{name}} = {{url}} is NOT structured log",
                    line=line_number,
                    name=name,
                    url=url
                )
            return None, 0
Example 13: event_loop
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def event_loop(self, please_stop):
    got_stop_message = False
    while not please_stop.is_go():
        with Timer("get more work", debug=DEBUG):
            request = self.in_queue.pop()
        if request == Thread.STOP:
            if DEBUG:
                Log.note("{{name}} got a stop message", name=self.name)
            got_stop_message = True
            if self.in_queue:
                Log.warning(
                    "programmer error, queue not empty. {{num}} requests lost:\n{{requests}}",
                    num=len(self.in_queue.queue),
                    requests=list(self.in_queue.queue)[:5:] + list(self.in_queue.queue)[-5::]
                )
            break
        if please_stop.is_go():
            break

        with Timer("run {{function}}", {"function": get_function_name(self.function)}, debug=DEBUG):
            try:
                result = self.function(**request)
                if self.out_queue != None:
                    self.out_queue.add({"response": result})
            except Exception, e:
                Log.warning("Can not execute with params={{params}}", params=request, cause=e)
                if self.out_queue != None:
                    self.out_queue.add({"exception": e})
            finally:
                self.num_runs += 1
Example 14: forall
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def forall(self, sql, param=None, _execute=None):
    assert _execute
    num = 0

    self._execute_backlog()
    try:
        old_cursor = self.cursor
        if not old_cursor:  # ALLOW NON-TRANSACTIONAL READS
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, self.quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql=indent(sql))
        self.cursor.execute(sql)

        columns = tuple([utf8_to_unicode(d[0]) for d in self.cursor.description])
        for r in self.cursor:
            num += 1
            _execute(wrap(dict(zip(columns, [utf8_to_unicode(c) for c in r]))))

        if not old_cursor:  # CLEANUP AFTER NON-TRANSACTIONAL READS
            self.cursor.close()
            self.cursor = None
    except Exception, e:
        Log.error("Problem executing SQL:\n{{sql|indent}}", sql=sql, cause=e, stack_depth=1)
Example 15: get_file
# Module to import: from pyLibrary.debugs.logs import Log [as alias]
# Or: from pyLibrary.debugs.logs.Log import note [as alias]
def get_file(ref, url):
    from pyLibrary.env.files import File

    if ref.path.startswith("~"):
        home_path = os.path.expanduser("~")
        if os.sep == "\\":
            home_path = "/" + home_path.replace(os.sep, "/")
        if home_path.endswith("/"):
            home_path = home_path[:-1]

        ref.path = home_path + ref.path[1::]
    elif not ref.path.startswith("/"):
        # CONVERT RELATIVE TO ABSOLUTE
        if ref.path[0] == ".":
            num_dot = 1
            while ref.path[num_dot] == ".":
                num_dot += 1
            parent = url.path.rstrip("/").split("/")[:-num_dot]
            ref.path = "/".join(parent) + ref.path[num_dot:]
        else:
            parent = url.path.rstrip("/").split("/")[:-1]
            ref.path = "/".join(parent) + "/" + ref.path

    path = ref.path if os.sep != "\\" else ref.path[1::].replace("/", "\\")

    try:
        if DEBUG:
            _Log.note("reading file {{path}}", path=path)
        content = File(path).read()
    except Exception, e:
        content = None
        _Log.error("Could not read file {{filename}}", filename=path, cause=e)