本文整理匯總了Python中tornado.log.gen_log.info方法的典型用法代碼示例。如果您正苦於以下問題:Python gen_log.info方法的具體用法?Python gen_log.info怎麽用?Python gen_log.info使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類tornado.log.gen_log
的用法示例。
在下文中一共展示了gen_log.info方法的13個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Python代碼示例。
示例1: run
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def run(self, result=None):
    """Run the test while trapping log output on the root logger.

    Log lines emitted during the test are buffered in memory and only
    replayed to the real stream if the test added a failure or error.
    If logging was configured in an unrecognized way, the test runs
    untouched.
    """
    root_logger = logging.getLogger()
    if not root_logger.handlers:
        logging.basicConfig()
    first_handler = root_logger.handlers[0]
    unrecognized_config = (
        len(root_logger.handlers) > 1
        or not isinstance(first_handler, logging.StreamHandler)
    )
    if unrecognized_config:
        # Logging has been configured in a way we don't recognize,
        # so just leave it alone.
        super(LogTrapTestCase, self).run(result)
        return
    saved_stream = first_handler.stream
    try:
        first_handler.stream = StringIO()
        gen_log.info("RUNNING TEST: " + str(self))
        errors_before = len(result.failures) + len(result.errors)
        super(LogTrapTestCase, self).run(result)
        errors_after = len(result.failures) + len(result.errors)
        if errors_after != errors_before:
            # The test failed: replay the captured log output.
            saved_stream.write(first_handler.stream.getvalue())
    finally:
        first_handler.stream = saved_stream
示例2: reset_is_running_on_all_spider
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def reset_is_running_on_all_spider(coll_model):
    """Clear the ``is_running`` flag on every spider document.

    Run at startup so spiders left flagged as running by a previous
    unclean shutdown do not appear blocked.

    Args:
        coll_model: MongoDB collection holding the spider documents.
    """
    print()
    app_log.warning('>>> reset_is_running_on_all_spider ... ')
    # Materialize the cursor once: passing list(cursor) to the logger
    # would exhaust it, so any later use of the cursor would silently
    # yield nothing.
    running_spiders = list(coll_model.find({"scraper_log.is_running": True}))
    app_log.info(">>> running_spiders : \n %s", running_spiders)
    coll_model.update_many(
        {'scraper_log.is_running': True},
        {"$set": {'scraper_log.is_running': False}},
    )
示例3: post
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def post(self, *args):
    """Example ajax POST handler: decode the JSON body and echo it back."""
    payload = tornado.escape.json_decode(self.request.body)
    app_log.info("ajax / dic : \n %s ", pformat(payload))
    # useful code goes here
    response = {'status': 'ok', 'sent': payload}
    self.write(json.dumps(response))
    self.finish()
### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###
### INFOS / DOC ############################################################################
### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###
示例4: get
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def get(self):
    """Render the 'why' informational page for the infos site section."""
    app_log.info("InfosWhyHandler.get... ")
    self.site_section = "infos"
    template_kwargs = dict(
        page_title=app_main_texts["main_title"],
        app_host=self.request.host,
        site_section=self.site_section,
        user=self.current_user,
        is_user_connected=self.is_user_connected,
        user_email=self.user_email,
        user_auth_level=self.user_auth_level,
        user_auth_level_dict=self.user_auth_level_dict,
    )
    self.render("why.html", **template_kwargs)
示例5: read_until_regex
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until_regex(self, regex, callback=None, max_bytes=None):
    """Read asynchronously until ``regex`` matches the buffered data.

    The result contains the matching data plus everything before it.
    With a ``callback`` the data is delivered to it; otherwise a
    `.Future` is returned. If ``max_bytes`` is not None the connection
    is closed once more than ``max_bytes`` bytes arrive without a match.
    """
    future = self._set_read_callback(callback)
    self._read_regex = re.compile(regex)
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=True)
    except BaseException:
        if future is not None:
            # Keep the future from logging a never-examined failure.
            future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例6: read_until
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until(self, delimiter, callback=None, max_bytes=None):
    """Read asynchronously until ``delimiter`` is found.

    The result includes all data read, delimiter included. With a
    ``callback`` the data is delivered to it; otherwise a `.Future` is
    returned. If ``max_bytes`` is not None the connection is closed
    once more than ``max_bytes`` bytes arrive without the delimiter.
    """
    future = self._set_read_callback(callback)
    self._read_delimiter = delimiter
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=True)
    except BaseException:
        if future is not None:
            # Keep the future from logging a never-examined failure.
            future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例7: _check_file
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def _check_file(modify_times, path):
try:
modified = os.stat(path).st_mtime
except Exception:
return
if path not in modify_times:
modify_times[path] = modified
return
if modify_times[path] != modified:
gen_log.info("%s modified; restarting server", path)
_reload()
示例8: read_until_regex
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until_regex(self, regex: bytes, max_bytes: int = None) -> Awaitable[bytes]:
    """Read asynchronously until ``regex`` matches the buffered data.

    The awaited result contains the matching data plus everything
    before it. If ``max_bytes`` is not None the connection is closed
    once more than ``max_bytes`` bytes arrive without a match.
    """
    # NOTE(review): max_bytes defaults to None, so the annotation is
    # effectively Optional[int] — kept as-is to preserve the signature.
    future = self._start_read()
    self._read_regex = re.compile(regex)
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=exc)
    except BaseException:
        # Keep the future from logging a never-examined failure.
        future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例9: read_until
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until(self, delimiter: bytes, max_bytes: int = None) -> Awaitable[bytes]:
    """Read asynchronously until ``delimiter`` is found.

    The awaited result includes all data read, delimiter included. If
    ``max_bytes`` is not None the connection is closed once more than
    ``max_bytes`` bytes arrive without the delimiter.
    """
    # NOTE(review): max_bytes defaults to None, so the annotation is
    # effectively Optional[int] — kept as-is to preserve the signature.
    future = self._start_read()
    self._read_delimiter = delimiter
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=exc)
    except BaseException:
        # Keep the future from logging a never-examined failure.
        future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例10: _check_file
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def _check_file(modify_times: Dict[str, float], path: str) -> None:
try:
modified = os.stat(path).st_mtime
except Exception:
return
if path not in modify_times:
modify_times[path] = modified
return
if modify_times[path] != modified:
gen_log.info("%s modified; restarting server", path)
_reload()
示例11: read_until_regex
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until_regex(self, regex, callback=None, max_bytes=None):
    """Read asynchronously until ``regex`` matches the buffered data.

    The result contains the matching data plus everything before it.
    With a ``callback`` (deprecated since 5.1, removed in 6.0) the data
    is delivered to it; otherwise a `.Future` is returned. If
    ``max_bytes`` is not None the connection is closed once more than
    ``max_bytes`` bytes arrive without a match.
    """
    future = self._set_read_callback(callback)
    self._read_regex = re.compile(regex)
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=exc)
    except BaseException:
        if future is not None:
            # Keep the future from logging a never-examined failure.
            future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例12: read_until
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def read_until(self, delimiter, callback=None, max_bytes=None):
    """Read asynchronously until ``delimiter`` is found.

    The result includes all data read, delimiter included. With a
    ``callback`` (deprecated since 5.1, removed in 6.0) the data is
    delivered to it; otherwise a `.Future` is returned. If
    ``max_bytes`` is not None the connection is closed once more than
    ``max_bytes`` bytes arrive without the delimiter.
    """
    future = self._set_read_callback(callback)
    self._read_delimiter = delimiter
    self._read_max_bytes = max_bytes
    try:
        self._try_inline_read()
    except UnsatisfiableReadError as exc:
        # Same handling as in _handle_events: log and drop the connection.
        gen_log.info("Unsatisfiable read, closing connection: %s" % exc)
        self.close(exc_info=exc)
    except BaseException:
        if future is not None:
            # Keep the future from logging a never-examined failure.
            future.add_done_callback(lambda f: f.exception())
        raise
    return future
示例13: _check_file
# 需要導入模塊: from tornado.log import gen_log [as 別名]
# 或者: from tornado.log.gen_log import info [as 別名]
def _check_file(modify_times, module, path):
try:
modified = os.stat(path).st_mtime
except Exception:
return
if path not in modify_times:
modify_times[path] = modified
return
if modify_times[path] != modified:
gen_log.info("%s modified; restarting server", path)
modify_times[path] = modified
else:
return
if path == __file__ or path == os.path.join(os.path.dirname(__file__),
"event_queue.py"):
# Assume that the autoreload library itself imports correctly,
# because reloading this file will destroy its state,
# including _reload_hooks
return True
try:
importlib.reload(module)
except Exception:
gen_log.error(f"Error importing {path}, not reloading")
traceback.print_exc()
return False
return True