本文整理汇总了Python中vsc.fancylogger.getLogger函数的典型用法代码示例。如果您正苦于以下问题:Python getLogger函数的具体用法?Python getLogger怎么用?Python getLogger使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了getLogger函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_parentinfo
def test_parentinfo(self):
    """Verify that _get_parent_info() reports the expected chain lengths."""
    # root fancylogger: parent chain of length 2
    root_log = fancylogger.getLogger(fname=False)
    self.assertEqual(len(root_log._get_parent_info()), 2)

    # fname=False is required to have the naming similar for child relations
    lvl1 = fancylogger.getLogger("level1", fname=False)
    self.assertEqual(len(lvl1._get_parent_info()), 3)

    # Logger.getChild only exists from Python 2.7 on
    has_getchild = sys.version_info >= (2, 7, 0)
    if has_getchild:
        lvl2a = lvl1.getChild("level2a")
        self.assertEqual(len(lvl2a._get_parent_info()), 4)

    # this should be identical to getChild; fname=False keeps the name similar
    lvl2b = fancylogger.getLogger("level1.level2b", fname=False)
    if has_getchild:
        # names should only differ in the trailing letter (a vs b)
        self.assertEqual(lvl2a.name[:-1], lvl2b.name[:-1])
    # yes, this is broken on several levels (incl. in logging itself):
    # a '.' in the name does not automatically create parent/child relations,
    # but it does work when a parent with that name already exists
    self.assertEqual(len(lvl2b._get_parent_info()), 4)

    # 'level1a' parent does not exist, so the chain stays at length 3
    lvl2c = fancylogger.getLogger("level1a.level2c", fname=False)
    self.assertEqual(len(lvl2c._get_parent_info()), 3)
示例2: setUp
def setUp(self):
    """Dynamically replace the Modules class with MockModule."""
    # take control over the modules tool used by config and main
    config.modules_tool = mock_module
    main.modules_tool = mock_module

    self.log = fancylogger.getLogger("RobotTest", fname=False)
    # redefine the main log so the main functions can be called directly
    main._log = fancylogger.getLogger("main", fname=False)

    self.cwd = os.getcwd()
    self.base_easyconfig_dir = find_full_path(os.path.join("test", "framework", "easyconfigs"))
    self.assertTrue(self.base_easyconfig_dir)
示例3: __init__
def __init__(self, exe, args):
    """Initialisation arguments determine the executable to run and its arguments."""
    self.log = fancylogger.getLogger(self.__class__.__name__)

    self.exe = exe
    self.args = args
    self.maxprocs = 2

    # 'self' communicator placeholders (set up later)
    self.commself = None
    self.selfsize = None
    self.selfrank = None

    # clone communicator placeholders
    self.commclone = None
    self.clonesize = None
    self.clonerank = None

    # active communicator / group placeholders
    self.comm = None
    self.size = None
    self.rank = None
    self.group = None
    self.groupsize = None
    self.grouprank = None

    self.stopwithbarrier = True
    self.workinitbarrier = True

    self.work = None
    self.dowork = None  # no work to be done is the default
示例4: __init__
def __init__(self, mod_paths=None):
    """
    Create a ModulesTool object
    @param mod_paths: A list of paths where the modules can be located
    @type mod_paths: list
    """
    self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)

    # make sure we don't have the same path twice
    if not mod_paths:
        self.mod_paths = None
    else:
        self.mod_paths = nub(mod_paths)

    # DEPRECATED!
    self._modules = []

    self.check_module_path()

    # actual module command (i.e. the binary, not the 'module' shell wrapper)
    self.cmd = None
    # shell that should be used to run the module command in (if any)
    self.shell = None
    # version of the modules tool
    self.version = None
    # prepends the terse command line option
    self.add_terse_opt_fn = lambda opts: opts.insert(0, '--terse')
示例5: __init__
def __init__(self, ranks):
    """Set up logging and hand the rank list to the MPI service."""
    self.log = fancylogger.getLogger(self.__class__.__name__)
    # no communicator initialisation here; reuse our own logger
    MpiService.__init__(self, initcomm=False, log=self.log)
    self.allranks = ranks
    self.commands = {}  # maps command -> list of ranks
示例6: __init__
def __init__(self, filename, retain_old=False):
"""Initializer.
Checks if the file can be accessed and load the data therein if any. If the file does not yet exist, start
with an empty shelf. This ensures that old data is readily available when the FileCache instance is created.
The file is closed after reading the data.
@type filename: string
@param filename: (absolute) path to the cache file.
"""
self.log = fancylogger.getLogger(self.__class__.__name__)
self.filename = filename
self.retain_old = retain_old
try:
f = open(self.filename, 'rb')
try:
# FIXME: This double block is due to a workaround for Python 2.4
# see http://stackoverflow.com/questions/820778/syntaxerror-in-finally-django
try:
self.shelf = pickle.load(f)
except:
self.log.raiseException("Could not load pickle data from %s" % (self.filename))
finally:
f.close()
except (OSError, IOError), err:
self.log.warning("Could not access the file cache at %s [%s]" % (self.filename, err))
self.shelf = {}
示例7: __init__
def __init__(self, filename, retain_old=False):
"""Initializer.
Checks if the file can be accessed and load the data therein if any. If the file does not yet exist, start
with an empty shelf. This ensures that old data is readily available when the FileCache instance is created.
The file is closed after reading the data.
@type filename: string
@param filename: (absolute) path to the cache file.
"""
self.log = fancylogger.getLogger(self.__class__.__name__)
self.filename = filename
self.retain_old = retain_old
try:
f = open(self.filename, 'rb')
try:
self.shelf = pickle.load(f)
except:
self.log.raiseException("Could not load pickle data from %s" % (self.filename))
finally:
f.close()
except (OSError, IOError), err:
self.log.error("Could not access the file cache at %s [%s]" % (self.filename, err))
self.shelf = {}
示例8: __init__
def __init__(self, cmd=None, **kwargs):
    """Set up the command runner; a true 'disable_log' in kwargs disables logging."""
    if kwargs.pop('disable_log', None):
        self.log = DummyFunction()  # no logging
    if not hasattr(self, 'log'):
        self.log = getLogger(self._get_log_name())

    self.cmd = cmd  # actual command
    self.input = None

    self.startpath = None
    self._cwd_before_startpath = None

    self._process_module = None
    self._process = None

    self.readsize = 1024  # number of bytes to read blocking

    self._shellcmd = None
    self._popen_named_args = None

    self._process_exitcode = None
    self._process_output = None

    self._post_exitcode_log_failure = self.log.error

    super(Run, self).__init__(**kwargs)
示例9: __init__
def __init__(self, modulePath=None):
    """
    Create a Modules object
    @param modulePath: A list of paths where the modules can be located
    @type modulePath: list
    """
    self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)

    # make sure we don't have the same path twice
    if not modulePath:
        self.modulePath = None
    else:
        self.modulePath = set(modulePath)

    self.modules = []
    self.check_module_path()

    pipe_kwargs = {'stdout': subprocess.PIPE, 'stderr': subprocess.STDOUT}
    # non-zero exit code means the command was not found
    eclm = subprocess.call(["which", "lmod"], **pipe_kwargs)  # is lmod available?
    ecem = subprocess.call(["which", "modulecmd"], **pipe_kwargs)  # is environment-modules installed?

    if ecem and eclm:
        # neither tool was found
        msg = "Could not find the modulecmd command, environment-modules is not installed?\n"
        msg += "Exit code of 'which modulecmd': %d" % ecem
        self.log.error(msg)
        raise EasyBuildError(msg)
    elif ecem:
        # modulecmd is missing, but lmod was found
        self.modulecmd = "lmod"
        os.environ['LMOD_EXPERT'] = '1'
    elif eclm:
        # lmod is missing, but modulecmd was found
        self.modulecmd = "modulecmd"
示例10: __init__
def __init__(self, options=None, **kwargs):
    """Initialise scheduler state, then collect node/cpu information."""
    if not hasattr(self, 'log'):
        self.log = getLogger(self.__class__.__name__)
    if not hasattr(self, 'options'):
        self.options = options

    # node bookkeeping
    self.nodes = None
    self.nrnodes = None
    self.uniquenodes = None
    self.nruniquenodes = None

    # MPI node / ppn bookkeeping
    self.mpinodes = None
    self.mpinrnodes = None
    self.mpitotalppn = None

    self.id = None

    # processors-per-node bookkeeping
    self.foundppn = None
    self.ppn = None
    self.totalppn = None
    self.cpus = []

    # collect data
    self.get_id()
    self.cores_on_this_node()
    self.which_cpus()
    self.get_node_list()
    self.get_unique_nodes()
    self.set_ppn()

    super(Sched, self).__init__(**kwargs)
示例11: test_deprecated
def test_deprecated(self):
    """Test deprecated log function."""
    # truncate the logfile; close immediately (the original leaked the handle)
    open(self.logfn, "w").close()

    # log message
    logger = fancylogger.getLogger("deprecated_test")
    max_ver = "1.0"

    # test whether deprecation works: above max_ver it must raise
    msgre_tpl_error = r"DEPRECATED\s*\(since v%s\).*%s" % (max_ver, MSG)
    self.assertErrorRegex(Exception, msgre_tpl_error, logger.deprecated, MSG, "1.1", max_ver)

    # test whether deprecated warning works: below max_ver it must only warn
    logger.deprecated(MSG, "0.9", max_ver)
    msgre_tpl_warning = r"WARNING.*DEPRECATED\s*\(since v%s\).*%s" % (max_ver, MSG)
    msgre_warning = re.compile(msgre_tpl_warning)

    logfile = open(self.logfn, "r")
    try:
        txt = logfile.read()
    finally:
        logfile.close()
    self.assertTrue(msgre_warning.search(txt))

    # test handling of non-UTF8 chars
    msg = MSG + "\x81"
    msgre_tpl_error = r"DEPRECATED\s*\(since v%s\).*%s" % (max_ver, msg)
    self.assertErrorRegex(Exception, msgre_tpl_error, logger.deprecated, msg, "1.1", max_ver)
    logger.deprecated(msg, "0.9", max_ver)

    logfile = open(self.logfn, "r")
    try:
        txt = logfile.read()
    finally:
        logfile.close()
    self.assertTrue(msgre_warning.search(txt))
示例12: __init__
def __init__(self, initcomm=True, log=None):
    """Set up MPI service state; optionally initialise the communicators."""
    if log is None:
        log = fancylogger.getLogger(name=self.__class__.__name__, fname=False)
    self.log = log

    self.comm = None
    self.size = -1
    self.rank = -1
    self.masterrank = MASTERRANK

    self.barriercounter = 0
    self.stopwithbarrier = True

    # run through all active work, then wait this many seconds
    self.wait_iter_sleep = 60

    self.allnodes = None  # node info per rank
    self.topocomm = None
    self.tempcomm = []
    self.active_work = []
    self.dists = None
    self.thisnode = None

    if initcomm:
        self.log.debug("Going to initialise the __init__ default communicators")
        self.init_comm()
    else:
        self.log.debug("No communicators initialised in __init__")
示例13: __init__
def __init__(self, script, name, env_vars=None, resources={}, conn=None, ppn=None):
"""
create a new Job to be submitted to PBS
env_vars is a dictionary with key-value pairs of environment variables that should be passed on to the job
resources is a dictionary with optional keys: ['hours', 'cores'] both of these should be integer values.
hours can be 1 - MAX_WALLTIME, cores depends on which cluster it is being run.
"""
self.clean_conn = True
self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
self.script = script
if env_vars:
self.env_vars = env_vars.copy()
else:
self.env_vars = {}
self.name = name
if pbs_import_failed:
self.log.error(pbs_import_failed)
try:
self.pbs_server = pbs.pbs_default()
if conn:
self.pbsconn = conn
self.clean_conn = False
else:
self.pbsconn = pbs.pbs_connect(self.pbs_server)
except Exception, err:
self.log.error("Failed to connect to the default pbs server: %s" % err)
示例14: request
def request(self, method, url, body, headers):
    """Perform an HTTP request and return (status, body).

    The response body is JSON-decoded when possible, otherwise returned raw.
    """
    if self.username:
        headers['Authorization'] = self.auth_header
    fancylogger.getLogger().debug('cli request: %s, %s, %s %s', method, url, body, headers)

    # TODO: use a context manager once the connection object supports it
    conn = self.get_connection()
    try:
        conn.request(method, url, body, headers)
        response = conn.getresponse()
        status = response.status
        body = response.read()
    finally:
        # make sure the connection is released, also when the request fails
        conn.close()

    try:
        pybody = json.loads(body)
    except ValueError:
        # not JSON: hand back the raw body
        pybody = body
    fancylogger.getLogger().debug('response len: %s ', len(pybody))
    return status, pybody
示例15: setUp
def setUp(self):
    """Create a temporary easyconfig file."""
    self.log = fancylogger.getLogger("EasyConfigTest", fname=False)
    self.cwd = os.getcwd()

    # all known build stop points
    self.all_stops = [step[0] for step in EasyBlock.get_steps()]

    # start from a clean slate
    if os.path.exists(self.eb_file):
        os.remove(self.eb_file)

    config.variables['source_path'] = os.path.join(os.path.dirname(__file__), 'easyconfigs')