

Python PrintManager.debug Method Code Examples

This article collects typical usage examples of the PrintManager.debug method from the Python module DAS.utils.logger. If you are wondering what PrintManager.debug does, how to call it, or simply want to see it used in real code, the curated examples below should help. You can also browse further usage examples of the containing class, DAS.utils.logger.PrintManager.


Shown below are 8 code examples of the PrintManager.debug method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.

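Before diving into the examples, here is a minimal usage sketch inferred from them; the exact PrintManager behavior (for instance, which verbosity level enables debug output) is an assumption based on Example 1 rather than documented API:

from DAS.utils.logger import PrintManager

logger = PrintManager('MyComponent', verbose=2)
logger.info('component started')     # informational message
logger.debug('detailed state dump')  # assumed to print only at verbose >= 2
# Per the unit test in Example 1, debug output has the form:
#   DEBUG MyComponent:<calling function name> detailed state dump
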
Example 1: test_debug

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]
 def test_debug(self):
     "Test logger debug method"
     old_stdout = sys.stdout
     logger = PrintManager(self.name, verbose=2)
     sys.stdout = StringIO()
     logger.debug('test')
     result = sys.stdout.getvalue()
     expect = 'DEBUG %s:%s test\n' % (self.name, funcname())
     self.assertEqual(expect, result)
     sys.stdout = old_stdout
Developer: ktf, Project: DAS, Lines: 12, Source: logger_t.py
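
This test captures output by swapping sys.stdout for a StringIO buffer and restoring it by hand. On Python 3 the same capture can be written more robustly with contextlib.redirect_stdout, which restores the stream even if the assertion fails; a sketch:

from io import StringIO
from contextlib import redirect_stdout

def capture_stdout(func, *args, **kwargs):
    """Run func and return everything it printed to stdout."""
    buf = StringIO()
    with redirect_stdout(buf):
        func(*args, **kwargs)
    return buf.getvalue()

# Hypothetical usage mirroring the test above:
#   result = capture_stdout(logger.debug, 'test')
#   assert result.startswith('DEBUG')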

Example 2: DASAnalytics

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]
class DASAnalytics(object):
    """
    DAS analytics DB manager.
    """
    def __init__(self, config):
        self.verbose = config['verbose']
        self.logger  = PrintManager('DASAnalytics', self.verbose)
        self.dburi   = config['mongodb']['dburi']
        self.dbname  = config['analyticsdb']['dbname']        
        self.colname = config['analyticsdb']['collname']
        self.history = config['analyticsdb']['history']
        msg = "%[email protected]%s" % (self.dburi, self.dbname)
        self.logger.info(msg)
        self.create_db()

    def create_db(self):
        """
        Create analytics DB in MongoDB back-end.
        """
        self.conn = db_connection(self.dburi)
        database  = self.conn[self.dbname]
        das_son_manipulator = DAS_SONManipulator()
        database.add_son_manipulator(das_son_manipulator)
        self.col  = database[self.colname]
#        if  self.dbname not in self.conn.database_names():
#            capped_size = 104857600
#            options   = {'capped':True, 'size': capped_size}
#            database  = self.conn[self.dbname]
#            database.create_collection('self.colname', **options)
#            print "####CREATE CAPPED ANALYTICS"
#        self.col  = self.conn[self.dbname][self.colname] 

    def delete_db(self):
        """
        Delete analytics DB in MongoDB back-end.
        """
        self.conn.drop_database(self.dbname)

    def delete_db_collection(self):
        """
        Delete analytics DB collection in MongoDB.
        """
        self.conn.drop_collection(self.colname)

    def add_query(self, query, mongoquery):
        """
        Add DAS-QL/MongoDB-QL queries into analytics.
        
        A unique record is kept for each (qhash, dhash) pair;
        each record holds an array of call times.
        """
        if  isinstance(mongoquery, dict):
            mongoquery = encode_mongo_query(mongoquery)
        msg = 'query=%s, mongoquery=%s' % (query, mongoquery)
        self.logger.debug(msg)
        dhash = genkey(query)
        qhash = genkey(mongoquery)

        now = time.time()

        existing = self.col.find_one({'qhash': qhash, 'dhash': dhash})
        if existing:
            # check if times contains very old timestamps
            rec = self.col.find({'_id': ObjectId(existing['_id']), 
                                 'times':{'$lt' : now - self.history}})
            if  rec:
                self.col.update({'_id': ObjectId(existing['_id'])},
                    {'$pull': {'times': {'$lt' : now - self.history}}})
            # update times array with new timestamp
            self.col.update({'_id': ObjectId(existing['_id'])},
                            {'$push': {'times': now}})
        else:
            record = dict(query=query, mongoquery=mongoquery,
                        qhash=qhash, dhash=dhash, times=[now])
            self.col.insert(record)

        index = [('qhash', DESCENDING),
                 ('dhash', DESCENDING)]
        create_indexes(self.col, index)
        
    def clean_queries(self):
        """
        Standalone method to clean up expired call-times from query records,
        since otherwise only the active record is cleaned.
        
        This is too expensive to do with every operation, and mongodb
        does not allow multiple modifications to a single field in a single
        update operation (ie, we can't do $push and $pull in one update),
        so it should probably be done asynchronously at fixed intervals.
        """
        
        self.logger.debug('')
        
        now = time.time()
        
        #clean out the times array
        self.col.update({'times': {'$exists': True}},
                        {'$pull': {'times': {'$lt': now - self.history}}})
        #now delete any with no times
        self.col.remove({'times': {'$size': 0}})
#......... part of the code omitted here .........
Developer: zdenekmaxa, Project: DAS, Lines: 103, Source: das_analytics_db.py
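
For reference, the document shape that add_query maintains for each (qhash, dhash) pair looks roughly like the following; the field values are illustrative, not taken from a real database:

# Illustrative analytics record (values are made up):
record = {
    'query':      'site=T1_CH_CERN',            # raw DAS-QL query text
    'mongoquery': '<encode_mongo_query output>',
    'qhash':      '<genkey(mongoquery)>',
    'dhash':      '<genkey(query)>',
    'times':      [1331712000.0, 1331715600.0], # call timestamps; entries older
                                                # than now - history are $pull-ed,
                                                # new calls are $push-ed
}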

Example 3: DASMapping

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]

#......... part of the code omitted here .........
        self.col.remove(spec)

    def add(self, record):
        """
        Add new record into mapping DB. Example of URI record

        .. doctest::

            {
             system:dbs, 
             urn : listBlocks, 
             url : "http://a.b.com/api"
             params : [
                 {"apiversion":1_2_2, test:"*"}
             ]
             daskeys: [
                 {"key" : "block", "map":"block.name", "pattern":""}
             ]
             das2api: [
                 {"das_key":"site", "api_param":"se", 
                       "pattern":"re.compile('^T[0-3]_')"}
             ]
            }

        Example of notation record:

        .. doctest::

             notations: [
                 {"notation" : "storage_element_name", "map":"site", "api": ""},
             ]
        """
        msg = "record=%s" % record
        self.logger.debug(msg)
        self.col.insert(record)
        index = None
        if record.has_key("urn"):
            index = [("system", DESCENDING), ("daskeys", DESCENDING), ("urn", DESCENDING)]
        elif record.has_key("notations"):
            index = [("system", DESCENDING), ("notations.api_param", DESCENDING)]
        elif record.has_key("presentation"):
            index = []
        else:
            msg = "Invalid record %s" % record
            raise Exception(msg)
        if index:
            create_indexes(self.col, index)

    # ==================
    # Informational APIs
    # ==================
    def list_systems(self):
        """
        List all DAS systems.
        """
        cond = {"system": {"$ne": None}}
        gen = (row["system"] for row in self.col.find(cond, ["system"]))
        return list(set(gen2list(gen)) & set(self.services))

    def list_apis(self, system=None):
        """
        List all APIs.
        """
        if self.apicache and self.apicache.has_key(system):
            return self.apicache[system]
        cond = {"urn": {"$ne": None}}
Developer: zdenekmaxa, Project: DAS, Lines: 70, Source: das_mapping_db.py
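
The create_indexes helper used above is DAS-internal; in plain PyMongo the equivalent is roughly the call below (create_index is idempotent, so repeated calls are safe). The connection URI and database/collection names here are placeholders:

from pymongo import MongoClient, DESCENDING

client = MongoClient('mongodb://localhost:27017')  # hypothetical dburi
col = client['mapping']['db']                      # hypothetical db/collection
# No-op if an identical index already exists:
col.create_index([('system', DESCENDING), ('daskeys', DESCENDING),
                  ('urn', DESCENDING)])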

Example 4: DASMongocache

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]
class DASMongocache(object):
    """
    DAS cache based MongoDB. 
    """
    def __init__(self, config):
        self.emptyset_expire = expire_timestamp(\
            config['das'].get('emptyset_expire', 5))
        self.dburi   = config['mongodb']['dburi']
        self.cache_size = config['mongodb']['bulkupdate_size']
        self.dbname  = config['dasdb']['dbname']
        self.verbose = config['verbose']
        self.logger  = PrintManager('DASMongocache', self.verbose)
        self.mapping = config['dasmapping']

        self.conn    = db_connection(self.dburi)
        self.mdb     = self.conn[self.dbname]
        self.col     = self.mdb[config['dasdb']['cachecollection']]
        self.mrcol   = self.mdb[config['dasdb']['mrcollection']]
        self.merge   = self.mdb[config['dasdb']['mergecollection']]
        self.gfs     = db_gridfs(self.dburi)

        self.logdb   = DASLogdb(config)

        self.das_internal_keys = ['das_id', 'das', 'cache_id', 'qhash']

        msg = "%[email protected]%s" % (self.dburi, self.dbname)
        self.logger.info(msg)

        self.add_manipulator()

        # ensure that we have the following indexes
        index_list = [('das.expire', ASCENDING), ('das_id', ASCENDING),
                      ('das.system', ASCENDING),
                      ('qhash', DESCENDING),
                      ('das.empty_record', ASCENDING)]
        create_indexes(self.col, index_list)
        index_list = [('das.expire', ASCENDING), ('das_id', ASCENDING),
                      ('qhash', DESCENDING),
                      ('das.empty_record', ASCENDING), ('das.ts', ASCENDING)]
        create_indexes(self.merge, index_list)
        
    def add_manipulator(self):
        """
        Add DAS-specific MongoDB SON manipulator to perform
        conversion of inserted data into DAS cache.
        """
        das_son_manipulator = DAS_SONManipulator()
        self.mdb.add_son_manipulator(das_son_manipulator)
        msg = "DAS_SONManipulator %s" \
        % das_son_manipulator
        self.logger.debug(msg)

    def similar_queries(self, dasquery):
        """
        Check if we have query results in cache whose conditions are
        superset of provided query. The method only works for single
        key whose value is substring of value in input query.
        For example, if cache contains records about T1 sites, 
        then input query T1_CH_CERN is subset of results stored in cache.
        """
        spec = dasquery.mongo_query.get('spec', {})
        cond = {'query.spec.key': {'$in' : spec.keys()}, 'qhash':dasquery.qhash}
        for row in self.col.find(cond):
            found_query = DASQuery(row['query'])
            if  dasquery.qhash == found_query.qhash:
                msg = "%s similar to %s" % (dasquery, found_query)
                self.logger.info(msg)
                return found_query
        return False
    
    def get_superset_keys(self, key, value):
        """
        This is a special-case version of similar_keys,
        intended for analysers that want to quickly
        find possible superset queries of a simple
        query of the form key=value.
        """
        
        msg = "%s=%s" % (key, value)
        self.logger.debug(msg)
        cond = {'query.spec.key': key}
        for row in self.col.find(cond):
            mongo_query = decode_mongo_query(row['query'])
            for thiskey, thisvalue in mongo_query.iteritems():
                if thiskey == key:
                    if fnmatch.fnmatch(value, thisvalue):
                        yield thisvalue

    def get_fields(self, dasquery):
        "Prepare fields to extract from MongoDB"
        fields     = dasquery.mongo_query.get('fields', None)
        if  fields == ['records']:
            fields = None # look-up all records
        filters    = dasquery.filters
        cond       = {}
        if  filters:
            new_fields = []
            for dasfilter in filters:
                if  dasfilter == 'unique':
                    continue
#......... part of the code omitted here .........
Developer: zdenekmaxa, Project: DAS, Lines: 103, Source: das_mongocache.py
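
get_superset_keys relies on shell-style matching: fnmatch.fnmatch(value, pattern) treats the cached query value as the pattern, so a cached wildcard query such as 'T1*' is recognized as a superset of the concrete query value 'T1_CH_CERN'. A quick illustration:

import fnmatch

print(fnmatch.fnmatch('T1_CH_CERN', 'T1*'))  # True  -> cached 'T1*' is a superset
print(fnmatch.fnmatch('T2_US_MIT', 'T1*'))   # False -> no match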

Example 5: DASParserDB

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]
class DASParserDB(object):
    """
    Caching layer for the PLY parser.
    """
    def __init__(self, config):
        self.verbose  = config['verbose']
        self.logger   = PrintManager('DASParserDB', self.verbose)
        self.dburi    = config['mongodb']['dburi']
        self.dbname   = config['parserdb']['dbname']
        self.sizecap  = config['parserdb'].get('sizecap', 5*1024*1024)
        self.colname  = config['parserdb']['collname']
        
        msg = "DASParserCache::__init__ %[email protected]%s" % (self.dburi, self.dbname)
        self.logger.info(msg)
        
        self.col = None
        self.create_db()

    def create_db(self):
        """
        Create db collection
        """
        conn = db_connection(self.dburi)
        dbn  = conn[self.dbname]
        if  self.colname not in dbn.collection_names():
            dbn.create_collection(self.colname, capped=True, size=self.sizecap)
        self.col = dbn[self.colname]

    def lookup_query(self, rawtext):
        """
        Check the parser cache for a given rawtext query.
        Search is done with the hash of this string.
        
        Returns a tuple (status, value) for the cases
        (PARSERCACHE_VALID, mongo_query) - valid query found
        (PARSERCACHE_INVALID, error) - error message for invalid query
        (PARSERCACHE_NOTFOUND, None) - not in the cache
        """
        result = self.col.find_one({'hash':genkey(rawtext)},
                        fields=['query', 'error'])

        if result and result['query']:
            if self.verbose:
                self.logger.debug("DASParserCache: found valid %s->%s" %\
                                  (rawtext, result['query']))
            
            query = decode_mongo_query(result['query'])
            return (PARSERCACHE_VALID, query)
        elif result and result['error']:
            if self.verbose:
                self.logger.debug("DASParserCache: found invalid %s->%s" %\
                                  (rawtext, result['error']))
            return (PARSERCACHE_INVALID, result['error'])
        else:
            if self.verbose:
                self.logger.debug("DASParserCache: not found %s" %\
                                  (rawtext))
            return (PARSERCACHE_NOTFOUND, None)

    def insert_valid_query(self, rawtext, query):
        "Insert a query that was successfully transformed"	
        self._insert_query(rawtext, query, None)

    def insert_invalid_query(self, rawtext, error):
        "Insert the error message for an invalid query"
        self._insert_query(rawtext, None, error)

    def _insert_query(self, rawtext, query, error):
        """Internal method to insert a query"""
        if  self.verbose:
            self.logger.debug("DASParserCache: insert %s->%s/%s" %\
	                          (rawtext, query, error))
        # since MongoDB does not support insertion of $ sign in queries
        # we need to encode inserted query
        if  query:
            encquery = encode_mongo_query(query)
        else:
            encquery = ""
        self.col.insert({'raw':rawtext, 'hash':genkey(rawtext),
                         'query':encquery, 'error':str(error)})
Developer: zdenekmaxa, Project: DAS, Lines: 82, Source: das_parsercache.py
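
Because the parser cache is a capped collection, MongoDB evicts the oldest entries automatically once the size cap is reached, so no explicit cleanup pass is needed. A standalone sketch of the same setup in current PyMongo (the URI and names are placeholders; collection_names() in the example above is the older API, renamed list_collection_names() in modern PyMongo):

from pymongo import MongoClient

conn = MongoClient('mongodb://localhost:27017')
dbn = conn['parserdb']
if 'queries' not in dbn.list_collection_names():
    # Oldest documents are discarded automatically once the 5 MB cap is hit:
    dbn.create_collection('queries', capped=True, size=5 * 1024 * 1024)
col = dbn['queries']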

Example 6: DASMongocache

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]

#......... part of the code omitted here .........
    def get_dataset_hashes(self, dasquery):
        "Get dataset hashes from DBS database"
        spec = dasquery.mongo_query.get('spec', {})
        inst = dasquery.instance
        conn = db_connection(self.dburi)
        if  spec and inst:
            dataset = spec.get('dataset.name', None)
            if  dataset:
                if  dataset.find('*') != -1:
                    cond = {'dataset':re.compile(dataset.replace('*', '.*'))}
                else:
                    cond = {'dataset': dataset}
                for row in conn['dbs'][inst].find(cond):
                    if  'qhash' in row:
                        yield row['qhash']

    def check_datasets(self, dasquery):
        "Check dataset presence in DAS cache for given das query"
        hashes = [r for r in self.get_dataset_hashes(dasquery)]
        if  hashes:
            spec = {'qhash': {'$in': hashes}}
            if  len(hashes) == self.merge.find(spec, **PYMONGO_OPTS).count():
                dasquery._hashes = hashes

    def get_superset_keys(self, key, value):
        """
        This is a special-case version of similar_keys,
        intended for analysers that want to quickly
        find possible superset queries of a simple
        query of the form key=value.
        """

        msg = "%s=%s" % (key, value)
        self.logger.debug(msg)
        cond = {'query.spec.key': key}
        for row in self.col.find(cond, **PYMONGO_OPTS):
            mongo_query = decode_mongo_query(row['query'])
            for thiskey, thisvalue in mongo_query.items():
                if thiskey == key:
                    if fnmatch.fnmatch(value, thisvalue):
                        yield thisvalue

    def get_fields(self, dasquery):
        "Prepare fields to extract from MongoDB"
        fields     = dasquery.mongo_query.get('fields', [])
        if  fields and 'records' in fields:
            fields = None # look-up all records
        filters    = dasquery.filters
        cond       = {}
        if  filters:
            new_fields = []
            for dasfilter in filters:
                if  dasfilter == 'unique':
                    continue
                if  fields and dasfilter not in fields and \
                    dasfilter not in new_fields:
                    if  dasfilter.find('=') == -1 and dasfilter.find('<') == -1\
                    and dasfilter.find('>') == -1:
                        new_fields.append(dasfilter)
                    else:
                        cond = parse_filters(dasquery.mongo_query)
            if  not new_fields and fields:
                new_fields = list(fields)
            return new_fields, cond
        return fields, cond
Developer: dmwm, Project: DAS, Lines: 69, Source: das_mongocache.py
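
In get_dataset_hashes above, a DAS wildcard pattern becomes a MongoDB regex condition simply by replacing * with .*, while exact names fall back to a plain equality match. A small self-contained sketch of that conversion:

import re

def dataset_condition(dataset):
    """Build the Mongo spec used by get_dataset_hashes (a sketch)."""
    if '*' in dataset:
        return {'dataset': re.compile(dataset.replace('*', '.*'))}
    return {'dataset': dataset}

print(dataset_condition('/ZMM*/*/RECO'))        # regex condition
print(dataset_condition('/ZMM/Summer11/RECO'))  # exact-match condition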

Example 7: QLManager

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]
class QLManager(object):
    """
    DAS QL manager.
    """
    def __init__(self, config=None):
        if  not config:
            config = das_readconfig()
        if  not config.has_key('dasmapping'):
            config['dasmapping'] = DASMapping(config)
        if  not config.has_key('dasanalytics'):
            config['dasanalytics'] = DASAnalytics(config)
        if  not config['dasmapping'].check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.map         = config['dasmapping']
        self.analytics   = config['dasanalytics']
        self.dasservices = config['services']
        self.daskeysmap  = self.map.daskeys()
        self.operators   = list(das_operators())
        self.daskeys     = list(das_special_keys())
        self.verbose     = config['verbose']
        self.logger      = PrintManager('QLManger', self.verbose)
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)
        parserdir   = config['das']['parserdir']
        self.dasply = DASPLY(parserdir, self.daskeys, self.dasservices, 
                verbose=self.verbose)

        self.enabledb = config['parserdb']['enable']
        if  self.enabledb:
            self.parserdb = DASParserDB(config)

    def parse(self, query):
        """
        Parse input query and return query in MongoDB form.
        Optionally parsed query can be written into analytics DB.
        """
        mongo_query = self.mongo_query(query)
        self.convert2skeys(mongo_query)
        return mongo_query

    def add_to_analytics(self, query, mongo_query):
        "Add DAS query to analytics DB"
        self.analytics.add_query(query, mongo_query)

    def mongo_query(self, query):
        """
        Return mongo query for provided input query
        """
        # NOTE: somehow I need to keep build call just before using
        # PLY parser, otherwise it fails to parse.
        self.dasply.build()
        if  self.verbose:
            msg = "input query='%s'" % query
            self.logger.debug(msg)
            self.dasply.test_lexer(query)
        if  self.enabledb:
            status, value = self.parserdb.lookup_query(query)
            if status == PARSERCACHE_VALID and \
                len(last_key_pattern.findall(query)) == 0:
                mongo_query = value
            elif status == PARSERCACHE_INVALID:
                raise Exception(value)
            else:
                try:
                    ply_query = self.dasply.parser.parse(query)
                    mongo_query = ply2mongo(ply_query)
                    self.parserdb.insert_valid_query(query, mongo_query)
                except Exception as exp:
                    self.parserdb.insert_invalid_query(query, exp)
                    print "Input query=%s" % query
                    raise exp
        else:
            try:
                ply_query   = self.dasply.parser.parse(query)
                mongo_query = ply2mongo(ply_query)
            except Exception as exc:
                msg = "Fail to convert input query='%s' into MongoDB format" \
                    % query
                print_exc(msg, print_traceback=False)
                raise exc
        if  set(mongo_query.keys()) & set(['fields', 'spec']) != \
                set(['fields', 'spec']):
            raise Exception('Invalid MongoDB query %s' % mongo_query)
        if  not mongo_query['fields'] and len(mongo_query['spec'].keys()) > 1:
            raise Exception(ambiguous_msg(query, mongo_query['spec'].keys()))
        for key, val in mongo_query['spec'].iteritems():
            if  isinstance(val, list):
                raise Exception(ambiguos_val_msg(query, key, val))
        return mongo_query

    def convert2skeys(self, mongo_query):
        """
        Convert DAS input keys into DAS selection keys.
        """
        if  not mongo_query['spec']:
            for key in mongo_query['fields']:
                for system in self.map.list_systems():
                    mapkey = self.map.find_mapkey(system, key)
#......... part of the code omitted here .........
Developer: zdenekmaxa, Project: DAS, Lines: 103, Source: das_parser.py
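
Stripped of DAS specifics, the parserdb branch of mongo_query is a classic look-aside cache: consult the cache, fail fast on known-bad input, otherwise parse and store the result. A generic sketch, where cache and parse_fn are stand-ins for DASParserDB and the PLY parser, and the status constants carry illustrative values:

PARSERCACHE_VALID, PARSERCACHE_INVALID, PARSERCACHE_NOTFOUND = 1, 2, 3

def cached_parse(query, cache, parse_fn):
    status, value = cache.lookup_query(query)
    if status == PARSERCACHE_VALID:
        return value                    # cache hit
    if status == PARSERCACHE_INVALID:
        raise ValueError(value)         # known-bad query: fail fast
    try:
        result = parse_fn(query)        # cache miss: parse for real
        cache.insert_valid_query(query, result)
        return result
    except Exception as exp:
        cache.insert_invalid_query(query, exp)
        raise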

Example 8: DASMapping

# Required import: from DAS.utils.logger import PrintManager [as alias]
# Or: from DAS.utils.logger.PrintManager import debug [as alias]

#......... part of the code omitted here .........
    def remove(self, spec):
        """
        Remove record in DAS Mapping DB for provided Mongo spec.
        """
        self.col.remove(spec)

    def add(self, record):
        """
        Add new record into mapping DB. Example of URI record

        .. doctest::

            {
             system:dbs, 
             urn : listBlocks, 
             url : "http://a.b.com/api"
             params : [{"apiversion":1_2_2, se:"*"}]
             lookup : block
             das_map: [
                 {"das_key":"block", "rec_key":"block.name"},
                 {"das_key":"site", "rec_key":"site.name", "api_arg":"se", "pattern":"^T[0-3]_},
             ]
            }

        Example of notation record:

        .. doctest::

             notations: [
                 {"api_output" : "storage_element_name", "rec_key":"site", "api": ""},
             ]
        """
        msg = "record=%s" % record
        self.logger.debug(msg)
        self.col.insert(record)
        self.init_dasmapscache([record])

    # ==================
    # Informational APIs
    # ==================
    def dbs_global_instance(self, system=None):
        "Retrive from mapping DB DBS url and extract DBS instance"
        if not system:
            system = self.main_dbs
        url = self.dbs_url(system)
        return get_dbs_instance(url)

    def dbs_url(self, system=None):
        "Retrive from mapping DB DBS url"
        if not system:
            system = self.main_dbs
        systems = self.list_systems()
        dbses = set(["dbs", "dbs3"])
        if dbses & set(systems) != dbses:
            # use caching only when we operate with single DBS
            if self.dbs_global_url:
                return self.dbs_global_url
        url = None
        for srv in systems:
            if srv == system:
                apis = self.list_apis(srv)
                url = self.api_info(srv, apis[0])["url"]
                url = parse_dbs_url(srv, url)
                self.dbs_global_url = url
                return url
        return url
Developer: ktf, Project: DAS, Lines: 70, Source: das_mapping_db.py
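
The guard dbses & set(systems) != dbses in dbs_url enables URL caching only when the deployment does not run both DBS flavors at once; a quick set-algebra illustration:

dbses = {"dbs", "dbs3"}
print(dbses & {"dbs", "phedex"} != dbses)  # True  -> single DBS, caching allowed
print(dbses & {"dbs", "dbs3"} != dbses)    # False -> both present, skip caching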


Note: The DAS.utils.logger.PrintManager.debug method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. For redistribution and use, please refer to the corresponding project's license. Do not reproduce this article without permission.