This article collects typical code examples showing how the Python function socorro.lib.util.reportExceptionAndContinue is used. If you are unsure what reportExceptionAndContinue does or how to call it, the curated examples below should help.
The 14 code examples of reportExceptionAndContinue shown below are ordered by popularity.
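Before the individual examples, here is a minimal sketch of the pattern they all share: risky work is wrapped in try/except, and on failure the caller hands its logger to reportExceptionAndContinue, which logs the active exception and its traceback, then returns so the caller can carry on. The logger name and the per-item work below are illustrative assumptions, not code from the Socorro source:

import logging
from socorro.lib import util as sutil

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("example")  # hypothetical logger name

def handle(item):
    # hypothetical per-item work that may fail
    return 1 / item

def process_items(items):
    for item in items:
        try:
            handle(item)
        except Exception:
            # log the current exception and traceback, then keep going
            sutil.reportExceptionAndContinue(logger)

process_items([1, 0, 2])  # the ZeroDivisionError on 0 is logged; 2 is still processed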
Example 1: queuingThreadFunc
def queuingThreadFunc(self):
    self.logger.debug('queuingThreadFunc start')
    try:
        try:
            for aJob in self.jobSourceIterator():  # may never raise StopIteration
                if aJob is None:
                    self.logger.info("there is nothing to do. Sleeping for 7 seconds")
                    self.responsiveSleep(7)
                    continue
                self.quitCheck()
                try:
                    self.logger.debug("queuing standard job %s", aJob)
                    self.workerPool.newTask(self.retryTaskFuncWrapper, (aJob,))
                except Exception:
                    self.logger.warning('%s has failed', aJob)
                    sutil.reportExceptionAndContinue(self.logger)
        except Exception:
            self.logger.warning('The jobSourceIterator has failed')
            sutil.reportExceptionAndContinue(self.logger)
    except KeyboardInterrupt:
        self.logger.debug('queuingThread gets quit request')
    finally:
        self.quit = True
        self.logger.debug("we're quitting queuingThread")
        self.logger.debug("waiting for standard worker threads to stop")
        self.workerPool.waitForCompletion()
        self.logger.debug("all worker threads stopped")
Example 2: get
def get(self, **kwargs):
    """Return a single crash report from its UUID. """
    filters = [
        ("uuid", None, "str"),
    ]
    params = external_common.parse_arguments(filters, kwargs)
    day = int(params.uuid[-2:])
    month = int(params.uuid[-4:-2])
    # assuming we won't use this after year 2099
    year = int("20%s" % params.uuid[-6:-4])
    crash_date = datetime.date(year=year, month=month, day=day)
    logger.debug("Looking for crash %s during day %s" % (params.uuid,
                                                         crash_date))
    sql = """/* socorro.external.postgresql.crash.Crash.get */
        SELECT reports.email, reports.url, reports.addons_checked,
            ( SELECT reports_duplicates.duplicate_of
              FROM reports_duplicates
              WHERE reports_duplicates.uuid = reports.uuid
            ) as duplicate_of
        FROM reports
        WHERE reports.uuid=%(uuid)s
        AND reports.success IS NOT NULL
        AND utc_day_is( reports.date_processed, %(crash_date)s)
    """
    sql_params = {
        "uuid": params.uuid,
        "crash_date": crash_date
    }
    results = []
    # Creating the connection to the DB
    self.connection = self.database.connection()
    cur = self.connection.cursor()
    try:
        results = db.execute(cur, sql, sql_params)
    except psycopg2.Error:
        util.reportExceptionAndContinue(logger)
    json_result = {
        "total": 0,
        "hits": []
    }
    for crash in results:
        row = dict(zip((
            "email",
            "url",
            "addons_checked",
            "duplicate_of"), crash))
        json_result["hits"].append(row)
    json_result["total"] = len(json_result["hits"])
    self.connection.close()
    return json_result
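A note on the slicing in this example: the method assumes Socorro crash UUIDs end in a YYMMDD-style date suffix, so the crash date can be recovered from the last six characters without a database lookup. A hypothetical UUID value makes the arithmetic concrete:

uuid = "7d6c7d33-1f42-4d50-a316-582d22130117"  # hypothetical UUID value
day = int(uuid[-2:])               # 17
month = int(uuid[-4:-2])           # 1
year = int("20%s" % uuid[-6:-4])   # 2013 -- hence the year-2099 caveat in the code
# -> datetime.date(2013, 1, 17)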
Example 3: save_raw
def save_raw(self, ooid, jsonData, dump, currentTimestamp):
    try:
        if jsonData.legacy_processing == LegacyThrottler.DISCARD:
            return CrashStorageSystem.DISCARDED
    except KeyError:
        pass
    try:
        #jsonDataAsString = json.dumps(jsonData)
        jsonFileHandle, dumpFileHandle = self.localFS.newEntry(ooid, self.hostname, currentTimestamp)
        try:
            dumpFileHandle.write(dump)
            json.dump(jsonData, jsonFileHandle)
        finally:
            dumpFileHandle.close()
            jsonFileHandle.close()
        self.logger.info('saved - %s', ooid)
        return CrashStorageSystem.OK
    except Exception:
        sutil.reportExceptionAndContinue(self.logger)
        self.logger.warning('local storage has failed: trying fallback storage for: %s', ooid)
    try:
        #jsonDataAsString = json.dumps(jsonData)
        jsonFileHandle, dumpFileHandle = self.fallbackFS.newEntry(ooid, self.hostname, currentTimestamp)
        try:
            dumpFileHandle.write(dump)
            json.dump(jsonData, jsonFileHandle)
        finally:
            dumpFileHandle.close()
            jsonFileHandle.close()
        return CrashStorageSystem.OK
    except Exception:
        sutil.reportExceptionAndContinue(self.logger)
        self.logger.critical('fallback storage has failed: dropping %s on the floor', ooid)
Example 4: resubmit
def resubmit(conf, jds=jds, hbc=hbc, open=open):
    logger = conf.logger
    logger.info('creating hbase connection: host: %s, port: %s', conf.hbaseHost, conf.hbasePort)
    hbaseConnection = hbc.HBaseConnectionForCrashReports(conf.hbaseHost,
                                                         conf.hbasePort,
                                                         conf.hbaseTimeout)
    logger.info('creating json/dump store object: root: %s', conf.hbaseFallbackFS)
    fallbackStorage = jds.JsonDumpStorage(root=conf.hbaseFallbackFS,
                                          maxDirectoryEntries=conf.hbaseFallbackDumpDirCount,
                                          jsonSuffix=conf.jsonFileSuffix,
                                          dumpSuffix=conf.dumpFileSuffix,
                                          dumpGID=conf.hbaseFallbackDumpGID,
                                          dumpPermissions=conf.hbaseFallbackDumpPermissions,
                                          dirPermissions=conf.hbaseFallbackDirPermissions,
                                          )
    processedCrashList = []
    for uuid in fallbackStorage.destructiveDateWalk():
        logger.info('found uuid: %s', uuid)
        try:
            jsonFile = open(fallbackStorage.getJson(uuid))
            try:
                jsonContents = json.load(jsonFile)
            finally:
                jsonFile.close()
            dumpFile = open(fallbackStorage.getDump(uuid))
            try:
                dumpContents = dumpFile.read()
            finally:
                dumpFile.close()
            logger.debug('pushing %s to hbase', uuid)
            hbaseConnection.put_json_dump(uuid, jsonContents, dumpContents)
            processedCrashList.append(uuid)
        except Exception:
            sutil.reportExceptionAndContinue(logger)
Example 5: cleanup
def cleanup(self):
    for name, crashStore in self.iteritems():
        try:
            crashStore.close()
            self.logger.debug("crashStore for %s closed", name)
        except:
            sutil.reportExceptionAndContinue(self.logger)
Example 6: scrapeB2G
def scrapeB2G(config, cursor, product_name, urllib=urllib2, date=None):
    month = date.strftime('%m')
    b2g_url = '%s/%s/%s/' % (config.base_url, product_name,
                             'manifests')
    try:
        day = date.strftime('%d')
        dir_prefix = '%s-%s-%s' % (date.year, month, day)
        # I have no idea what this first level of directories means :/
        # TODO get info about that and update this search
        version_dirs = getLinks(b2g_url, startswith='1.', urllib=urllib)
        for version_dir in version_dirs:
            # /1.0.0/2013/01/2013-01-27-07/*.json
            prod_url = '%s/%s/%s/%s/' % (b2g_url, version_dir, date.year, month)
            nightlies = getLinks(prod_url, startswith=dir_prefix, urllib=urllib)
            for nightly in nightlies:
                for info in getB2G(nightly, prod_url):
                    (platform, repository, version, kvpairs) = info
                    build_id = kvpairs['buildid']
                    build_type = kvpairs['build_type']
                    buildutil.insert_build(cursor, product_name, version, platform,
                                           build_id, build_type, kvpairs.get('beta_number', None), repository,
                                           ignore_duplicates=True)
    except urllib.URLError:
        util.reportExceptionAndContinue(logger)
Example 7: dailyUrlDump
def dailyUrlDump(config, sdb=sdb,
                 gzipped_csv_files=gzipped_csv_files,
                 IdCache=IdCache,
                 write_row=write_row,
                 process_crash=process_crash,
                 logger=logger):
    dbConnectionPool = sdb.DatabaseConnectionPool(config, logger)
    try:
        try:
            db_conn, db_cursor = dbConnectionPool.connectionCursorPair()
            with gzipped_csv_files(config) as csv_file_handles_tuple:
                headers_not_yet_written = True
                id_cache = IdCache(db_cursor)
                sql_parameters = setup_query_parameters(config)
                logger.debug("config.day = %s; now = %s; yesterday = %s",
                             config.day,
                             sql_parameters.now_str,
                             sql_parameters.yesterday_str)
                sql_query = sql % sql_parameters
                logger.debug("SQL is: %s", sql_query)
                for crash_row in sdb.execute(db_cursor, sql_query):
                    if headers_not_yet_written:
                        write_row(csv_file_handles_tuple,
                                  [x[0] for x in db_cursor.description])
                        headers_not_yet_written = False
                    column_value_list = process_crash(crash_row, id_cache)
                    write_row(csv_file_handles_tuple,
                              column_value_list)
                # end for loop over each crash_row
        finally:
            dbConnectionPool.cleanup()
    except:
        util.reportExceptionAndContinue(logger)
Example 8: __init__
def __init__(self, host, port, timeout,
             thrift=Thrift,
             tsocket=TSocket,
             ttrans=TTransport,
             protocol=TBinaryProtocol,
             ttp=ttypes,
             client=Client,
             column=ColumnDescriptor,
             mutation=Mutation,
             logger=utl.SilentFakeLogger()):
    self.host = host
    self.port = port
    self.timeout = timeout
    self.thriftModule = thrift
    self.tsocketModule = tsocket
    self.transportModule = ttrans
    self.protocolModule = protocol
    self.ttypesModule = ttp
    self.clientClass = client
    self.columnClass = column
    self.mutationClass = mutation
    self.logger = logger
    self.hbaseThriftExceptions = (self.ttypesModule.IOError,
                                  #self.ttypesModule.IllegalArgument,
                                  #self.ttypesModule.AlreadyExists,
                                  self.thriftModule.TException,
                                  #HBaseClientException,
                                  socket.timeout,
                                  socket.error
                                  )
    try:
        self.make_connection(timeout=self.timeout)
    except NoConnectionException:
        utl.reportExceptionAndContinue(logger=self.logger)
Example 9: setup_query_parameters
def setup_query_parameters(config):
    now = config.day + dt.timedelta(1)
    now_str = now.strftime("%Y-%m-%d")
    yesterday = config.day
    yesterday_str = yesterday.strftime("%Y-%m-%d")
    logger.debug("config.day = %s; now = %s; yesterday = %s", config.day, now, yesterday)
    prod_phrase = ""
    try:
        if config.product != "":
            if "," in config.product:
                prod_list = [x.strip() for x in config.product.split(",")]
                prod_phrase = "and r.product in ('%s')" % "','".join(prod_list)
            else:
                prod_phrase = "and r.product = '%s'" % config.product
    except Exception:
        util.reportExceptionAndContinue(logger)
    ver_phrase = ""
    try:
        if config.version != "":
            if "," in config.version:
                ver_list = [x.strip() for x in config.version.split(",")]
                ver_phrase = "and r.version in ('%s')" % "','".join(ver_list)
            else:
                ver_phrase = "and r.version = '%s'" % config.version
    except Exception:
        util.reportExceptionAndContinue(logger)
    return util.DotDict(
        {"now_str": now_str, "yesterday_str": yesterday_str, "prod_phrase": prod_phrase, "ver_phrase": ver_phrase}
    )
Example 10: doSubmission
def doSubmission(ooidTuple):
    logger.debug("received: %s", str(ooidTuple))
    try:
        sourceStorage = crashStoragePoolForSource.crashStorage()
        destStorage = crashStoragePoolForDest.crashStorage()
        ooid = ooidTuple[0]
        try:
            logger.debug("trying to fetch %s", ooid)
            jsonContents = sourceStorage.get_meta(ooid)
        except ValueError:
            logger.warning("the json for %s is degenerate and cannot be loaded"
                           " - saving empty json", ooid)
            jsonContents = {}
        dumpContents = sourceStorage.get_raw_dump(ooid)
        if conf.dryrun:
            logger.info("dry run - pushing %s to dest", ooid)
        else:
            logger.debug("pushing %s to dest", ooid)
            result = destStorage.save_raw(ooid, jsonContents, dumpContents)
            if result == cstore.CrashStorageSystem.ERROR:
                return iwf.FAILURE
            elif result == cstore.CrashStorageSystem.RETRY:
                return iwf.RETRY
            try:
                sourceStorage.quickDelete(ooid)
            except Exception:
                sutil.reportExceptionAndContinue(logger)
        return iwf.OK
    except Exception:
        sutil.reportExceptionAndContinue(logger)
        return iwf.FAILURE
Example 11: scrapeReleases
def scrapeReleases(config, cursor, product_name, urllib=urllib2):
    prod_url = "%s/%s/" % (config.base_url, product_name)
    # releases are sometimes in nightly, sometimes in candidates dir.
    # look in both.
    for directory in ("nightly", "candidates"):
        if not getLinks(prod_url, startswith=directory, urllib=urllib):
            logger.debug("Dir %s not found for %s" % (directory, product_name))
            continue
        url = "%s/%s/%s/" % (config.base_url, product_name, directory)
        try:
            releases = getLinks(url, endswith="-candidates/", urllib=urllib)
            for release in releases:
                for info in getRelease(release, url):
                    (platform, version, build_number, kvpairs) = info
                    build_type = "Release"
                    beta_number = None
                    repository = "mozilla-release"
                    if "b" in version:
                        build_type = "Beta"
                        version, beta_number = version.split("b")
                        repository = "mozilla-beta"
                    build_id = kvpairs["buildID"]
                    buildutil.insert_build(
                        cursor, product_name, version, platform, build_id, build_type, beta_number, repository
                    )
        except urllib.URLError:
            util.reportExceptionAndContinue(logger)
Example 12: load_json_transform_rules
def load_json_transform_rules(self):
    sql = ("select predicate, predicate_args, predicate_kwargs, "
           "       action, action_args, action_kwargs "
           "from transform_rules "
           "where "
           "    category = 'processor.json_rewrite'")
    try:
        rules = sdb.transaction_execute_with_retry(self.databaseConnectionPool,
                                                   sql)
    except Exception:
        sutil.reportExceptionAndContinue(logger)
        rules = [('socorro.processor.processor.json_equal_predicate',
                  '',
                  'key="ReleaseChannel", value="esr"',
                  'socorro.processor.processor.json_reformat_action',
                  '',
                  'key="Version", format_str="%(Version)sesr"'),
                 ('socorro.processor.processor.json_ProductID_predicate',
                  '',
                  '',
                  'socorro.processor.processor.json_Product_rewrite_action',
                  '',
                  '')]
    self.json_transform_rule_system.load_rules(rules)
    self.config.logger.info('done loading rules: %s',
                            str(self.json_transform_rule_system.rules))
Example 13: handler
def handler(req):
    global persistentStorage
    try:
        x = persistentStorage
    except NameError:
        persistentStorage = socorro.collector.initializer.createPersistentInitialization(configModule)
    logger = persistentStorage["logger"]
    config = persistentStorage["config"]
    collectObject = persistentStorage["collectObject"]
    logger.debug("handler invoked using subinterpreter: %s", req.interpreter)
    if req.method == "POST":
        try:
            req.content_type = "text/plain"
            theform = util.FieldStorage(req)
            dump = theform[config.dumpField]
            if not dump.file:
                return apache.HTTP_BAD_REQUEST
            currentTimestamp = dt.datetime.now()
            jsonDataDictionary = collectObject.makeJsonDictFromForm(theform)
            jsonDataDictionary["submitted_timestamp"] = currentTimestamp.isoformat()
            try:
                throttleable = int(jsonDataDictionary["Throttleable"])
            except KeyError:
                throttleable = 2
            if not throttleable or (throttleable and not collectObject.throttle(jsonDataDictionary)):
                fileSystemStorage = persistentStorage["standardFileSystemStorage"]
            elif throttleable == 2:
                fileSystemStorage = persistentStorage["deferredFileSystemStorage"]
            else:
                req.write("Discarded=1\n")
                return apache.OK
            uuid = ooid.createNewOoid(currentTimestamp, persistentStorage["config"].storageDepth)
            jsonFileHandle, dumpFileHandle = fileSystemStorage.newEntry(uuid, persistentStorage["hostname"], dt.datetime.now())
            try:
                collectObject.storeDump(dump.file, dumpFileHandle)
                collectObject.storeJson(jsonDataDictionary, jsonFileHandle)
            finally:
                dumpFileHandle.close()
                jsonFileHandle.close()
            req.write("CrashID=%s%s\n" % (config.dumpIDPrefix, uuid))
            return apache.OK
        except:
            logger.info("mod-python subinterpreter name: %s", req.interpreter)
            sutil.reportExceptionAndContinue(logger)
            #print >>sys.stderr, "Exception: %s" % sys.exc_info()[0]
            #print >>sys.stderr, sys.exc_info()[1]
            #print >>sys.stderr
            #sys.stderr.flush()
            return apache.HTTP_INTERNAL_SERVER_ERROR
    else:
        return apache.HTTP_METHOD_NOT_ALLOWED
Example 14: func
def func(paramsTuple):
    jsonFilePathName, binaryFilePathName = paramsTuple[0]
    with open(jsonFilePathName) as jsonFile:
        formData = json.load(jsonFile)
    if config.uniqueHang:
        try:
            hang_id = formData['HangId']
            if hang_id in existingHangIdCache:
                formData['HangId'] = existingHangIdCache[hang_id]
            else:
                # map the old hang id to a fresh uuid so paired crashes keep matching
                formData['HangId'] = existingHangIdCache[hang_id] = uuid.uuid4()
        except Exception:
            pass
    processTimeStatistic = statsPools.processTime.getStat()
    submittedCountStatistic = statsPools.submittedCount.getStat()
    try:
        processTimeStatistic.start()
        config.submissionFunc(formData, binaryFilePathName, config.url,
                              config.logger)
        submittedCountStatistic.increment()
    except Exception:
        sutil.reportExceptionAndContinue(sutil.FakeLogger())
        failureCountStatistic = statsPools.failureCount.getStat()
        failureCountStatistic.increment()
        return iwf.OK
    finally:
        processTimeStatistic.end()
    return iwf.OK