This article collects typical usage examples of the Python method Utils.PipelineLogger.PipelineLogger.log. If you are wondering what PipelineLogger.log does, how to use it, or want worked examples of it, the curated samples below should help. You can also look further into the class that defines it, Utils.PipelineLogger.PipelineLogger.
The following presents 15 code examples of PipelineLogger.log, sorted by popularity by default.
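Judging from the examples that follow, PipelineLogger.log is invoked as a class-level method with three positional arguments: a component name (such as 'root', 'converter', or 'manager'), a severity level, and a message string. The real Utils.PipelineLogger implementation is not reproduced on this page; the following is a minimal stand-in sketch, assuming a standard-library logging backend:

import logging

class PipelineLogger:
    """Minimal stand-in for Utils.PipelineLogger.PipelineLogger (interface inferred from the examples below)."""

    @staticmethod
    def log(component, level, message):
        # Route the message to a named logger at the requested severity.
        logger = logging.getLogger(component)
        log_fn = getattr(logger, level.lower(), logger.info)  # 'info', 'debug', 'error', 'exception', ...
        log_fn(message)

# Typical call, matching the examples on this page:
# PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))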
Example 1: createNewScanSession
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def createNewScanSession(self, down_most_folder, filelist):
    # Extract the parts of the folder path that are of interest
    try:
        folder = down_most_folder.replace(self.root_folder, "")
        folder_parts = folder.split("/")  # List containing each part/folder of the full path
        filename_parts = filelist[0].split("_")  # Take the first filename and split it into its parts
rid = folder_parts[1][-4:] # Get the last 4 characters
if re.search('[a-zA-Z]', rid) is not None:
rid = filename_parts[3]
if re.search('[a-zA-Z]', rid) is not None:
                PipelineLogger.log('root', 'error', 'File recurse error on folder; RID cannot be identified. - {0}, \n Filelist - {1}'.format(folder, filelist))
return None
scan_type = self.determineScanType(folder_parts[-3])
scan_date = folder_parts[-2].split('_')[0]
scan_time = folder_parts[-2].split('_', 1)[-1].replace("_", ":")
s_identifier = filename_parts[-2]
i_identifier = filename_parts[-1].split('.', 1)[0]
file_type = self.determineExtension(filename_parts)
download_folder = down_most_folder
raw_folder = '{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw'.format(sc.studyDatabaseRootDict[self.study], 'ADNI', scan_type, rid, scan_date, s_identifier, i_identifier)
newScanSession = ScanSession\
('ADNI', rid, scan_type, scan_date, scan_time,
s_identifier, i_identifier, download_folder, raw_folder, file_type)
if scan_type == 'unknown':
newScanSession.printObject()
return None
return newScanSession
    except Exception:
        # Any parsing failure means this session cannot be identified; skip it
        return None
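To make the parsing concrete, here is an illustration with a hypothetical download path and filename (all values invented; the real ADNI layout may differ):

# Hypothetical inputs (invented for illustration):
#   down_most_folder = self.root_folder + '/041_S_1234/MPRAGE/2011-06-15_08_30_12.0/I256738'
#   filelist[0]      = 'ADNI_041_S_1234_MR_MPRAGE_br_raw_20110615083012_S112233_I256738.dcm'
# The code above would then derive:
#   rid          -> '1234'        (last 4 chars of folder_parts[1])
#   scan_type    -> determineScanType('MPRAGE')
#   scan_date    -> '2011-06-15'
#   scan_time    -> '08:30:12.0'
#   s_identifier -> 'S112233'     (second-to-last filename part)
#   i_identifier -> 'I256738'     (last filename part, extension stripped)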
Example 2: checkMncFile
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def checkMncFile(self, mncFile):
cmd = Config.ConverterConfig.mincSource_exec + '; mincinfo ' + mncFile + ' | grep \"time\" '
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
    if not out:  # Empty output: mincinfo found no time dimension
        PipelineLogger.log('converter', 'debug', mncFile + ' does not have a time axis!')
os.remove(mncFile)
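The same check can be done without piping through grep; a sketch that filters the mincinfo output in Python instead (assumes the same Config.ConverterConfig.mincSource_exec setup string):

cmd = Config.ConverterConfig.mincSource_exec + '; mincinfo ' + mncFile
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
# Look for a 'time' dimension in the mincinfo listing
has_time_axis = any('time' in line for line in out.decode(errors='ignore').splitlines())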
Example 3: process
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def process(self, processingItem):
processingItemObj = ProcessingItemObj(processingItem)
matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj)
if not matching_t1:
        PipelineLogger.log('root', 'error', 'PET cannot be processed: no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date))
return 0
processed = ADNI_T1_Helper().checkProcessed(matching_t1)
if not processed:
PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1))
return 0
else:
PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
if processingItemObj.manual_xfm == '':
manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1)
processingItemObj.manual_xfm = manualXFM
elif processingItemObj.manual_xfm == 'Req_man_reg':
coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1)
if coregDone:
manualXFM = coregDone
setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id)
self.DBClient.executeNoResult(setPPTableSQL)
else:
self.PETHelper.requestCoreg(processingItemObj, matching_t1)
                PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. A request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
else:
manualXFM = processingItemObj.manual_xfm
if manualXFM:
self.processPET(processingItemObj, processed)
else:
            PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. A request to create one may have been added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
Example 4: restart_mongo
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def restart_mongo(self):
try:
if self.old_pid > 0:
            os.kill(self.old_pid, 9)  # 9 == SIGKILL
mongo_Cmd = '/data/data03/MongoDB/mongodb/bin/mongod --dbpath /data/data03/MongoDB/data/db/'
Popen(mongo_Cmd, shell=True)
except OSError as e:
PipelineLogger.log('root', 'exception', 'MongoDB cannot be stopped or started.\n {0}\n'.format(e))
Example 5: addTODB
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def addTODB(result):
if result['converted']:
        #### Add to the corresponding table
        # self.conversionTable.insertFromConvertionObj(convertionObj, self.version)
self.conversionTable.setConvertedTrue(result['obj'])
else:
PipelineLogger.log('manager', 'error', 'File conversion Error : {0} -> {1}. Moving to next...'.format(result['obj'].raw_folder, result['obj'].converted_folder))
self.conversionTable.setConvertedFailed(result['obj'])
Example 6: runNiak
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def runNiak(self, processingItemObj):
# Run Niak
if not processingItemObj.skip:
self.niak.process(processingItemObj)
elif processingItemObj.skip:
pass
    else:
        # Defensive branch; unreachable because the skip checks above are exhaustive
        PipelineLogger.log('manager', 'error', 'Error handling obj for processing - {0}'.format(processingItemObj))
return 0
Example 7: convert_dicom
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def convert_dicom(self, convertionObj):
rawFolder = convertionObj.raw_folder
outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(convertionObj.converted_folder, convertionObj.study,
convertionObj.rid, convertionObj.scan_date.replace('-', ''),
convertionObj.s_identifier, convertionObj.i_identifier,
convertionObj.scan_type)
self.createNewFolder(convertionObj.converted_folder) # Create output folder
tempFolder = convertionObj.converted_folder + '/../temp' # Generate path for temp folder
self.createNewFolder(tempFolder) # Create temp folder
# Move all the non-dicom stuff out of the original folder into tempFolder
otherFiles = self.removeOtherFilesInFolder(rawFolder, '.dcm', tempFolder)
# Run dcm2nii
cmd = Config.ConverterConfig.dcmToNii_exec + ' -a N -e N -p N -g N -o ' + tempFolder + '/ -v Y ' + rawFolder
PipelineLogger.log('converter', 'info',
'dcm2nii conversion starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
convertionObj.rid,
convertionObj.scan_date,
convertionObj.scan_type))
PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))
self.runShellCommand(cmd)
# Move all the non-dicom stuff back into the original folder
self.addBackOtherFiles(rawFolder, otherFiles, tempFolder)
    # Run nii2mnc on each NIfTI file produced by dcm2nii
    fake_command = '{0} {1} {2}/../'.format(Config.ConverterConfig.niiToMnc_exec, rawFolder, convertionObj.converted_folder)  # representative command, logged below for debugging only
PipelineLogger.log('converter', 'info',
'nii2mnc conversion starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
convertionObj.rid,
convertionObj.scan_date,
convertionObj.scan_type))
PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(fake_command))
iterator = 1
for niiFile in glob.glob(tempFolder + '/*.nii'):
tempOutFile = outFile.replace('.mnc', '_run' + str(iterator) + '.mnc')
cmd = '%s %s %s' % (Config.ConverterConfig.niiToMnc_exec, niiFile, tempOutFile)
self.runShellCommand(cmd)
self.checkMncFile(tempOutFile) # Check whether the fMRI files have a time component/axis
iterator += 1
# Delete Temporary Folder
self.deleteFolder(tempFolder)
# Check how many mnc files were generated
mncList = []
for root, dirnames, filenames in os.walk(convertionObj.converted_folder):
for filename in fnmatch.filter(filenames, '*.mnc'):
mncList.append(os.path.join(root, filename))
if len(mncList) == 0:
PipelineLogger.log('converter', 'error',
'MINC Conversion unsuccessful : Check log for : {0} - {1} - {2} - {3}'.format(
convertionObj.study, convertionObj.rid, convertionObj.scan_date,
convertionObj.scan_type))
return 0
else:
return 1
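Several examples call self.runShellCommand, which is not shown on this page. A minimal sketch of such a helper, assuming it simply blocks on the command and logs its output (the real helper may differ):

import subprocess

def runShellCommand(self, cmd):
    # Assumed helper: run a shell command, wait for it, and log stdout/stderr.
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    PipelineLogger.log('converter', 'debug', 'Output : \n{0}'.format(out))
    PipelineLogger.log('converter', 'debug', 'Err : \n{0}'.format(err))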
Example 8: removeCommaIfThere
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def removeCommaIfThere(destFolder):
PipelineLogger.log('manager', 'debug',
'Removing unsupported chars from file names...... :')
    for dpath, dnames, fnames in os.walk(destFolder):
        for f in fnames:
            os.chdir(dpath)  # rename relative to the containing directory
            if ',' in f:
                os.rename(f, f.replace(',', ''))
PipelineLogger.log('manager', 'debug',
'Removing unsupported chars from file names done ...:')
Example 9: avgTime
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def avgTime(self, inputMNC, outputMNC):
avgCMD = "/opt/minc-toolkit/bin/mincaverage -short -avgdim time {0} {1}".format(inputMNC, outputMNC)
p = subprocess.Popen(avgCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
PipelineLogger.log("converter", "debug", "Averaging Time Output : \n{0}".format(out))
PipelineLogger.log("converter", "debug", "Averaging Time Err : \n{0}".format(err))
if os.path.exists(outputMNC):
return 1
else:
return 0
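A usage sketch for avgTime, collapsing a dynamic (4-D) volume over its time axis into a static one (paths invented for illustration):

# Hypothetical paths, for illustration only:
ok = self.avgTime('/data/study/pet_dynamic.mnc', '/data/study/pet_static.mnc')
if not ok:
    PipelineLogger.log('converter', 'error', 'mincaverage did not produce the expected output file.')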
Example 10: createNewScanSession
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def createNewScanSession(self, down_most_folder, filelist):
    # Extract the parts of the folder path that are of interest
folder = down_most_folder.replace(self.root_folder, "")
xmllist = [x for x in filelist if "xml" in x]
filelist = [x for x in filelist if "xml" not in x]
    if not filelist:  # If no non-XML files remain, ignore and skip this folder
return None
try:
        folder_parts = folder.split("/")  # List containing each part/folder of the full path
        filename_parts = filelist[0].split("_")  # Take the first filename and split it into its parts
xmlFileS = open("{0}/{1}".format(down_most_folder, xmllist[0])).read()
xmlDict = xmltodict.parse(xmlFileS)
rid = xmlDict["metadata"]["subject"]["@id"].split("_")[-1]
if re.search("[a-zA-Z]", rid) is not None:
rid = filename_parts[3]
if re.search("[a-zA-Z]", rid) is not None:
                PipelineLogger.log(
                    "root",
                    "error",
                    "File recurse error on folder; RID cannot be identified. - {0}, \n Filelist - {1}".format(
                        folder, filelist
                    ),
                )
return None
s_identifier = xmlDict["metadata"]["series"]["@uid"]
i_identifier = xmlDict["metadata"]["image"]["@uid"]
scan_type = self.determineScanType(folder_parts[-3], self.study, rid, s_identifier, i_identifier)
scan_date = folder_parts[-2].split("_")[0]
scan_time = folder_parts[-2].split("_", 1)[-1].replace("_", ":")
file_type = self.determineExtension(filename_parts)
download_folder = down_most_folder
raw_folder = "{0}/{1}/{2}/{3}/{4}_{5}_{6}/raw".format(
sc.studyDatabaseRootDict[self.study], self.study, scan_type, rid, scan_date, s_identifier, i_identifier
)
except:
PipelineLogger.log(
"root", "exception", "File recurse error on Folder - {0}, \n Filelist - {1}".format(folder, filelist)
)
return None
newScanSession = ScanSession(
self.study,
rid,
scan_type,
scan_date,
scan_time,
s_identifier,
i_identifier,
download_folder,
raw_folder,
file_type,
)
return newScanSession
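The xmltodict lookups above imply a specific XML layout. A minimal document matching that structure (values invented for illustration):

import xmltodict

# Minimal XML with the structure the parsing above expects:
sample = ('<metadata>'
          '<subject id="041_S_1234"/>'
          '<series uid="S112233"/>'
          '<image uid="I256738"/>'
          '</metadata>')
xmlDict = xmltodict.parse(sample)
rid = xmlDict["metadata"]["subject"]["@id"].split("_")[-1]  # -> '1234'
s_identifier = xmlDict["metadata"]["series"]["@uid"]        # -> 'S112233'
i_identifier = xmlDict["metadata"]["image"]["@uid"]         # -> 'I256738'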
Example 11: findCorrespondingMRI
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def findCorrespondingMRI(self, processingItemObj):
# Find Matching T1
matching_t1 = ADNI_T1_Fmri_Helper().getMatchingT1(processingItemObj)
if not matching_t1:
return 0
# Find out whether T1 has been processed
processed = ADNI_T1_Fmri_Helper().checkProcessed(matching_t1)
if not processed:
PipelineLogger.log('root', 'error', 'FMRI cannot be processed due to matching T1 not being processed.')
return 0
else:
return processed
Example 12: convertMinc
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def convertMinc(self, convertionObj):
rawFile = '{0}/*.mnc'.format(convertionObj.raw_folder)
outFile = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(convertionObj.converted_folder, convertionObj.study,
convertionObj.rid, convertionObj.scan_date.replace('-', ''),
convertionObj.s_identifier, convertionObj.i_identifier,
convertionObj.scan_type)
    # Move the raw MINC file into the converted folder under its new name
    cmd = 'mv {0} {1}'.format(rawFile, outFile)
PipelineLogger.log('converter', 'info',
'MINC transfer starting for : {0} - {1} - {2} - {3}'.format(convertionObj.study,
convertionObj.rid,
convertionObj.scan_date,
convertionObj.scan_type))
PipelineLogger.log('converter', 'debug', 'Command : {0}'.format(cmd))
self.runShellCommand(cmd)
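Because the step is a single file move, a pure-Python sketch of the same operation using glob and shutil (an alternative to shelling out; assumes exactly one .mnc file in the raw folder):

import glob
import shutil

# Sketch: move the first matching raw .mnc file without invoking a shell.
matches = glob.glob('{0}/*.mnc'.format(convertionObj.raw_folder))
if matches:
    shutil.move(matches[0], outFile)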
Example 13: checkExternalJobs
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def checkExternalJobs(self, study, modality):
getExtJobSql = "SELECT * FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_%'".format(study, modality)
extJobs = self.DBClient.executeAllResults(getExtJobSql)
for job in extJobs:
jobType = job[0].split('_')[-1]
reportTable = job[1]
tableID = job[0].split('_')[2]
reportField = job[2]
subjectScanID = job[0].split('_')[3]
success = 0
if jobType == 'CIVETRUN':
if glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID)):
                getProcessRecSql = "SELECT * FROM Processing WHERE RECORD_ID IN (SELECT PROCESSING_TID FROM {0}_T1_Pipeline WHERE RECORD_ID = {1})".format(study, tableID)
                processingEntry = self.DBClient.executeAllResults(getProcessRecSql)[0]
civetFolder = '{0}/civet'.format(processingEntry[8])
if os.path.exists(civetFolder):
shutil.rmtree(civetFolder)
try:
PipelineLogger.log('manager', 'info', 'Copying - {0} -> {1}'.format(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder))
dir_util.copy_tree(glob.glob('{0}/{1}_{2}_*'.format(PipelineConfig.T1TempDirForCIVETDownload, study, subjectScanID))[0], civetFolder)
success = 1
except:
success = 0
else:
continue
else:
PipelineLogger.log('manager', 'error', 'Unknown external job type - {}'.format(jobType))
if success:
updateSQL = "UPDATE {0} SET {1} = 1 WHERE RECORD_ID = {2}".format(reportTable, reportField, tableID)
self.DBClient.executeNoResult(updateSQL)
if jobType == 'CIVETRUN':
finishSQL = "UPDATE {0} SET FINISHED = 1 WHERE RECORD_ID = {1}".format(reportTable, tableID)
self.DBClient.executeNoResult(finishSQL)
modal_table = reportTable
modal_tableId = tableID
qcField = 'QC'
qctype = 'civet'
qcFolder = civetFolder
self.QCH.requestQC(study, modal_table, modal_tableId, qcField, qctype, qcFolder)
rmSql = "DELETE FROM externalWaitingJobs WHERE JOB_ID LIKE '{0}_{1}_{2}_{3}_%'".format(study, modality, tableID, subjectScanID)
self.DBClient.executeNoResult(rmSql)
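The split indices above imply an underscore-delimited JOB_ID. An illustration of the inferred layout (the exact format is not documented on this page; values invented):

# Inferred layout: '<study>_<modality>_<tableID>_<subjectScanID>_..._<jobType>'
job_id = 'ADNI_FMRI_42_1234_CIVETRUN'  # invented example
jobType = job_id.split('_')[-1]        # -> 'CIVETRUN'
tableID = job_id.split('_')[2]         # -> '42'
subjectScanID = job_id.split('_')[3]   # -> '1234'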
Example 14: checkProcessed
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def checkProcessed(self, t1Record):
subject_id = t1Record[2]
version = t1Record[11]
s_id = t1Record[6]
i_id = t1Record[7]
    checkProcessedSQL = "SELECT * FROM Processing WHERE RID = '{0}' AND VERSION = '{1}' AND S_IDENTIFIER = '{2}' AND I_IDENTIFIER = '{3}'".format(subject_id, version, s_id, i_id)
    results = self.DBClient.executeAllResults(checkProcessedSQL)
    if len(results) < 1:  # Guard before indexing; an empty result set would otherwise raise IndexError
        PipelineLogger.log('root', 'error', 'Matched T1 is not added to the processing table. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
        return False
    else:
        result = results[0]
        if result[12] == 1 and result[13] == 1:
return result[8]
else:
            PipelineLogger.log('root', 'error', 'Matched T1 is not processed or QC failed. {0} - {1} - {2}'.format(subject_id, s_id, i_id))
self.startProcessOFT1(result)
return False
Example 15: process
# Required import: from Utils.PipelineLogger import PipelineLogger [as alias]
# Alternatively: from Utils.PipelineLogger.PipelineLogger import log [as alias]
def process(self, processingItemObj):
try:
matlabScript, nativeFileName, niakFolder = self.readTemplateFile(processingItemObj)
PipelineLogger.log('manager', 'info', 'NIAK starting for {0}'.format(nativeFileName))
except:
return 0
    # Delete the PIPE.lock file, if it exists
if os.path.isfile("%s/preprocessing/logs/PIPE.lock" % niakFolder):
os.remove("%s/preprocessing/logs/PIPE.lock" % niakFolder)
success = self.executeScript(processingItemObj, matlabScript, niakFolder)
    #### Afterwards, if NIAK succeeded, concatenate all runs together using combiningRuns
    if False:  # run concatenation is currently disabled
        if success:
            self.combiningRuns(processingItemObj)
        else:
            PipelineLogger.log('manager', 'error', 'NIAK run failed for {0}'.format(nativeFileName))  # hypothetical message; signature matches the examples above