本文整理汇总了Python中utility.date_time.date_time函数的典型用法代码示例。如果您正苦于以下问题:Python date_time函数的具体用法?Python date_time怎么用?Python date_time使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了date_time函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: platypus_germline
def platypus_germline(config_file, sample, log_dir, cflag):
    """Run Platypus germline variant calling for one sample.

    config_file: pipeline config parsed by parse_config()
    sample: sample id used to locate <sample>.merged.final.bam
    log_dir: directory receiving the platypus log file
    cflag: 'y' selects the short config tuple (no region/VAF filtering);
           any other value selects the config with region file, minVAF and
           samtools (used to index the BAM first if needed)
    Returns 0 on a normal run; logs and returns the last known call status
    if subprocess.call itself raises.
    """
    loc = log_dir + sample + ".platypus.log"
    # here for safety as python is confusing about whether variables exist outside of if-else statements or not
    platypus_cmd = ''
    if cflag == 'y':
        (platypus, fasta, threads, project_dir, project, align) = parse_config(config_file, cflag)
        bam = project_dir + project + '/' + align + '/' + sample + '/BAM/' + sample + '.merged.final.bam'
        platypus_cmd = "python2.7 " + platypus + " callVariants --nCPU=" + threads + " --refFile=" + fasta \
                       + " --bamFiles=" + bam + " -o " + sample + ".germline_calls.vcf --logFileName=" \
                       + log_dir + sample + ".platypus.log" + " >> " + loc + " 2>&1"
    else:
        (platypus, fasta, threads, region_file, minVAF, samtools, project_dir, project, align) \
            = parse_config(config_file, cflag)
        bam = project_dir + project + '/' + align + '/' + sample + '/BAM/' + sample + '.merged.final.bam'
        # index first unless <bam>.bai or the .bai-sibling (bam with trailing 'm' -> 'i') exists
        if not (os.path.isfile(bam + '.bai') or os.path.isfile(bam[:-1] + 'i')):
            log(loc, date_time() + bam + ' not indexed. Indexing\n')
            cmd = samtools + ' index ' + bam
            log(loc, date_time() + cmd + '\n')
            subprocess.call(cmd, shell=True)
        platypus_cmd = "python2.7 " + platypus + " callVariants --nCPU=" + threads + " --refFile=" + fasta \
                       + " --bamFiles=" + bam + " --filterDuplicates=0 -o " + sample \
                       + ".germline_calls.vcf --minVarFreq=" + minVAF + " --regions=" + region_file \
                       + " --logFileName=" + loc + " >> " + loc + " 2>&1"
    log(loc, date_time() + platypus_cmd + "\n")
    f = 0
    try:
        f = subprocess.call(platypus_cmd, shell=True)
    except Exception:
        # was a bare except; narrowed so KeyboardInterrupt/SystemExit propagate.
        # NOTE(review): subprocess.call does not raise on a non-zero platypus
        # exit code, so f is otherwise unused and success (0) is always
        # returned below -- confirm whether callers should see f instead.
        log(loc, 'platypus germline variant calling failed for sample ' + sample + '\n')
        return f
    return 0
示例2: downsample_bam
def downsample_bam(samtools, bam, frac, out_dir, th):
    """Submit an sbatch job that subsamples *bam* to fraction *frac* using
    ``samtools view -s``, writing the result into *out_dir*."""
    base_name = os.path.basename(bam.replace('.bam', ''))
    dest = out_dir + '/' + base_name + '_subsample_' + frac + '.bam'
    subsample_cmd = ('sbatch -c ' + th + ' ' + samtools + ' view --threads ' + th
                     + ' -b ' + bam + ' -s ' + frac + ' > ' + dest)
    sys.stderr.write(date_time() + 'Downsampling ' + bam + '\n' + subsample_cmd + '\n')
    subprocess.call(subsample_cmd, shell=True)
    sys.stderr.write(date_time() + 'process complete!\n')
示例3: novosort_merge_pe
def novosort_merge_pe(config_file, sample_list):
    """Merge per-lane transcriptome BAMs for every sample in *sample_list*.

    Samples with more than one lane BAM get a novosort merge/rmdup job
    submitted via sbatch; a single-BAM sample just gets a symlink to the
    expected merged name. Always returns 0 -- jobs are fire-and-forget,
    outcomes land in the per-sample logs.
    """
    (novosort, java_tool, picard_tool, project, project_dir, align, threads, ram, novo_merge_rmdup_slurm) \
        = parse_config(config_file)
    # was: fh = open(sample_list, 'r') with no close -- use a context manager
    with open(sample_list, 'r') as fh:
        for sample in fh:
            sample = sample.rstrip('\n')
            loc = '../LOGS/' + sample + '.novosort_merge.log'
            job_loc = sample + '.novosort_merge.log'
            (bam_list, n) = list_bam(project, align, sample)
            bam_string = " ".join(bam_list)
            cur_dir = project_dir + project + '/' + align + '/' + sample + '/BAMS/'
            # relative paths below (logs, links) resolve from the sample BAM dir
            os.chdir(cur_dir)
            out_bam = sample + '.merged.transcriptome.bam'
            if n > 1:
                batch = 'sbatch -c ' + threads + ' --mem ' + ram + 'G -o ' + job_loc + ' --export=novosort="' \
                        + novosort + '",threads="' + threads + '",ram="' + ram + 'G",out_bam="' + out_bam \
                        + '",bam_string="' + bam_string + '",loc="' + loc + '"' + ' ' + novo_merge_rmdup_slurm
                log(loc, date_time() + 'Submitting merge bam job for sample ' + batch + "\n")
                subprocess.call(batch, shell=True)
            else:
                link_bam = 'ln -s ' + bam_list[0] + ' ' + sample + '.merged.transcriptome.bam;'
                log(loc, date_time() + 'Creating symlink for merged final bam since only one exists\n'
                    + link_bam + '\n')
                subprocess.call(link_bam, shell=True)
    sys.stderr.write(date_time() + 'Merged file request submitted and processed, check logs.\n')
    return 0
示例4: batch_qc
def batch_qc(fn, cont, obj, t):
    """Queue FastQC pre-QC jobs for each sample id listed in the input file.

    cont/obj: swift container and object prefix holding the sequence files
    t: concurrency handed to job_manager()
    NOTE(review): despite the *fn* parameter, the file actually read is
    inputs.fn from the module-level argparse *parser* -- confirm intent.
    """
    # re-parses the command line; prints usage and exits when run with no args
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    inputs = parser.parse_args()
    fh = open(inputs.fn, 'r')
    # source openstack credentials before each swift invocation
    src_cmd = '. ~/.novarc;'
    jobs = []
    for line in fh:
        line = line.rstrip('\n')
        # All files for current bnid to be stored in cwd
        swift_cmd = src_cmd + 'swift list ' + cont + ' --prefix ' + obj + '/' + line
        sys.stderr.write(date_time() + 'Checking for sequence files for sample ' + line + '\n' + swift_cmd + '\n')
        try:
            contents = subprocess.check_output(swift_cmd, shell=True)
            # a listing shorter than the sample id means nothing useful came back
            if len(contents) < len(line):
                sys.stderr.write(date_time() + 'Can\'t find sequencing files for ' + line + ' skipping!\n')
                continue
        except:
            sys.stderr.write(date_time() + 'Can\'t find sequencing files for ' + line + ' skipping!\n')
            continue
        # NOTE(review): [sequence|f*q] is a character class, not alternation,
        # so this matches more loosely than intended; check_output also
        # returns bytes on Python 3, which would make findall with a str
        # pattern raise TypeError -- confirm Python version / add decode.
        seqfile = re.findall('(\S+[sequence|f*q]*\.gz)', contents)
        # assumes the first two hits are the R1/R2 pair -- TODO confirm
        sf1 = seqfile[0]
        end1 = os.path.basename(sf1)
        sf2 = seqfile[1]
        end2 = os.path.basename(sf2)
        swift_cmd = src_cmd + "swift download " + cont + " --skip-identical --prefix " + obj + '/' + line
        link_cmd = 'ln -s ' + sf1 + ' .;ln -s ' + sf2
        fastqc_cmd = 'mkdir -p PREQC/' + line + '; fastqc -t 2 -o PREQC/' + line + ' ' + sf1 + ' ' + sf2
        upload_cmd = src_cmd + 'swift upload ' + cont + ' PREQC/' + line
        cleanup_cmd = 'rm -rf RAW/' + line + ' PREQC/' + line + ' ' + end1 + ' ' + end2
        # one compound shell job per sample: download, link, fastqc, upload, clean up
        jobs.append(';'.join([swift_cmd, link_cmd, fastqc_cmd, upload_cmd, cleanup_cmd]))
    sys.stderr.write(date_time() + 'Job list created, running jobs!\n')
    job_manager(jobs, t)
    return 0
示例5: organize_dirs
def organize_dirs(self):
    """Move BAM/QC/LOGS outputs from the working dir up one level.

    Creates the ../<dir> targets if missing, relocates the working copies,
    re-points self.loc at the moved log file and removes the now-empty
    working dirs. Returns 0 on success, 1 on any failure (best-effort;
    callers treat non-zero as the error signal).
    """
    # check for existing BAM, QC and LOG dirs one level up
    try:
        if not os.path.isdir('../' + self.bam_dir):
            mk_bam_dir = 'mkdir ../' + self.bam_dir
            log(self.loc, date_time() + 'Making BAM directory ' + mk_bam_dir + '\n')
            call(mk_bam_dir, shell=True)
        if not os.path.isdir('../' + self.qc_dir):
            mk_qc_dir = 'mkdir ../' + self.qc_dir
            log(self.loc, date_time() + 'Making QC directory ' + mk_qc_dir + '\n')
            call(mk_qc_dir, shell=True)
        if not os.path.isdir('../' + self.log_dir):
            mk_log_dir = 'mkdir ../' + self.log_dir
            log(self.loc, date_time() + 'Making LOGS directory ' + mk_log_dir + '\n')
            call(mk_log_dir, shell=True)
        reloc_files = 'mv ' + self.bam_dir + '* ../' + self.bam_dir + '; mv ' + self.log_dir + '* ../' \
                      + self.log_dir + '; mv ' + self.qc_dir + '* ../' + self.qc_dir
        log(self.loc, date_time() + 'Relocating files ' + reloc_files + '\n')
        call(reloc_files, shell=True)
        # need to reassign log file location since it's being moved!
        self.loc = '../' + self.loc
        rm_old = 'rmdir ' + ' '.join((self.bam_dir, self.log_dir, self.qc_dir))
        log(self.loc, date_time() + 'Clearing out working dirs ' + rm_old + '\n')
        call(rm_old, shell=True)
        return 0
    except Exception:
        # was a bare except, which would also swallow KeyboardInterrupt/SystemExit
        return 1
示例6: lane_express_quant
def lane_express_quant(bams, config_file):
    """Quantify transcript abundance with eXpress for each lane-level BAM.

    bams: text file listing transcriptome-aligned BAM paths, one per line
    config_file: supplies strandedness flags plus express/transcriptome paths
    Insert-size mean/sd are read from each lane's picard qc_stats json and
    passed to eXpress. Unstranded runs execute locally; stranded runs are
    submitted through sbatch. Returns 0 after all lanes are dispatched.
    """
    (stranded, strand, express, express_sl, transcriptome) = parse_config(config_file)
    for bam in open(bams):
        bam = bam.rstrip('\n')
        bam_dir = os.path.dirname(bam)
        # lane root = BAM name with the STAR transcriptome suffix stripped
        root = os.path.basename(re.sub('.Aligned.toTranscriptome.out.*', '', bam))
        qc_dir = bam_dir.replace('BAMS', 'QC')
        qc_file = qc_dir + '/' + root + '.qc_stats.json'
        qc_data = json.loads(open(qc_file, 'r').read())
        # mean (x) and std dev (s) of insert size, rounded to whole bases
        (x, s) = (str(int(round(float(qc_data['picard_stats']['x_ins_size'])))),
                  str(int(round(float(qc_data['picard_stats']['s_ins_size'])))))
        wd = qc_dir + '/' + root + '/'
        loc = wd + root + '.log'
        express_cmd = 'mkdir ' + wd + ';'
        call(express_cmd, shell=True)
        sys.stderr.write(date_time() + 'Created dir ' + wd + ' to quantify ' + bam + '\n' + express_cmd + '\n')
        if stranded == 'N':
            # unstranded: run eXpress directly in this process
            express_cmd = express + ' ' + transcriptome + ' ' + bam + ' --no-update-check -o ' + wd + ' -m '\
                          + x + ' -s ' + s + ' --logtostderr 2>> ' + loc + ';'
        else:
            # stranded: hand everything to the slurm wrapper script via --export
            express_cmd = 'sbatch -c 4 --export=express="' + express + '",transcriptome="' + transcriptome + '",bam="' \
                          + bam + '",wd="' + wd + '",strand="' + strand + '",x="' + x + '",s="' + s + '",loc="' + loc \
                          + '",root="' + root + '" ' + express_sl
            # legacy inline command kept for reference:
            # express + ' ' + transcriptome + ' ' + bam + ' --no-update-check -o ' + wd + ' --'\
            # + strand + ' -m ' + x + ' -s ' + s + ' --logtostderr 2>> ' + loc + ';'
            # express_cmd += 'mv ' + wd + 'results.xprs ' + wd + root + '.express_quantification.txt; mv ' + wd \
            # + 'params.xprs ' + wd + root + '.params.xprs;'
        sys.stderr.write(date_time() + 'Submitting quantification job\n' + express_cmd + '\n')
        call(express_cmd, shell=True)
    return 0
示例7: preprocess_bams
def preprocess_bams(config_file, sample_pairs):
    """Ensure merged BAMs exist for every sample named in *sample_pairs*.

    Writes the unique sample ids from the pairs file to sample_list.txt,
    asks check_for_merged_bams() which merged BAMs are missing, and runs
    the novosort merge for just those. A 3-column pair line contributes
    the ids in columns 2 and 3; any other line contributes column 1.
    """
    # create sample list
    sample_list = 'sample_list.txt'
    seen = set()  # was a dict used purely for membership
    # context managers replace the original unclosed fh / manual sl.close()
    with open(sample_pairs, 'r') as fh, open(sample_list, 'w') as sl:
        for line in fh:
            cur = line.rstrip('\n').split('\t')
            if len(cur) == 3:
                for samp in cur[1:3]:
                    if samp not in seen:
                        sl.write(samp + '\n')
                        seen.add(samp)
            else:
                if cur[0] not in seen:
                    sl.write(cur[0] + '\n')
                    seen.add(cur[0])
    miss_list = check_for_merged_bams(config_file, sample_list)
    if len(miss_list) > 0:
        sys.stderr.write(date_time() + 'Missing files detected, merging lane files\n')
        temp_fn = 'temp_samp_list.txt'
        with open(temp_fn, 'w') as temp_fh:
            temp_fh.write('\n'.join(miss_list))
        run_novosort(config_file, temp_fn)
    else:
        sys.stderr.write(date_time() + 'All bams found. Ready for next step!\n')
示例8: annot_platypus
def annot_platypus(config_file, sample, skip):
    """Annotate a sample's germline platypus calls with VEP91 and summarize.

    config_file: pipeline config parsed by parse_config()
    sample: sample id; input is <analysis dir>/<sample>.germline_pass.vcf
    skip: 'n' runs pass_filter()/set_acls() on the analysis dir first;
          anything else assumes filtering was already done
    Returns 0 on success, 1 when the report step fails; exits the process
    when VEP fails twice (initial run plus one smaller-buffer retry).
    """
    (vep_tool, vep_cache, plugin_dir, fasta, threads, java, cadd_snv, cadd_indel, tx_index, project_dir, project,
     analysis, annotation, user, group) = parse_config(config_file)
    # refresh environment (e.g. tool paths) from the system profile
    src_env = '. /etc/environment'
    subprocess.call(src_env, shell=True)
    ana_dir = project_dir + project + '/' + analysis + '/' + sample
    if skip == 'n':
        # keep only PASS calls and fix permissions before annotating
        pass_filter(ana_dir + '/' + sample)
        set_acls(ana_dir, user, group)
    in_vcf = ana_dir + '/' + sample + '.germline_pass.vcf'
    out_vcf = sample + '.germline.vep91.vcf'
    buffer_size = '5000'
    ann_dir = project_dir + project + '/' + annotation + '/' + sample
    if not os.path.isdir(ann_dir):
        mk_ann = 'mkdir -p ' + ann_dir
        sys.stderr.write('Creating annotation output directories ' + mk_ann + '\n')
        subprocess.call(mk_ann, shell=True)
    os.chdir(ann_dir)
    sys.stderr.write(date_time() + 'Changed to working directory ' + ann_dir + '\n')
    # leave one core free for the memory watcher when several are available
    if int(threads) > 1:
        threads = str(int(threads) - 1)
    run_cmd = run_vep(vep_tool, in_vcf, out_vcf, threads, fasta, vep_cache, cadd_snv, cadd_indel, sample, buffer_size,
                      plugin_dir)
    sys.stderr.write(date_time() + 'Annotating sample ' + in_vcf + ' ' + run_cmd + '\n')
    # from stack overflow to allow killing of spawned processes in main process fails for cleaner restart
    check = subprocess.Popen(run_cmd, stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
    check_run = watch_mem(check, sample)
    if check_run != 0:
        # VEP tripped the memory watchdog: kill the whole process group,
        # clear partial output and retry once with half the buffer size
        buffer_size = str(int(buffer_size) // 2)
        clean_up = 'rm \'' + out_vcf + '*\''
        sys.stderr.write(date_time() + 'VEP failed. Status of run was ' + str(check_run)
                         + ' Trying smaller buffer size of ' + buffer_size + '\n' + clean_up + '\n')
        try:
            os.killpg(os.getpgid(check.pid), signal.SIGINT)
        except:
            sys.stderr.write(date_time() + 'Killing process failed. Might have already died for other reasons...\n')
        subprocess.call(clean_up, shell=True)
        run_cmd = run_vep(vep_tool, in_vcf, out_vcf, threads, fasta, vep_cache, cadd_snv, cadd_indel, sample,
                          buffer_size, plugin_dir)
        sys.stderr.write(date_time() + 'Annotating sample ' + sample + in_vcf + '\n')
        check = subprocess.call(run_cmd, shell=True)
        if check != 0:
            sys.stderr.write(date_time() + 'VEP failed for sample ' + sample + '\n')
            exit(1)
        else:
            sys.stderr.write(date_time() + 'VEP annotation of ' + in_vcf + ' successful!\n')
    # build the germline summary table from the annotated vcf
    check = gen_report(out_vcf, sample, tx_index)
    if check == 0:
        sys.stderr.write(date_time() + 'Summary table of germline calls completed!\n')
    else:
        sys.stderr.write(date_time() + 'Summary table for ' + out_vcf + ' FAILED!\n')
        return 1
    set_acls(ann_dir, user, group)
    sys.stderr.write(date_time() + 'VEP91 annotation of ' + sample + ' complete!\n')
    return 0
示例9: run_novosort
def run_novosort(config_file, sample_list):
    """Run the novosort merge for *sample_list* and report the outcome.

    Exits the process with status 1 when the merge step reports failure.
    """
    status = novosort_merge_pe(config_file, sample_list)
    if status != 0:
        sys.stderr.write(date_time() + 'File download and merge failed.\n')
        exit(1)
    sys.stderr.write(date_time() + 'File merge complete!\n')
示例10: novosort_merge_pe
def novosort_merge_pe(config_file, sample_list):
    """Merge each sample's lane BAMs into <sample>.merged.final.bam via sbatch.

    rmdup == 'Y' submits the novosort merge+rmdup slurm script; otherwise the
    legacy novosort-merge / picard-rmdup script is used. Samples with a
    single lane BAM just get symlinks to the expected final bam/bai names.
    Always returns 0; job outcomes land in the per-sample logs.
    """
    fh = open(sample_list, 'r')
    (novosort, java_tool, picard_tool, project, project_dir, align, threads, ram, rmdup, novo_merge_rmdup_slurm,
     novo_picard_merge_rmdup_slurm) = parse_config(config_file)
    for sample in fh:
        sample = sample.rstrip('\n')
        loc = sample + '.novosort_merge.log'
        (bam_list, bai_list, n) = list_bam(project, align, sample)
        bam_string = " ".join(bam_list)
        cur_dir = project_dir + project + '/' + align + '/' + sample + '/BAM/'
        # relative paths below (logs, links) resolve from the sample BAM dir
        os.chdir(cur_dir)
        out_bam = sample + '.merged.final.bam'
        if n > 1:
            if rmdup == 'Y':
                job_loc = sample + '.novosort_merge.log'
                job_name = sample + '_novosort_merge'
                batch = 'sbatch -c ' + threads + ' -J ' + job_name + ' --mem ' + ram + 'G -o ' + job_loc \
                        + ' --export=novosort="' + novosort + '",threads="' + threads + '",ram="' + ram \
                        + 'G",out_bam="' + out_bam + '",bam_string="' + bam_string + '",loc="' + loc + '"' + ' ' \
                        + novo_merge_rmdup_slurm
                log(loc, date_time() + 'Submitting merge bam job for sample ' + batch + "\n")
                subprocess.call(batch, shell=True)
            else:
                # run legacy pipe for removing dups using picard
                picard_tmp = 'picard_tmp'
                job_loc = sample + '.novosort_merge.picard_rmdup.log'
                job_name = sample + '_novosort_merge.picard_rmdup'
                # setting max records in ram to half of ram
                recs = str(int((int(ram) / 2) * (1000000000 / 200)))
                in_bam = sample + '.merged.bam'
                in_bai = sample + '.merged.bam.bai'
                mets = sample + '.rmdup.srt.metrics'
                batch = 'sbatch -c ' + threads + ' --mem ' + ram + 'G -o ' + job_loc + ' -J ' + job_name \
                        + ' --export=novosort="' + novosort + '",threads="' + threads + '",ram="' + ram \
                        + 'G",in_bam="' + in_bam + '",bam_string="' + bam_string + '",loc="' + job_loc \
                        + '",java_tool="' + java_tool + '",picard_tool="' + picard_tool + '",tmp="' + picard_tmp \
                        + '",recs="' + recs + '",out_bam="' + out_bam + '",mets="' + mets + '",in_bai="' + in_bai \
                        + '" ' + novo_picard_merge_rmdup_slurm
                sys.stderr.write(date_time() + 'Merging with novosort and rmdup with picard for legacy reasons!\n'
                                 + batch + '\n')
                subprocess.call(batch, shell=True)
        else:
            # only one lane BAM: symlink it (and its index) as the final names
            link_bam = 'ln -s ' + bam_list[0] + ' ' + sample + '.merged.final.bam; ln -s ' + bai_list[0] + ' ' \
                       + sample + '.merged.final.bam.bai'
            log(loc, date_time() + 'Creating symlink for merged final bam since only one exists\n'
                + link_bam + '\n')
            subprocess.call(link_bam, shell=True)
    sys.stderr.write(date_time() + 'Merged file request submitted and processed, check logs.\n')
    return 0
示例11: find_project_files
def find_project_files(file_dir, file_prefix):
    """Return `find` output (one path per line) for files under *file_dir*
    whose names start with *file_prefix*; exits the process on failure."""
    find_cmd = "find " + file_dir + " -name \'" + file_prefix + '*\''
    sys.stderr.write(date_time() + find_cmd + "\n")
    try:
        results = check_output(find_cmd, shell=True, stderr=subprocess.PIPE).decode()
        return results
    except (subprocess.CalledProcessError, OSError):
        # was a bare except; narrowed to what check_output actually raises.
        # The original also had an unreachable `return 0` after exit(1).
        sys.stderr.write(date_time() + "Search of " + file_prefix + " from " + file_dir + " failed\n")
        exit(1)
示例12: watch_mem
def watch_mem(proc_obj, sample, loc):
    """Poll system memory while *proc_obj* runs, logging usage to *loc*.

    Returns 1 when usage reaches 99% (caller should kill and retry);
    otherwise returns the finished process's exit status.
    """
    from time import sleep
    while True:
        rc = proc_obj.poll()
        if rc is not None:
            return rc
        pct_used = psutil.virtual_memory().percent
        log(loc, date_time() + 'Current memory usage at ' + str(pct_used) + '% processing sample ' + sample + '\n')
        if pct_used >= 99:
            log(loc, date_time() + 'Memory exceeded while running VEP.')
            return 1
        sleep(30)
示例13: watch_mem
def watch_mem(proc_obj, sample):
    """Poll system memory while the platypus process *proc_obj* runs,
    reporting usage on stderr.

    Returns 1 when usage reaches 99%; otherwise the process exit status.
    """
    from time import sleep
    while True:
        rc = proc_obj.poll()
        if rc is not None:
            return rc
        pct_used = psutil.virtual_memory().percent
        sys.stderr.write(date_time() + 'Current memory usage at ' + str(pct_used) + '% processing sample ' + sample
                         + ' from platypus ' + '\n')
        if pct_used >= 99:
            sys.stderr.write(date_time() + 'Memory exceeded while running VEP.')
            return 1
        sleep(30)
示例14: vep
def vep(config_file, sample_pairs, in_suffix, out_suffix, in_mutect, source, vep_cache):
    """Dispatch VEP annotation, picking the pipe that matches the cache version.

    Cache '84' uses the deprecated annotator; anything else uses VEP91.
    Exits the process with status 1 when the annotation pipe reports failure.
    """
    if vep_cache == '84':
        from annotation.deprecated.annot_vcf_vep import annot_vcf_vep_pipe
    else:
        from annotation.annot_vcf_VEP91 import annot_vcf_vep_pipe
    status = annot_vcf_vep_pipe(config_file, sample_pairs, in_suffix, out_suffix, in_mutect, source)
    if status != 0:
        sys.stderr.write(date_time() + 'vep annotation of ' + source + ' output failed.\n')
        exit(1)
    sys.stderr.write(date_time() + 'vep annotation of ' + source + ' output successful.\n')
    return 0
示例15: picard_insert_size
def picard_insert_size(java_tool, picard_tool, sample, log_dir, ram):
    """Run picard CollectInsertSizeMetrics on <sample>.rmdup.srt.bam.

    ram: java heap size in GB (string)
    Returns 0 when the command was launched cleanly, 1 when invoking java
    raised (best-effort: picard's own exit status is in the log file).
    """
    loc = log_dir + sample + ".picard.insert_size.log"
    # reuse loc for the redirect target -- it is the same expression the
    # original duplicated inline
    picard_insert_size_cmd = java_tool + " -Xmx" + ram + "g -jar " + picard_tool + " CollectInsertSizeMetrics I=" \
                             + sample + ".rmdup.srt.bam H=" + sample + ".insert_metrics.pdf O=" \
                             + sample + ".insert_metrics.hist >> " + loc + " 2>&1"
    log(loc, date_time() + picard_insert_size_cmd + "\n")
    try:
        call(picard_insert_size_cmd, shell=True)
        return 0
    except Exception:
        # was a bare except; narrowed so KeyboardInterrupt/SystemExit propagate
        log(loc, date_time() + 'Picard failed using java ' + java_tool + '\n')
        return 1