This article collects typical usage examples of the Python function msmbuilder.arglib.die_if_path_exists. If you have been wondering what exactly die_if_path_exists does, how to call it, or what real uses of it look like, the curated code examples below may help.
A total of 15 code examples of die_if_path_exists are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
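Before the examples, here is a minimal sketch of the pattern they all share: build the output path(s), call arglib.die_if_path_exists on either a single path or a list of paths, and only then write results, so existing files are never clobbered. The filenames mirror those in Example 1; the "results" directory is a hypothetical placeholder, not taken from any particular MSMBuilder script.

    import os
    from msmbuilder import arglib

    # Hypothetical output directory -- the filenames mirror Example 1 below.
    out_dir = "results"
    output_flist = [os.path.join(out_dir, f) for f in ("committors.dat", "net_flux.mtx")]

    # Exits the script immediately if any of these files already exist;
    # a single path string is also accepted, as several examples below show.
    arglib.die_if_path_exists(output_flist)

    # ...only now is it safe to compute results and write them to output_flist...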
Example 1: entry_point
def entry_point():
    args = parser.parse_args()

    T = scipy.io.mmread(args.tProb)
    U = np.loadtxt(args.starting).astype(int)
    F = np.loadtxt(args.ending).astype(int)

    # deal with case where have single start or end state
    # TJL note: This should be done in the library now... but leaving it
    if U.shape == ():
        tmp = np.zeros(1, dtype=int)
        tmp[0] = int(U)
        U = tmp.copy()
    if F.shape == ():
        tmp = np.zeros(1, dtype=int)
        tmp[0] = int(F)
        F = tmp.copy()

    # Check output isn't taken
    output_list = ["committors.dat", "net_flux.mtx"]
    output_flist = [os.path.join(args.output_dir, f) for f in output_list]
    arglib.die_if_path_exists(output_flist)

    Fc, NFlux = run(T, U, F)

    np.savetxt(output_flist[0], Fc)
    scipy.io.mmwrite(output_flist[1], NFlux)
    logger.info("Saved output to %s", ', '.join(output_flist))
Example 2: run
def run(MinLagtime, MaxLagtime, Interval, NumEigen, AssignmentsFn, symmetrize, nProc, output):
    arglib.die_if_path_exists(output)

    # Setup some model parameters
    try:
        Assignments = io.loadh(AssignmentsFn, "arr_0")
    except KeyError:
        Assignments = io.loadh(AssignmentsFn, "Data")
    NumStates = max(Assignments.flatten()) + 1
    if NumStates <= NumEigen - 1:
        NumEigen = NumStates - 2
        logger.warning(
            "Number of requested eigenvalues exceeds the rank of the transition matrix! Defaulting to the maximum possible number of eigenvalues.")
    del Assignments

    logger.info("Getting %d eigenvalues (timescales) for each lagtime...", NumEigen)
    lagTimes = range(MinLagtime, MaxLagtime + 1, Interval)
    logger.info("Building MSMs at the following lag times: %s", lagTimes)

    # Get the implied timescales (eigenvalues)
    impTimes = msm_analysis.get_implied_timescales(
        AssignmentsFn, lagTimes, n_implied_times=NumEigen, sliding_window=True,
        symmetrize=symmetrize, n_procs=nProc)

    numpy.savetxt(output, impTimes)
    return
Example 3: entry_point
def entry_point():
    args = parser.parse_args()
    arglib.die_if_path_exists(args.output)

    if args.atom_indices.lower() == 'all':
        atom_indices = None
    else:
        atom_indices = np.loadtxt(args.atom_indices).astype(int)

    project = Project.load_from(args.project)

    SASA = run(project, atom_indices, args.traj_fn)

    io.saveh(args.output, SASA)
Example 4: entry_point
def entry_point():
    args = parser.parse_args()

    # load args
    try:
        assignments = io.loadh(args.assignments, 'arr_0')
    except KeyError:
        assignments = io.loadh(args.assignments, 'Data')
    tProb = scipy.io.mmread(args.tProb)

    # workaround for arglib funniness?
    if args.do_minimization in ["False", "0"]:
        args.do_minimization = False
    else:
        args.do_minimization = True

    if args.algorithm == 'PCCA':
        MacroAssignmentsFn = os.path.join(
            args.output_dir, "MacroAssignments.h5")
        MacroMapFn = os.path.join(args.output_dir, "MacroMapping.dat")
        arglib.die_if_path_exists([MacroAssignmentsFn, MacroMapFn])

        MAP, assignments = run_pcca(args.num_macrostates, assignments, tProb)

        np.savetxt(MacroMapFn, MAP, "%d")
        io.saveh(MacroAssignmentsFn, assignments)
        logger.info("Saved output to: %s, %s", MacroAssignmentsFn, MacroMapFn)

    elif args.algorithm == 'PCCA+':
        MacroAssignmentsFn = os.path.join(
            args.output_dir, "MacroAssignments.h5")
        MacroMapFn = os.path.join(args.output_dir, "MacroMapping.dat")
        ChiFn = os.path.join(args.output_dir, 'Chi.dat')
        AFn = os.path.join(args.output_dir, 'A.dat')
        arglib.die_if_path_exists([MacroAssignmentsFn, MacroMapFn, ChiFn, AFn])

        chi, A, MAP, assignments = run_pcca_plus(
            args.num_macrostates, assignments, tProb, args.flux_cutoff,
            objective_function=args.objective_function,
            do_minimization=args.do_minimization)

        np.savetxt(ChiFn, chi)
        np.savetxt(AFn, A)
        np.savetxt(MacroMapFn, MAP, "%d")
        io.saveh(MacroAssignmentsFn, assignments)
        logger.info('Saved output to: %s, %s, %s, %s',
                    ChiFn, AFn, MacroMapFn, MacroAssignmentsFn)
    else:
        raise Exception()
Example 5: run
def run(lagtime, assignments, symmetrize='MLE', input_mapping="None", trim=True, out_dir="./Data/"):
    # set the filenames for output
    FnTProb = os.path.join(out_dir, "tProb.mtx")
    FnTCounts = os.path.join(out_dir, "tCounts.mtx")
    FnMap = os.path.join(out_dir, "Mapping.dat")
    FnAss = os.path.join(out_dir, "Assignments.Fixed.h5")
    FnPops = os.path.join(out_dir, "Populations.dat")

    # make sure none are taken
    outputlist = [FnTProb, FnTCounts, FnMap, FnAss, FnPops]
    arglib.die_if_path_exists(outputlist)

    # Check for valid lag time
    assert lagtime > 0, 'Please specify a positive lag time.'

    # if given, apply mapping to assignments
    if input_mapping != "None":
        MSMLib.apply_mapping_to_assignments(assignments, input_mapping)

    n_assigns_before_trim = len(np.where(assignments.flatten() != -1)[0])

    counts = MSMLib.get_count_matrix_from_assignments(
        assignments, lag_time=lagtime, sliding_window=True)

    rev_counts, t_matrix, populations, mapping = MSMLib.build_msm(
        counts, symmetrize=symmetrize, ergodic_trimming=trim)

    if trim:
        MSMLib.apply_mapping_to_assignments(assignments, mapping)

        n_assigns_after_trim = len(np.where(assignments.flatten() != -1)[0])

        # if had input mapping, then update it
        if input_mapping != "None":
            mapping = mapping[input_mapping]

        # Print a statement showing how much data was discarded in trimming
        percent = (1.0 - float(n_assigns_after_trim) / float(n_assigns_before_trim)) * 100.0
        logger.warning("Ergodic trimming discarded: %f percent of your data", percent)
    else:
        logger.warning("No ergodic trimming applied")

    # Save all output
    np.savetxt(FnPops, populations)
    np.savetxt(FnMap, mapping, "%d")
    scipy.io.mmwrite(str(FnTProb), t_matrix)
    scipy.io.mmwrite(str(FnTCounts), rev_counts)
    io.saveh(FnAss, assignments)

    for output in outputlist:
        logger.info("Wrote: %s", output)

    return
Example 6: check_paths
def check_paths(args):
    if args.alg == 'hierarchical':
        die_if_path_exists(args.hierarchical_save_zmatrix)
    else:
        die_if_path_exists(args.generators)
        if args.stride == 1:
            die_if_path_exists(args.assignments)
            die_if_path_exists(args.distances)
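For orientation, here is a minimal, hypothetical way to exercise a helper like check_paths: build a bare argparse.Namespace carrying the attributes the function reads (alg, hierarchical_save_zmatrix, generators, stride, assignments, distances) and call it before clustering starts. Only the attribute names come from the example above; the filenames and the algorithm name are placeholders.

    from argparse import Namespace

    # Hypothetical arguments mimicking what the clustering script's parser would produce.
    args = Namespace(
        alg='kcenters',                          # any non-hierarchical algorithm name
        hierarchical_save_zmatrix='ZMatrix.h5',  # only checked when alg == 'hierarchical'
        generators='Gens.lh5',
        stride=1,                                # stride == 1 also writes assignments/distances
        assignments='Assignments.h5',
        distances='Assignments.h5.distances',
    )

    # Fails fast (via die_if_path_exists) if any relevant output file already exists.
    check_paths(args)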
Example 7: entry_point
def entry_point():
    args = parser.parse_args()

    k = int(args.num_states) if args.num_states != 'none' else None
    d = float(args.cutoff_distance) if args.cutoff_distance != 'none' else None

    arglib.die_if_path_exists(args.assignments)

    if k is None and d is None:
        logger.error(
            'You need to supply either a number of states or a cutoff distance')
        sys.exit(1)

    project = Project.load_from(args.project)
    assignments = main(
        k, d, args.hierarchical_clustering_zmatrix, args.stride, project)

    io.saveh(args.assignments, assignments)
    logger.info('Saved assignments to %s', args.assignments)
Example 8: entry_point
def entry_point():
    args = parser.parse_args()
    arglib.die_if_path_exists(args.output)

    try:
        assignments = io.loadh(args.assignments, 'arr_0')
        distances = io.loadh(args.distances, 'arr_0')
    except KeyError:
        assignments = io.loadh(args.assignments, 'Data')
        distances = io.loadh(args.distances, 'Data')

    trimmed = run(assignments, distances, args.rmsd_cutoff)

    io.saveh(args.output, trimmed)
    logger.info('Saved output to %s', args.output)
Example 9: entry_point
def entry_point():
    args, metric = parser.parse_args()
    arglib.die_if_path_exists(args.output)

    project = Project.load_from(args.project)
    pdb = md.load(args.pdb)

    if args.traj_fn.lower() == 'all':
        traj_fn = None
    else:
        traj_fn = args.traj_fn

    distances = run(project, pdb, metric, traj_fn)

    io.saveh(args.output, distances)
    logger.info('Saved to %s', args.output)
Example 10: run_pcca
def run_pcca(num_macrostates, assignments, tProb, output_dir):
    MacroAssignmentsFn = os.path.join(output_dir, "MacroAssignments.h5")
    MacroMapFn = os.path.join(output_dir, "MacroMapping.dat")
    arglib.die_if_path_exists([MacroAssignmentsFn, MacroMapFn])

    logger.info("Running PCCA...")
    MAP = lumping.PCCA(tProb, num_macrostates)

    # MAP the new assignments and save, make sure we don't
    # mess up negative ones (i.e. where we don't have data)
    MSMLib.apply_mapping_to_assignments(assignments, MAP)

    np.savetxt(MacroMapFn, MAP, "%d")
    msmbuilder.io.saveh(MacroAssignmentsFn, assignments)

    logger.info("Saved output to: %s, %s", MacroAssignmentsFn, MacroMapFn)
Example 11: run
def run(LagTime, assignments, Symmetrize='MLE', input_mapping="None", Prior=0.0, OutDir="./Data/"):
    # set the filenames for output
    FnTProb = os.path.join(OutDir, "tProb.mtx")
    FnTCounts = os.path.join(OutDir, "tCounts.mtx")
    FnMap = os.path.join(OutDir, "Mapping.dat")
    FnAss = os.path.join(OutDir, "Assignments.Fixed.h5")
    FnPops = os.path.join(OutDir, "Populations.dat")

    # make sure none are taken
    outputlist = [FnTProb, FnTCounts, FnMap, FnAss, FnPops]
    arglib.die_if_path_exists(outputlist)

    # if given, apply mapping to assignments
    if input_mapping != "None":
        MSMLib.apply_mapping_to_assignments(assignments, input_mapping)

    n_states = np.max(assignments.flatten()) + 1
    n_assigns_before_trim = len(np.where(assignments.flatten() != -1)[0])

    rev_counts, t_matrix, populations, mapping = MSMLib.build_msm(
        assignments, lag_time=LagTime, symmetrize=Symmetrize,
        sliding_window=True, trim=True)

    MSMLib.apply_mapping_to_assignments(assignments, mapping)

    n_assigns_after_trim = len(np.where(assignments.flatten() != -1)[0])

    # if had input mapping, then update it
    if input_mapping != "None":
        mapping = mapping[input_mapping]

    # Print a statement showing how much data was discarded in trimming
    percent = (1.0 - float(n_assigns_after_trim) / float(n_assigns_before_trim)) * 100.0
    logger.warning("Ergodic trimming discarded: %f percent of your data", percent)

    # Save all output
    np.savetxt(FnPops, populations)
    np.savetxt(FnMap, mapping, "%d")
    scipy.io.mmwrite(str(FnTProb), t_matrix)
    scipy.io.mmwrite(str(FnTCounts), rev_counts)
    msmbuilder.io.saveh(FnAss, assignments)

    for output in outputlist:
        logger.info("Wrote: %s", output)

    return
Example 12: entry_point
def entry_point():
    args = parser.parse_args()
    arglib.die_if_path_exists(args.output)

    LagTimes = args.lagtime.split(',')
    MinLagtime = int(LagTimes[0])
    MaxLagtime = int(LagTimes[1])

    # Pass the symmetric flag
    if args.symmetrize in ["None", "none", None]:
        args.symmetrize = None

    impTimes = run(
        MinLagtime, MaxLagtime, args.interval, args.eigvals, args.assignments,
        (not args.notrim), args.symmetrize, args.procs)

    np.savetxt(args.output, impTimes)
    logger.info("Saved output to %s", args.output)
Example 13: entry_point
def entry_point():
    args, prep_metric = parser.parse_args()
    arglib.die_if_path_exists(args.output)

    if args.atom_indices.lower() == 'all':
        atom_indices = None
    else:
        atom_indices = np.loadtxt(args.atom_indices).astype(int)

    project = Project.load_from(args.project)

    min_length = int(float(args.min_length))
    # need to convert to float first because int can't
    # convert a string that is '1E3' for example... weird.

    tica_obj = run(
        prep_metric, project, args.delta_time, atom_indices=atom_indices,
        output=args.output, min_length=min_length, stride=args.stride)
Example 14: run_pcca_plus
def run_pcca_plus(num_macrostates, assignments, tProb, output_dir,
                  flux_cutoff=0.0, objective_function="crispness",
                  do_minimization=True):
    MacroAssignmentsFn = os.path.join(output_dir, "MacroAssignments.h5")
    MacroMapFn = os.path.join(output_dir, "MacroMapping.dat")
    ChiFn = os.path.join(output_dir, 'Chi.dat')
    AFn = os.path.join(output_dir, 'A.dat')
    arglib.die_if_path_exists([MacroAssignmentsFn, MacroMapFn, ChiFn, AFn])

    logger.info("Running PCCA+...")
    A, chi, vr, MAP = lumping.pcca_plus(
        tProb, num_macrostates, flux_cutoff=flux_cutoff,
        do_minimization=do_minimization, objective_function=objective_function)

    MSMLib.apply_mapping_to_assignments(assignments, MAP)

    np.savetxt(ChiFn, chi)
    np.savetxt(AFn, A)
    np.savetxt(MacroMapFn, MAP, "%d")
    msmbuilder.io.saveh(MacroAssignmentsFn, assignments)

    logger.info('Saved output to: %s, %s, %s, %s',
                ChiFn, AFn, MacroMapFn, MacroAssignmentsFn)
Example 15: entry_point
def entry_point():
    args = parser.parse_args()

    T = scipy.io.mmread(args.tProb)
    state = int(args.state)
    print(args.state, state)

    # Check output isn't taken
    if state == -1:
        base_filename = "PairwiseMFPTs.dat"
    else:
        base_filename = "MFPTs_%d.dat" % state
    output_filename = os.path.join(args.output_dir, base_filename)
    arglib.die_if_path_exists(output_filename)

    MFPTs = run(T, state)

    np.savetxt(output_filename, MFPTs)
    logger.info("Saved output to %s" % output_filename)