This article collects typical usage examples of the Python method pymatgen.apps.borg.queen.BorgQueen.save_data. If you are unsure what BorgQueen.save_data does, how to call it, or where it is useful, the curated examples below should help. For more context you can also look at the containing class, pymatgen.apps.borg.queen.BorgQueen.
Three code examples of BorgQueen.save_data are shown below.
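Before the numbered examples, here is a minimal sketch of the typical workflow around BorgQueen.save_data, assuming a directory of VASP runs; the root directory and the file name vasp_analysis.json are placeholders rather than values taken from the examples.

import multiprocessing

from pymatgen.apps.borg.hive import VaspToComputedEntryDrone
from pymatgen.apps.borg.queen import BorgQueen

# A drone knows how to parse one kind of calculation directory into entries.
drone = VaspToComputedEntryDrone()

# The queen dispatches directories to drones, one worker per CPU here.
queen = BorgQueen(drone, number_of_drones=multiprocessing.cpu_count())
queen.parallel_assimilate("path/to/vasp_runs")  # placeholder root directory

# Persist the assimilated entries so later sessions can skip re-parsing.
queen.save_data("vasp_analysis.json")

# A later session reloads the saved data instead of re-assimilating.
queen2 = BorgQueen(drone)
queen2.load_data("vasp_analysis.json")
entries = queen2.get_data()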
Example 1: get_energies
# Required import: from pymatgen.apps.borg.queen import BorgQueen [as alias]
# Or: from pymatgen.apps.borg.queen.BorgQueen import save_data [as alias]
def get_energies(rootdir, reanalyze, verbose, detailed, sort):
    """
    Assimilate VASP runs under rootdir and print a table of their energies.
    """
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)
    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])
    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)
    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])
    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((e.data["filename"].replace("./", ""),
                         re.sub(r"\s+", "", e.composition.formula),
                         "{:.5f}".format(e.energy),
                         "{:.5f}".format(e.energy_per_atom),
                         delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        t = PrettyTable(headers)
        t.align["Directory"] = "l"
        for d in all_data:
            t.add_row(d)
        print(t)
        print(msg)
    else:
        print("No valid vasp run found.")
Example 2: get_energies
# Required import: from pymatgen.apps.borg.queen import BorgQueen [as alias]
# Or: from pymatgen.apps.borg.queen.BorgQueen import save_data [as alias]
def get_energies(rootdir, reanalyze, verbose, pretty):
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)
    drone = GaussianToComputedEntryDrone(inc_structure=True,
                                         parameters=['filename'])
    ncpus = multiprocessing.cpu_count()
    logging.info('Detected {} cpus'.format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(save_file) and not reanalyze:
        msg = ('Using previously assimilated data from {}. '
               'Use -f to force re-analysis.').format(save_file)
        queen.load_data(save_file)
    else:
        queen.parallel_assimilate(rootdir)
        msg = 'Results saved to {} for faster reloading.'.format(save_file)
        queen.save_data(save_file)
    entries = queen.get_data()
    entries = sorted(entries, key=lambda x: x.parameters['filename'])
    all_data = [(e.parameters['filename'].replace("./", ""),
                 re.sub(r"\s+", "", e.composition.formula),
                 "{}".format(e.parameters['charge']),
                 "{}".format(e.parameters['spin_mult']),
                 "{:.5f}".format(e.energy),
                 "{:.5f}".format(e.energy_per_atom),
                 ) for e in entries]
    headers = ("Directory", "Formula", "Charge", "Spin Mult.", "Energy",
               "E/Atom")
    print(tabulate(all_data, headers=headers))
    print("")
    print(msg)
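The main structural difference from Example 1 is the drone. As a short comparison based on the two examples above (not on additional API guarantees), the Gaussian drone exposes the requested metadata through ComputedEntry.parameters, while the VASP drones use ComputedEntry.data:

from pymatgen.apps.borg.hive import (GaussianToComputedEntryDrone,
                                     VaspToComputedEntryDrone)

# The Gaussian drone copies the requested fields into entry.parameters.
gaussian_drone = GaussianToComputedEntryDrone(inc_structure=True,
                                              parameters=['filename'])

# The VASP drone stores the requested fields in entry.data instead.
vasp_drone = VaspToComputedEntryDrone(inc_structure=True,
                                      data=['filename'])

# Downstream code therefore reads the metadata differently:
#   e.parameters['filename']   # entries assimilated by the Gaussian drone
#   e.data['filename']         # entries assimilated by the VASP drone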
Example 3: get_energies
# Required import: from pymatgen.apps.borg.queen import BorgQueen [as alias]
# Or: from pymatgen.apps.borg.queen.BorgQueen import save_data [as alias]
def get_energies(rootdir, reanalyze, verbose, detailed, sort, formulaunit,
                 debug, hull, threshold, args, templatestructure):
    """
    Doc string.
    """
    ion_list = 'Novalue'
    ave_key_list = 'Novalue'
    threscount = 0
    if verbose and not debug:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)
    elif debug:
        logging.basicConfig(level=logging.DEBUG)
    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])
    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)
    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])
    # logging.debug('First Energy entry is {}'.format(entries[0]))
    base_energy = entries[0].energy
    logging.debug('Type of entries is: {}'.format(type(entries)))
    logging.debug('First Element of Entries is: {}'.format(entries[0]))
    # logging.debug('First Energy entry structure is {}'.format(entries[0].structure))
    xy_direction = int(args.XYdirection)
    tolerance = float(args.tolerance)
    if args.template:
        logging.debug('Temp Structure site info is: {}'.format(
            Na12(['Co', 'Mn'], ['Na'], templatestructure, templatestructure,
                 XY_Direction=xy_direction, tol=tolerance)))
        template_site_info = Na12(['Co', 'Mn'], ['Na'], templatestructure,
                                  templatestructure,
                                  XY_Direction=xy_direction, tol=tolerance)
    all_data = []
    energy_diff = []
    threshold = float(threshold)
    Structure_info_dict = {}
    check_ion_seq = [args.dupion]
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        entry_path = e.data['filename'].rsplit('/', 1)[0]
        entry_site_info = Na12(['Co', 'Mn'], ['Na'], e.structure, e.structure,
                               XY_Direction=xy_direction, tol=tolerance)
        logging.debug('Total Na site: {}'.format(entry_site_info['Total_Na_Site']))
        # Coordination extraction part
        # na_layer_site_fcoords = [site._fcoords for site in s if site.specie.symbol == "Na"]
        # if 'Cif_Structure' in e.data.keys():
        #     na_sites_fcoords = [site._fcoords for site in e.data['Cif_Structure'] if site.specie.symbol == 'Na']
        #     na_sites_fcoords_list_tuple = [tuple(coord) for coord in na_sites_fcoords]
        na_sites_fcoords = [site._fcoords for site in e.data['CONTCAR_Structure']
                            if site.specie.symbol == 'Na']
        na_sites_fcoords_list_tuple = [tuple(coord) for coord in na_sites_fcoords]
# ......... part of the code is omitted here .........