

Python path.zpath Function Code Examples

This article collects typical usage examples of the zpath function from Python's monty.os.path module. If you have been wondering what zpath does, how to call it, or what it looks like in real code, the hand-picked examples below may help.


The following presents 15 code examples of the zpath function, sorted by popularity by default.
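Before the examples, a minimal sketch of what zpath actually does (the file name here is illustrative): given an uncompressed path, zpath returns the first existing variant among the plain name and its compressed forms (.gz, .bz2, .z, and their uppercase counterparts). If no variant exists, the original path is returned unchanged, which is why several examples below still pair zpath with os.path.exists().

from monty.os.path import zpath

# Suppose a finished run left only "OUTCAR.gz" on disk.
# zpath("OUTCAR") then resolves to "OUTCAR.gz"; if neither the plain
# file nor any compressed variant exists, "OUTCAR" comes back as-is,
# so callers still need an os.path.exists() check before reading.
resolved = zpath("OUTCAR")
print(resolved)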

Example 1: run_task

    def run_task(self, fw_spec):
        chgcar_start = False
        # read the VaspInput from the previous run

        poscar = Poscar.from_file(zpath('POSCAR'))
        incar = Incar.from_file(zpath('INCAR'))

        # figure out what GGA+U values to use and override them
        # LDAU values to use
        mpvis = MPVaspInputSet()
        ggau_incar = mpvis.get_incar(poscar.structure).as_dict()
        incar_updates = {k: ggau_incar[k] for k in ggau_incar.keys() if 'LDAU' in k}

        for k in ggau_incar:
            # update any parameters not set explicitly in previous INCAR
            if k not in incar and k in ggau_incar:
                incar_updates[k] = ggau_incar[k]

        incar.update(incar_updates)  # override the +U keys


        # start from the CHGCAR of previous run
        if os.path.exists('CHGCAR'):
            incar['ICHARG'] = 1
            chgcar_start = True

        # write back the new INCAR to the current directory
        incar.write_file('INCAR')
        return FWAction(stored_data={'chgcar_start': chgcar_start})
Author: aykol, Project: MPWorks, Lines: 29, Source: vasp_setup_tasks.py

Example 2: from_directory

    def from_directory(input_dir):
        """
        Read in a set of FEFF input files from a directory, which is
        useful when existing FEFF input needs some adjustment.
        """
        sub_d = {}
        for fname, ftype in [("HEADER", Header), ("PARAMETERS", Tags)]:
            fullzpath = zpath(os.path.join(input_dir, fname))
            sub_d[fname.lower()] = ftype.from_file(fullzpath)

        # Generation of FEFFDict set requires absorbing atom, need to search
        # the index of absorption atom in the structure according to the
        # distance matrix and shell species information contained in feff.inp

        absorber_index = []
        radius = None
        feffinp = zpath(os.path.join(input_dir, 'feff.inp'))

        if "RECIPROCAL" not in sub_d["parameters"]:
            input_atoms = Atoms.cluster_from_file(feffinp)
            shell_species = np.array([x.species_string for x in input_atoms])

            # First row of distance matrix represents the distance from the absorber to
            # the rest atoms
            distance_matrix = input_atoms.distance_matrix[0, :]

            # Get radius value
            from math import ceil
            radius = int(ceil(input_atoms.get_distance(input_atoms.index(input_atoms[0]),
                                                       input_atoms.index(input_atoms[-1]))))

            for site_index, site in enumerate(sub_d['header'].struct):

                if site.specie == input_atoms[0].specie:
                    site_atoms = Atoms(sub_d['header'].struct, absorbing_atom=site_index,
                                       radius=radius)
                    site_distance = np.array(site_atoms.get_lines())[:, 5].astype(np.float64)
                    site_shell_species = np.array(site_atoms.get_lines())[:, 4]
                    shell_overlap = min(shell_species.shape[0], site_shell_species.shape[0])

                    if np.allclose(distance_matrix[:shell_overlap], site_distance[:shell_overlap]) and \
                            np.all(site_shell_species[:shell_overlap] == shell_species[:shell_overlap]):
                        absorber_index.append(site_index)

        if "RECIPROCAL" in sub_d["parameters"]:
            absorber_index = sub_d["parameters"]["TARGET"]
            absorber_index[0] = int(absorber_index[0]) - 1

        # Generate the input set
        if 'XANES' in sub_d["parameters"]:
            CONFIG = loadfn(os.path.join(MODULE_DIR, "MPXANESSet.yaml"))
            if radius is None:
                radius = 10
            return FEFFDictSet(absorber_index[0], sub_d['header'].struct, radius=radius,
                               config_dict=CONFIG, edge=sub_d["parameters"]["EDGE"],
                               nkpts=1000, user_tag_settings=sub_d["parameters"])
Author: ExpHP, Project: pymatgen, Lines: 56, Source: sets.py

Example 3: process_task

    def process_task(self, path):
        try:
            #Override incorrect outcar subdocs for two step relaxations
            if os.path.exists(os.path.join(path, "relax2")):
                try:
                    run_stats = {}
                    for i in [1,2]:
                        outcar = Outcar(zpath(os.path.join(path,"relax"+str(i), "OUTCAR")))
                        m_key = "calculations."+str(i-1)+".output.outcar"
                        self.tasks.update({'dir_name_full': path}, {'$set': {m_key: outcar.as_dict()}})
                        run_stats["relax"+str(i)] = outcar.run_stats
                except:
                    logger.error("Bad OUTCAR for {}.".format(path))

                try:
                    overall_run_stats = {}
                    for key in ["Total CPU time used (sec)", "User time (sec)",
                                "System time (sec)", "Elapsed time (sec)"]:
                        overall_run_stats[key] = sum([v[key]
                                          for v in run_stats.values()])
                    run_stats["overall"] = overall_run_stats
                except:
                    logger.error("Bad run stats for {}.".format(path))

                self.tasks.update({'dir_name_full': path}, {'$set': {"run_stats": run_stats}})
                print('FINISHED', path)
            else:
                print('SKIPPING', path)
        except:
            print('-----')
            print('ENCOUNTERED AN EXCEPTION!!!', path)
            traceback.print_exc()
            print('-----')
Author: matk86, Project: MPWorks, Lines: 33, Source: reparse_old_tasks.py

Example 4: is_valid_vasp_dir

def is_valid_vasp_dir(mydir):
    # note that the OUTCAR and POSCAR are known to be empty in some
    # situations
    files = ["OUTCAR", "POSCAR", "INCAR", "KPOINTS"]
    for f in files:
        # resolve to the gzipped variant when only that exists, and stat the
        # resolved path so a missing plain file does not raise an OSError
        m_file = zpath(os.path.join(mydir, f))
        if not os.path.exists(m_file) or os.stat(m_file).st_size == 0:
            return False
    return True
Author: xhqu1981, Project: MPWorks, Lines: 9, Source: mp_vaspdrone.py

Example 5: correct

    def correct(self):
        backup(VASP_BACKUP_FILES | {self.output_filename})
        actions = []
        vi = VaspInput.from_directory(".")

        if "lrf_comm" in self.errors:
            if Outcar(zpath(os.path.join(
                    os.getcwd(), "OUTCAR"))).is_stopped is False:
                if not vi["INCAR"].get("LPEAD"):
                    actions.append({"dict": "INCAR",
                                    "action": {"_set": {"LPEAD": True}}})

        VaspModder(vi=vi).apply_actions(actions)
        return {"errors": list(self.errors), "actions": actions}
Author: matk86, Project: custodian, Lines: 14, Source: handlers.py

Example 6: detect

    def detect(self, dir_name):

        signals = set()

        for filename in self.filename_list:
            #find the strings that match in the file
            if not self.ignore_nonexistent_file or os.path.exists(zpath(os.path.join(dir_name, filename))):
                f = last_relax(os.path.join(dir_name, filename))
                errors = string_list_in_file(self.signames_targetstrings.values(), f, ignore_case=self.ignore_case)
                if self.invert_search:
                    errors_inverted = [item for item in self.targetstrings_signames.keys() if item not in errors]
                    errors = errors_inverted

                #add the signal names for those strings
                for e in errors:
                    signals.add(self.targetstrings_signames[e])
        return signals
Author: aykol, Project: MPWorks, Lines: 17, Source: signals.py

Example 7: update_checkpoint

    def update_checkpoint(launchpad, launch_dir, launch_id, checkpoint):
        """
        Helper function to update checkpoint

        Args:
            launchpad (LaunchPad): LaunchPad to ping with checkpoint data
            launch_dir (str): directory in which FW_offline.json was created
            launch_id (int): launch id to update
            checkpoint (dict): checkpoint data
        """
        if launchpad:
            launchpad.ping_launch(launch_id, checkpoint=checkpoint)
        else:
            fpath = zpath("FW_offline.json")
            with zopen(fpath) as f_in:
                d = json.loads(f_in.read())
                d['checkpoint'] = checkpoint
                with zopen(fpath, "wt") as f_out:
                    f_out.write(json.dumps(d, ensure_ascii=False))
Author: digitalsatori, Project: fireworks, Lines: 19, Source: rocket.py
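Examples 7 and 12 pair zpath with monty's zopen: zpath resolves whichever file name is actually on disk, and zopen opens it with transparent (de)compression. Below is a minimal standalone sketch of the same read-modify-write round trip; the file name state.json is hypothetical and may exist as state.json or state.json.gz.

import json

from monty.io import zopen
from monty.os.path import zpath

# zpath returns "state.json.gz" if only the gzipped copy exists,
# otherwise the plain name ("state.json" is a hypothetical example).
fpath = zpath("state.json")
with zopen(fpath, "rt") as f_in:
    d = json.load(f_in)
d["checkpoint"] = {"task_n": 1}
# zopen infers compression from the suffix of fpath, so the rewrite
# keeps the same (possibly gzipped) format.
with zopen(fpath, "wt") as f_out:
    f_out.write(json.dumps(d))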

Example 8: track_file

    def track_file(self, launch_dir=None):
        """
        Reads the monitored file and returns the last N lines
        :param launch_dir: directory where job was launched in case of relative filename
        :return: the file content (last N lines)
        """
        m_file = self.filename
        if launch_dir and not os.path.isabs(self.filename):
            m_file = os.path.join(launch_dir, m_file)

        lines = []
        if os.path.exists(m_file):
            with zopen(zpath(m_file)) as f:
                for l in reverse_readline(f):
                    lines.append(l)
                    if len(lines) == self.nlines:
                        break
            self.content = '\n'.join(reversed(lines))

        return self.content
Author: catchmonster, Project: fireworks, Lines: 20, Source: firework.py

Example 9: track_file

    def track_file(self, launch_dir=None):
        """
        Reads the monitored file and returns the last N lines

        Args:
            launch_dir (str): directory where job was launched in case of relative filename

        Returns:
            str: the content(last N lines)
        """
        m_file = self.filename
        if launch_dir and not os.path.isabs(self.filename):
            m_file = os.path.join(launch_dir, m_file)
        lines = []
        if self.allow_zipped:
            m_file = zpath(m_file)
        if os.path.exists(m_file):
            with zopen(m_file, "rt") as f:
                for l in reverse_readline(f):
                    lines.append(l)
                    if len(lines) == self.nlines:
                        break
            self.content = '\n'.join(reversed(lines))
        return self.content
Author: blondegeek, Project: fireworks, Lines: 24, Source: firework.py

Example 10: last_relax

def last_relax(filename):
    # for old runs
    m_dir = os.path.dirname(filename)
    m_file = os.path.basename(filename)

    if os.path.exists(zpath(os.path.join(m_dir, 'relax2', m_file))):
        return zpath(os.path.join(m_dir, 'relax2', m_file))

    elif os.path.exists(zpath(filename)):
        return zpath(filename)

    relaxations = glob.glob('%s.relax*' % filename)
    if relaxations:
        return sorted(relaxations)[-1]

    # backup for old runs
    elif os.path.exists(zpath(os.path.join(m_dir, 'relax1', m_file))):
        return zpath(os.path.join(m_dir, 'relax1', m_file))

    return filename
Author: aykol, Project: MPWorks, Lines: 20, Source: wf_utils.py

Example 11: run_task

    def run_task(self, fw_spec):
        incar = Incar.from_file(zpath("INCAR"))
        incar.update({"ISIF": 2})
        incar.write_file("INCAR")
        return FWAction()
Author: materialsproject, Project: MPWorks, Lines: 5, Source: elastic_tasks.py

Example 12: run

    def run(self, pdb_on_exception=False):
        """
        Run the rocket (check out a job from the database and execute it)

        Args:
            pdb_on_exception (bool): whether to invoke the debugger on
                a caught exception.  Default False.
        """
        all_stored_data = {}  # combined stored data for *all* the Tasks
        all_update_spec = {}  # combined update_spec for *all* the Tasks
        all_mod_spec = []  # combined mod_spec for *all* the Tasks

        lp = self.launchpad
        launch_dir = os.path.abspath(os.getcwd())
        logdir = lp.get_logdir() if lp else None
        l_logger = get_fw_logger('rocket.launcher', l_dir=logdir,
                                 stream_level=ROCKET_STREAM_LOGLEVEL)

        # check a FW job out of the launchpad
        if lp:
            m_fw, launch_id = lp.checkout_fw(self.fworker, launch_dir, self.fw_id)
        else:  # offline mode
            m_fw = Firework.from_file(os.path.join(os.getcwd(), "FW.json"))

            # set the run start time
            fpath = zpath("FW_offline.json")
            with zopen(fpath) as f_in:
                d = json.loads(f_in.read())
                d['started_on'] = datetime.utcnow().isoformat()
                with zopen(fpath, "wt") as f_out:
                    f_out.write(json.dumps(d, ensure_ascii=False))

            launch_id = None  # we don't need this in offline mode...

        if not m_fw:
            print("No FireWorks are ready to run and match query! {}".format(self.fworker.query))
            return False

        final_state = None
        ping_stop = None
        btask_stops = []

        try:
            if '_launch_dir' in m_fw.spec and lp:
                prev_dir = launch_dir
                launch_dir = os.path.expandvars(m_fw.spec['_launch_dir'])
                if not os.path.isabs(launch_dir):
                    launch_dir = os.path.normpath(os.path.join(os.getcwd(), launch_dir))
                # thread-safe "mkdir -p"
                try:
                    os.makedirs(launch_dir)
                except OSError as exception:
                    if exception.errno != errno.EEXIST:
                        raise
                os.chdir(launch_dir)

                if not os.path.samefile(launch_dir, prev_dir):
                    lp.change_launch_dir(launch_id, launch_dir)

                if not os.listdir(prev_dir) and REMOVE_USELESS_DIRS:
                    try:
                        os.rmdir(prev_dir)
                    except:
                        pass

            recovery = m_fw.spec.get('_recovery', None)
            if recovery:
                recovery_dir = recovery.get('_prev_dir')
                recovery_mode = recovery.get('_mode')
                starting_task = recovery.get('_task_n')
                all_stored_data.update(recovery.get('_all_stored_data'))
                all_update_spec.update(recovery.get('_all_update_spec'))
                all_mod_spec.extend(recovery.get('_all_mod_spec'))
                if lp:
                    l_logger.log(
                                logging.INFO,
                                'Recovering from task number {} in folder {}.'.format(starting_task,
                                                                                      recovery_dir))
                if recovery_mode == 'cp' and launch_dir != recovery_dir:
                    if lp:
                        l_logger.log(
                                    logging.INFO,
                                    'Copying data from recovery folder {} to folder {}.'.format(recovery_dir,
                                                                                                launch_dir))
                    distutils.dir_util.copy_tree(recovery_dir, launch_dir, update=1)

            else:
                starting_task = 0
                files_in = m_fw.spec.get("_files_in", {})
                prev_files = m_fw.spec.get("_files_prev", {})
                for f in set(files_in.keys()).intersection(prev_files.keys()):
                    # We use zopen for the file objects for transparent handling
                    # of zipped files. shutil.copyfileobj does the actual copy
                    # in chunks that avoid memory issues.
                    with zopen(prev_files[f], "rb") as fin, zopen(files_in[f], "wb") as fout:
                        shutil.copyfileobj(fin, fout)

            if lp:
                message = 'RUNNING fw_id: {} in directory: {}'.\
                    format(m_fw.fw_id, os.getcwd())
#......... part of the code omitted here .........
Author: digitalsatori, Project: fireworks, Lines: 101, Source: rocket.py

Example 13: assimilate


#......... part of the code omitted here .........
                        for i in [1,2]:
                            o_path = os.path.join(path,"relax"+str(i),"OUTCAR")
                            o_path = o_path if os.path.exists(o_path) else o_path+".gz"
                            outcar = Outcar(o_path)
                            d["calculations"][i-1]["output"]["outcar"] = outcar.as_dict()
                            run_stats["relax"+str(i)] = outcar.run_stats
                    except:
                        logger.error("Bad OUTCAR for {}.".format(path))

                    try:
                        overall_run_stats = {}
                        for key in ["Total CPU time used (sec)", "User time (sec)",
                                    "System time (sec)", "Elapsed time (sec)"]:
                            overall_run_stats[key] = sum([v[key]
                                              for v in run_stats.values()])
                        run_stats["overall"] = overall_run_stats
                    except:
                        logger.error("Bad run stats for {}.".format(path))

                    d["run_stats"] = run_stats

                # add is_compatible
                mpc = MaterialsProjectCompatibility("Advanced")

                try:
                    func = d["pseudo_potential"]["functional"]
                    labels = d["pseudo_potential"]["labels"]
                    symbols = ["{} {}".format(func, label) for label in labels]
                    parameters = {"run_type": d["run_type"],
                              "is_hubbard": d["is_hubbard"],
                              "hubbards": d["hubbards"],
                              "potcar_symbols": symbols}
                    entry = ComputedEntry(Composition(d["unit_cell_formula"]),
                                          0.0, 0.0, parameters=parameters,
                                          entry_id=d["task_id"])

                    d['is_compatible'] = bool(mpc.process_entry(entry))
                except:
                    traceback.print_exc()
                    print('ERROR in getting compatibility')
                    d['is_compatible'] = None


                #task_type dependent processing
                if 'static' in d['task_type']:
                    launch_doc = launches_coll.find_one({"fw_id": d['fw_id'], "launch_dir": {"$regex": d["dir_name"]}}, {"action.stored_data": 1})
                    for i in ["conventional_standard_structure", "symmetry_operations",
                              "symmetry_dataset", "refined_structure"]:
                        try:
                            d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                        except:
                            pass

                #parse band structure if necessary
                if ('band structure' in d['task_type'] or "Uniform" in d['task_type'])\
                    and d['state'] == 'successful':
                    launch_doc = launches_coll.find_one({"fw_id": d['fw_id'], "launch_dir": {"$regex": d["dir_name"]}},
                                                        {"action.stored_data": 1})
                    vasp_run = Vasprun(zpath(os.path.join(path, "vasprun.xml")), parse_projected_eigen=False)

                    if 'band structure' in d['task_type']:
                        def string_to_numlist(stringlist):
                            g = re.search(r'([0-9\-\.eE]+)\s+([0-9\-\.eE]+)\s+([0-9\-\.eE]+)', stringlist)
                            return [float(g.group(i)) for i in range(1,4)]

                        for i in ["kpath_name", "kpath"]:
                            d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                        kpoints_doc = d['stored_data']['kpath']['kpoints']
                        for i in kpoints_doc:
                            kpoints_doc[i]=string_to_numlist(kpoints_doc[i])
                        bs=vasp_run.get_band_structure(efermi=d['calculations'][0]['output']['outcar']['efermi'],
                                                       line_mode=True)
                    else:
                        bs=vasp_run.get_band_structure(efermi=d['calculations'][0]['output']['outcar']['efermi'],
                                                       line_mode=False)
                    bs_json = json.dumps(bs.as_dict(), cls=MontyEncoder)
                    fs = gridfs.GridFS(db, "band_structure_fs")
                    bs_id = fs.put(bs_json)
                    d['calculations'][0]["band_structure_fs_id"] = bs_id

                    # also override band gap in task doc
                    gap = bs.get_band_gap()
                    vbm = bs.get_vbm()
                    cbm = bs.get_cbm()
                    update_doc = {'bandgap': gap['energy'], 'vbm': vbm['energy'], 'cbm': cbm['energy'], 'is_gap_direct': gap['direct']}
                    d['analysis'].update(update_doc)
                    d['calculations'][0]['output'].update(update_doc)

                coll.update_one({"dir_name": d["dir_name"]}, {'$set': d}, upsert=True)

                return d["task_id"], d
            else:
                logger.info("Skipping duplicate {}".format(d["dir_name"]))
                return result["task_id"], result

        else:
            d["task_id"] = 0
            logger.info("Simulated insert into database for {} with task_id {}"
            .format(d["dir_name"], d["task_id"]))
            return 0, d
Author: xhqu1981, Project: MPWorks, Lines: 101, Source: mp_vaspdrone.py

Example 14: run_task


#......... part of the code omitted here .........
                       PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
                       StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
            "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
            "no_handler": []
            }

        vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)

        if isinstance(vasp_cmd, six.string_types):
            vasp_cmd = os.path.expandvars(vasp_cmd)
            vasp_cmd = shlex.split(vasp_cmd)

        # initialize variables
        job_type = self.get("job_type", "normal")
        scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
        gzip_output = self.get("gzip_output", True)
        max_errors = self.get("max_errors", 5)
        auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
        gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
        if gamma_vasp_cmd:
            gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

        # construct jobs
        if job_type == "normal":
            jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]
        elif job_type == "double_relaxation_run":
            jobs = VaspJob.double_relaxation_run(vasp_cmd, auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "metagga_opt_run":
            jobs = VaspJob.metagga_opt_run(vasp_cmd, auto_npar=auto_npar,
                                                 ediffg=self.get("ediffg"),
                                                 half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))

        elif job_type == "full_opt_run":
            jobs = VaspJob.full_opt_run(vasp_cmd, auto_npar=auto_npar,
                                        ediffg=self.get("ediffg"),
                                        max_steps=9,
                                        half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
        elif job_type == "neb":
            # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
            # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
            # has a convention for nnodes (with that name). Can't the number of nodes be made a
            # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
            # when setting job_type=NEB? Then someone can use this feature in non-reservation
            # mode and without this complication. -computron
            nnodes = int(fw_spec["_queueadapter"]["nnodes"])

            # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
            # it is trying to override the number of processors. But I tried running the code
            # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
            # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
            # the rest of this task's convention and documentation
            # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
            # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
            # inside this.
            # -computron

            # Index the tag "-n" or "-np"
            index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
            ppn = int(vasp_cmd[index[0] + 1])
            vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            # Do the same for gamma_vasp_cmd
            if gamma_vasp_cmd:
                index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
                ppn = int(gamma_vasp_cmd[index[0] + 1])
                gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

            jobs = [VaspNEBJob(vasp_cmd, final=False, auto_npar=auto_npar,
                               gamma_vasp_cmd=gamma_vasp_cmd)]
        else:
            raise ValueError("Unsupported job type: {}".format(job_type))

        # construct handlers

        handler_group = self.get("handler_group", "default")
        if isinstance(handler_group, six.string_types):
            handlers = handler_groups[handler_group]
        else:
            handlers = handler_group

        if self.get("max_force_threshold"):
            handlers.append(MaxForceErrorHandler(max_force_threshold=self["max_force_threshold"]))

        if self.get("wall_time"):
            handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

        if job_type == "neb":
            validators = []  # CINEB vasprun.xml sometimes incomplete, file structure different
        else:
            validators = [VasprunXMLValidator(), VaspFilesValidator()]

        c = Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
                      scratch_dir=scratch_dir, gzipped_output=gzip_output)

        c.run()

        if os.path.exists(zpath("custodian.json")):
            return FWAction(stored_data=loadfn(zpath("custodian.json")))
Author: shyamd, Project: MatMethods, Lines: 101, Source: run_calc.py

Example 15: process_fw

    def process_fw(self, dir_name, d):
        d["task_id_deprecated"] = int(d["task_id"].split('-')[-1])  # useful for WC and AJ

        # update the run fields to give species group in root, if exists
        for r in d['run_tags']:
            if "species_group=" in r:
                d["species_group"] = int(r.split("=")[-1])
                break

        # custom Materials Project post-processing for FireWorks
        with zopen(zpath(os.path.join(dir_name, 'FW.json'))) as f:
            fw_dict = json.load(f)
            d['fw_id'] = fw_dict['fw_id']
            d['snl'] = fw_dict['spec']['mpsnl']
            d['snlgroup_id'] = fw_dict['spec']['snlgroup_id']
            d['vaspinputset_name'] = fw_dict['spec'].get('vaspinputset_name')
            d['task_type'] = fw_dict['spec']['task_type']
            # Process data for deformed structures
            if 'deformed' in d['task_type']:
                d['deformation_matrix'] = fw_dict['spec']['deformation_matrix']
                d['original_task_id'] = fw_dict['spec']['original_task_id']
            if not self.update_duplicates:
                if 'optimize structure' in d['task_type'] and 'output' in d:
                    # create a new SNL based on optimized structure
                    new_s = Structure.from_dict(d['output']['crystal'])
                    old_snl = StructureNL.from_dict(d['snl'])
                    history = old_snl.history
                    history.append(
                        {'name': 'Materials Project structure optimization',
                         'url': 'http://www.materialsproject.org',
                         'description': {'task_type': d['task_type'],
                                         'fw_id': d['fw_id'],
                                         'task_id': d['task_id']}})
                    new_snl = StructureNL(new_s, old_snl.authors, old_snl.projects,
                                          old_snl.references, old_snl.remarks,
                                          old_snl.data, history)

                    # enter new SNL into SNL db
                    # get the SNL mongo adapter
                    sma = SNLMongoAdapter.auto_load()

                    # add snl
                    mpsnl, snlgroup_id, spec_group = sma.add_snl(new_snl, snlgroup_guess=d['snlgroup_id'])
                    d['snl_final'] = mpsnl.as_dict()
                    d['snlgroup_id_final'] = snlgroup_id
                    d['snlgroup_changed'] = (d['snlgroup_id'] !=
                                             d['snlgroup_id_final'])
                else:
                    d['snl_final'] = d['snl']
                    d['snlgroup_id_final'] = d['snlgroup_id']
                    d['snlgroup_changed'] = False

        # custom processing for detecting errors
        new_style = os.path.exists(zpath(os.path.join(dir_name, 'FW.json')))
        vasp_signals = {}
        critical_errors = ["INPUTS_DONT_EXIST",
                           "OUTPUTS_DONT_EXIST", "INCOHERENT_POTCARS",
                           "VASP_HASNT_STARTED", "VASP_HASNT_COMPLETED",
                           "CHARGE_UNCONVERGED", "NETWORK_QUIESCED",
                           "HARD_KILLED", "WALLTIME_EXCEEDED",
                           "ATOMS_TOO_CLOSE", "DISK_SPACE_EXCEEDED", "NO_RELAX2", "POSITIVE_ENERGY"]

        last_relax_dir = dir_name

        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals['last_relax_dir'] = last_relax_dir
        ## see what error signals are present

        print "getting signals for dir :{}".format(last_relax_dir)

        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())

        if d['state'] == 'successful' and 'optimize structure' in d['task_type']:
            sl.append(Relax2ExistsSignal())

        signals = sl.detect_all(last_relax_dir)

        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))
#......... part of the code omitted here .........
Author: xhqu1981, Project: MPWorks, Lines: 101, Source: mp_vaspdrone.py


Note: The monty.os.path.zpath examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.