

Python HelperFunctions.ensure_dir Method Code Examples

This page collects typical usage examples of the Python method HelperFunctions.ensure_dir. If you have been wondering exactly what HelperFunctions.ensure_dir does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples from the HelperFunctions module that provides this method.


The sections below present ten code examples of the HelperFunctions.ensure_dir method, sorted by popularity by default.
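Before going through the examples, it helps to know roughly what a helper like ensure_dir does: it creates an output directory (or the directory portion of an output file path) if it does not already exist, so that later writes do not fail. The sketch below is a hypothetical stand-in, not the actual implementation from the HelperFunctions module used in these projects; it simply assumes the helper is a thin wrapper around os.makedirs.

import os


def ensure_dir(path):
    """Create the directory needed for `path` if it does not already exist.

    Hypothetical sketch of HelperFunctions.ensure_dir. The examples below pass
    both directory names (e.g. "Figures/") and full file paths
    (e.g. "Sensitivity/HRS/star/star.txt"), so only the directory part is created.
    """
    directory = os.path.dirname(path) or path  # treat a bare name as the directory itself
    if not os.path.isdir(directory):
        os.makedirs(directory)

With such a helper in place, the pattern in every example below is the same: call ensure_dir on the output location once, then write figures, text files, or FITS products into it without any further existence checks.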

Example 1: check_high_t

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
import os

import matplotlib.pyplot as plt
import numpy as np

import HelperFunctions

def check_high_t(T=6000, metal=0.0, vsini=10):
    filenames = [f for f in os.listdir("./") if f.endswith("smoothed.fits") and f.startswith("H")]
    corrdir = "Cross_correlations/"
    logg = 4.5
    HelperFunctions.ensure_dir("Figures/")

    for rootfile in sorted(filenames):
        corrfile = "{0:s}{1:s}.{2:d}kps_{3:.1f}K{4:+.1f}{5:+.1f}".format(corrdir,
                                                                         rootfile.split(".fits")[0],
                                                                         vsini,
                                                                         T,
                                                                         logg,
                                                                         metal)
        print(corrfile)
        try:
            vel, corr = np.loadtxt(corrfile, unpack=True)
        except IOError:
            continue

        plt.plot(vel, corr, 'k-')
        plt.xlabel("Velocity")
        plt.ylabel("CCF")
        plt.title(rootfile.split(".fits")[0])
        plt.show()
Author: kgullikson88 | Project: Chiron-Scripts | Lines of code: 26 | Source file: CheckCCFs.py

Example 2: parse_input

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
        spt_full = data.SpectralType().split()[0]
        spt = spt_full[0] + re.search(r'\d*\.?\d*', spt_full[1:]).group()

    d = {'Object': object,
         'plx': plx,
         'SpT': spt,
         'exptime': header['exptime']}
    return d


if __name__ == '__main__':
    scale = True
    early, late = parse_input(sys.argv[1:])

    # Add each late file to all of the early-type files
    HelperFunctions.ensure_dir('GeneratedObservations')
    for late_file in late:
        for early_file in early:
            outfilename = 'GeneratedObservations/{}_{}.fits'.format(early_file.split('/')[-1].split(
                '.fits')[0], late_file.split('/')[-1].split('.fits')[0])
            if scale:
                outfilename = outfilename.replace('.fits', '_scalex10.fits')
            if outfilename.split('/')[-1] in os.listdir('GeneratedObservations/'):
                print "File already generated. Skipping {}".format(outfilename)
                continue

            total, early_dict, late_dict = combine(early_file, late_file, increase_scale=scale)

            # Prepare for output
            column_list = []
            for order in total:
Author: kgullikson88 | Project: Chiron-Scripts | Lines of code: 33 | Source file: MakeSyntheticBinaryObservation.py

Example 3: check_all

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
import os

import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection used below

import HelperFunctions

def check_all():
    filenames = [f for f in os.listdir("./") if f.endswith("smoothed.fits") and f.startswith("H")]
    corrdir = "Cross_correlations/"
    vsini_values = [1, 10, 20, 30, 40]
    Temperatures = [3300, 3500, 3700, 3900, 4200, 4500, 5000, 5500]
    Temperatures = range(3000, 6800, 100)
    metals = [-0.5, 0.0, 0.5]
    logg = 4.5
    HelperFunctions.ensure_dir("Figures/")

    for rootfile in sorted(filenames):
        Tvals = []
        Zvals = []
        rotvals = []
        significance = []
        corrval = []
        for T in Temperatures:
            for metal in metals:
                for vsini in vsini_values:
                    corrfile = "{0:s}{1:s}.{2:d}kps_{3:.1f}K{4:+.1f}{5:+.1f}".format(corrdir,
                                                                                   rootfile.split(".fits")[0],
                                                                                   vsini,
                                                                                   T,
                                                                                   logg,
                                                                                   metal)
                    print(corrfile)
                    try:
                        vel, corr = np.loadtxt(corrfile, unpack=True)
                    except IOError:
                        continue

                    # Check the significance of the highest peak within +/- 500 km/s
                    left = np.searchsorted(vel, -500)
                    right = np.searchsorted(vel, 500)
                    idx = np.argmax(corr[left:right]) + left
                    v = vel[idx]
                    goodindices = np.where(np.abs(vel - v) > vsini)[0]
                    std = np.std(corr[goodindices])
                    mean = np.mean(corr[goodindices])
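                    # Replace with robust estimates: median for the center, 1.4826*MAD as a Gaussian-equivalent std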
                    mean = np.median(corr)
                    mad = HelperFunctions.mad(corr)
                    std = 1.4826 * mad
                    sigma = (corr[idx] - mean) / std

                    """
                    # Plot if > 3 sigma peak
                    if sigma > 4:
                        fig = plt.figure(10)
                        ax = fig.add_subplot(111)
                        ax.plot(vel, corr, 'k-', lw=2)
                        ax.set_xlabel("Velocity (km/s)")
                        ax.set_ylabel("CCF")
                        ax.set_title(r'{0:s}:  $T_s$={1:d}K & [Fe/H]={2:.1f}'.format(rootfile, T, metal))
                        ax.grid(True)
                        fig.savefig(u"Figures/{0:s}.pdf".format(corrfile.split("/")[-1]))
                        plt.close(fig)
                    """

                    Tvals.append(T)
                    Zvals.append(metal)
                    rotvals.append(vsini)
                    significance.append(sigma)
                    corrval.append(corr[idx] - np.median(corr))

        # Now, make a plot of the significance as a function of Temperature and metallicity for each vsini
        Tvals = np.array(Tvals)
        Zvals = np.array(Zvals)
        rotvals = np.array(rotvals)
        significance = np.array(significance)
        corrval = np.array(corrval)
        fig = plt.figure(1)
        ax = fig.add_subplot(111, projection='3d')
        ax.set_title("Significance Summary for %s" % (rootfile.split(".fits")[0].replace("_", " ")))
        for i, rot in enumerate(vsini_values):
            goodindices = np.where(abs(rotvals - rot) < 1e-5)[0]
            ax.set_xlabel("Temperature (K)")
            ax.set_ylabel("[Fe/H]")
            ax.set_zlabel("Significance")
            ax.plot(Tvals[goodindices], Zvals[goodindices], significance[goodindices], 'o', label="%i km/s" % rot)
            #ax.plot(Tvals[goodindices], Zvals[goodindices], corrval[goodindices], 'o', label="{0:d} km/s".format(rot))
        leg = ax.legend(loc='best', fancybox=True)
        leg.get_frame().set_alpha(0.5)
        fig.savefig("Figures/Summary_{0:s}.pdf".format(rootfile.split(".fits")[0]))
        idx = np.argmax(significance)
        #ax.plot(Tvals[idx], Zvals[idx], significance[idx], 'x', markersize=25, label="Most Significant")
        print(os.getcwd())
        plt.show()
Author: kgullikson88 | Project: Chiron-Scripts | Lines of code: 89 | Source file: CheckCCFs.py

Example 4:

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
import os

import numpy as np

import DataStructures
import HelperFunctions
from PlotBlackbodies import Planck
import Normalized_Xcorr


currentdir = os.getcwd() + "/"
homedir = os.environ["HOME"]
outfiledir = currentdir + "Cross_correlations/"
modeldir = homedir + "/School/Research/Models/Sorted/Stellar/Vband/"
minvel = -1000  # Minimum velocity to output, in km/s
maxvel = 1000

HelperFunctions.ensure_dir(outfiledir)

model_list = [modeldir + "lte30-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte31-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte32-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte33-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte34-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte35-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte36-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte37-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte38-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte39-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte40-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte42-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte43-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
              modeldir + "lte44-4.00-0.0.AGS.Cond.PHOENIX-ACES-2009.HighRes.7.sorted",
Author: kgullikson88 | Project: General | Lines of code: 33 | Source file: Correlate.py

Example 5: range

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
    matplotlib.use("tkagg")
import matplotlib.pyplot as plt
import numpy as np

import HelperFunctions


if __name__ == "__main__":
    filenames = [f for f in os.listdir("./") if f.endswith("smoothed.fits") and f.startswith("H")]
    corrdir = "Cross_correlations/"
    vsini_values = [1, 10, 20, 30, 40]
    Temperatures = [3300, 3500, 3700, 3900, 4200, 4500, 5000, 5500]
    Temperatures = range(3000, 6800, 100)
    metals = [-0.5, 0.0, 0.5]
    logg = 4.5
    HelperFunctions.ensure_dir("Figures/")

    for rootfile in sorted(filenames):
        Tvals = []
        Zvals = []
        rotvals = []
        significance = []
        for T in Temperatures:
            for metal in metals:
                for vsini in vsini_values:
                    corrfile = "{0:s}{1:s}.{2:d}kps_{3:.1f}K{4:+.1f}{5:+.1f}".format(corrdir,
                                                                                   rootfile.split(".fits")[0],
                                                                                   vsini,
                                                                                   T,
                                                                                   logg,
                                                                                   metal)
Author: gully | Project: IGRINS_Scripts | Lines of code: 33 | Source file: CheckCCFs.py

Example 6: ValueError

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
        plt.show()

        # Get instrument name from the header
        header = fits.getheader(fname)
        observatory = header["OBSERVAT"]
        if "ctio" in observatory.lower():
            instrument = "CHIRON"
            star = header["OBJECT"].replace(" ", "")
        else:
            instrument = header["INSTRUME"]
            if "ts23" in instrument.lower():
                instrument = "TS23"
                star = header["OBJECT"].replace(" ", "")
            elif "hrs" in instrument.lower():
                instrument = "HRS"
                star = header["OBJECT"].split()[0].replace("_", "")
            else:
                raise ValueError("Unknown instrument: %s" % instrument)

        outfilename = "%s/%s/%s/%s.txt" % (outdir, instrument, star, star)
        print(outfilename)
        HelperFunctions.ensure_dir(outfilename)
        np.savetxt(outfilename, np.transpose((output.x * 10.0, output.y)))

        # for i, order in enumerate(orders):
        #  outfilename = "%s/%s/%s/order%i.txt" %(outdir, instrument, star, i+1)
        #  np.savetxt(outfilename, np.transpose((order.x*10.0, order.y/order.cont)))
    
    
      
Author: kgullikson88 | Project: TS23-Scripts | Lines of code: 29 | Source file: PrepareForAnalyse.py

Example 7: range

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
    tellurics = False
    trimsize = 1
    windowsize = 101
    MS = SpectralTypeRelations.MainSequence()
    PMS = SpectralTypeRelations.PreMainSequence()
    vel_list = range(-400, 400, 50)
    outdir = "Sensitivity/"
    for arg in sys.argv[1:]:
        if "-e" in arg:
            extensions = False
        if "-t" in arg:
            tellurics = True  #telluric lines modeled but not removed
        else:
            fileList.append(arg)

    HelperFunctions.ensure_dir(outdir)
    outfile = open(outdir + "logfile.dat", "w")
    outfile.write("Sensitivity Analysis:\n*****************************\n\n")
    outfile.write(
        "Filename\t\t\tPrimary Temperature\tSecondary Temperature\tMass (Msun)\tMass Ratio\tVelocity\tPeak Correct?\tSignificance\n")

    for fname in fileList:
        if extensions:
            orders_original = HelperFunctions.ReadFits(fname, extensions=extensions, x="wavelength", y="flux",
                                                       errors="error")
            if tellurics:
                model_orders = HelperFunctions.ReadFits(fname, extensions=extensions, x="wavelength", y="model")
                for i, order in enumerate(orders_original):
                    orders_original[i].cont = FindContinuum.Continuum(order.x, order.y, lowreject=2, highreject=2)
                    orders_original[i].y /= model_orders[i].y
Author: kgullikson88 | Project: TS23-Scripts | Lines of code: 32 | Source file: SensitivityAnalysis_Fast.py

Example 8: slow_companion_search

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]

#......... some code omitted here .........
                            except AttributeError:
                                temperature_dict[fname] = np.nan  # Unknown
                                logging.warning('Spectral type retrieval from simbad failed! Entering NaN for primary temperature!')
                            datadict[fname] = orders
                        else:
                            orders = datadict[fname]

                        # Now, process the model
                        model_orders = process_model(model.copy(), orders, vsini_primary=vsini_prim, maxvel=1000.0,
                                                     debug=debug, oversample=1, logspace=False)

                        # Get order weights if addmode='T-weighted'
                        if addmode.lower() == 't-weighted':
                            get_weights = False
                            orderweights = [np.sum(temperature_weights(o.x)) for o in orders]
                            addmode = 'simple-weighted'

                        if debug and makeplots:
                            fig = plt.figure('T={}   vsini={}'.format(temp, vsini_sec))
                            for o, m in zip(orders, model_orders):
                                d_scale = np.std(o.y/o.cont)
                                m_scale = np.std(m.y/m.cont)
                                plt.plot(o.x, (o.y/o.cont-1.0)/d_scale, 'k-', alpha=0.4)
                                plt.plot(m.x, (m.y/m.cont-1.0)/m_scale, 'r-', alpha=0.6)
                            plt.show(block=False)

                        # Make sure the output directory exists
                        output_dir = "Cross_correlations/"
                        outfilebase = fname.split(".fits")[0]
                        if "/" in fname:
                            dirs = fname.split("/")
                            outfilebase = dirs[-1].split(".fits")[0]
                            if obstype.lower() == 'synthetic':
                                output_dir = ""
                                for directory in dirs[:-1]:
                                    output_dir = output_dir + directory + "/"
                                output_dir = output_dir + "Cross_correlations/"
                        HelperFunctions.ensure_dir(output_dir)

                        # Save the model and data orders, if debug=True
                        if debug:
                            # Save the individual spectral inputs and CCF orders (unweighted)
                            output_dir2 = output_dir.replace("Cross_correlations", "CCF_inputs")
                            HelperFunctions.ensure_dir(output_dir2)
                            HelperFunctions.ensure_dir("%sCross_correlations/" % (output_dir2))

                            for i, (o, m) in enumerate(zip(orders, model_orders)):
                                outfilename = "{0:s}{1:s}.{2:.0f}kps_{3:.1f}K{4:+.1f}{5:+.1f}.data.order{6:d}".format(
                                    output_dir2,
                                    outfilebase, vsini_sec,
                                    temp, gravity,
                                    metallicity, i + 1)
                                o.output(outfilename)
                                outfilename = "{0:s}{1:s}.{2:.0f}kps_{3:.1f}K{4:+.1f}{5:+.1f}.model.order{6:d}".format(
                                    output_dir2,
                                    outfilebase, vsini_sec,
                                    temp, gravity,
                                    metallicity, i + 1)
                                m.output(outfilename)

                        corr = Correlate.Correlate(orders, model_orders, addmode=addmode, outputdir=output_dir,
                                                   get_weights=get_weights, prim_teff=temperature_dict[fname],
                                                   orderweights=orderweights, debug=debug)
                        if debug:
                            corr, ccf_orders = corr

                        # Barycentric correction
                        if vbary_correct:
                            corr.x += vbary

                        # Output the ccf
                        if obstype.lower() == 'synthetic':
                            pars = {'outdir': output_dir, 'outbase': outfilebase, 'addmode': addmode,
                                    'vsini_prim': vsini_prim, 'vsini': vsini_sec,
                                    'T': temp, 'logg': gravity, '[Fe/H]': metallicity}
                            save_synthetic_ccf(corr, params=pars, mode=output_mode)
                        else:
                            pars = {'outdir': output_dir, 'fname': fname, 'addmode': addmode,
                                    'vsini_prim': vsini_prim, 'vsini': vsini_sec,
                                    'T': temp, 'logg': gravity, '[Fe/H]': metallicity}
                            pars['vbary'] = vbary if vbary_correct else np.nan
                            save_ccf(corr, params=pars, mode=output_mode, hdf_outfilename=output_file)

                        # Save the individual orders, if debug=True
                        if debug:
                            for i, c in enumerate(ccf_orders):
                                print "Saving CCF inputs for order {}".format(i + 1)
                                outfilename = "{0:s}Cross_correlations/{1:s}.{2:.0f}kps_{3:.1f}K{4:+.1f}{5:+.1f}.order{6:d}".format(
                                    output_dir2,
                                    outfilebase, vsini_sec,
                                    temp, gravity,
                                    metallicity, i + 1)
                                c.output(outfilename)



                    # Delete the model. We don't need it anymore and it just takes up ram.
                    modeldict[temp][gravity][metallicity][alpha][vsini_sec] = []

    return
Author: kgullikson88 | Project: General | Lines of code: 104 | Source file: GenericSearch.py

Example 9: CompanionSearch

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]
def CompanionSearch(fileList,
                    badregions=[],
                    interp_regions=[],
                    extensions=True,
                    resolution=60000,
                    trimsize=1,
                    vsini_values=(10, 20, 30, 40),
                    Tvalues=range(3000, 6900, 100),
                    metal_values=(-0.5, 0.0, +0.5),
                    logg_values=(4.5,),
                    modeldir=StellarModel.modeldir,
                    hdf5_file=StellarModel.HDF5_FILE,
                    vbary_correct=True,
                    observatory="CTIO",
                    addmode="ML",
                    debug=False):
    model_list = StellarModel.GetModelList(type='hdf5',
                                           hdf5_file=hdf5_file,
                                           temperature=Tvalues,
                                           metal=metal_values,
                                           logg=logg_values)
    modeldict, processed = StellarModel.MakeModelDicts(model_list, type='hdf5', hdf5_file=hdf5_file,
                                                       vsini_values=vsini_values, vac2air=True)

    get_weights = True if addmode.lower() == "weighted" else False
    orderweights = None

    MS = SpectralTypeRelations.MainSequence()

    # Do the cross-correlation
    datadict = defaultdict(list)
    temperature_dict = defaultdict(float)
    vbary_dict = defaultdict(float)
    alpha=0.0
    for temp in sorted(modeldict.keys()):
        for gravity in sorted(modeldict[temp].keys()):
            for metallicity in sorted(modeldict[temp][gravity].keys()):
                for vsini in vsini_values:
                    for fname in fileList:
                        if vbary_correct:
                            if fname in vbary_dict:
                                vbary = vbary_dict[fname]
                            else:
                                vbary = HelCorr_IRAF(fits.getheader(fname), observatory=observatory)
                                vbary_dict[fname] = vbary
                        process_data = False if fname in datadict else True
                        if process_data:
                            orders = Process_Data(fname, badregions, interp_regions=interp_regions,
                                                  extensions=extensions, trimsize=trimsize)
                            header = fits.getheader(fname)
                            spt = StarData.GetData(header['object']).spectype
                            match = re.search('[0-9]', spt)
                            if match is None:
                                spt = spt[0] + "5"
                            else:
                                spt = spt[:match.start() + 1]
                            temperature_dict[fname] = MS.Interpolate(MS.Temperature, spt)
                        else:
                            orders = datadict[fname]

                        output_dir = "Cross_correlations/"
                        outfilebase = fname.split(".fits")[0]
                        if "/" in fname:
                            dirs = fname.split("/")
                            output_dir = ""
                            outfilebase = dirs[-1].split(".fits")[0]
                            for directory in dirs[:-1]:
                                output_dir = output_dir + directory + "/"
                            output_dir = output_dir + "Cross_correlations/"
                        HelperFunctions.ensure_dir(output_dir)

                        model = modeldict[temp][gravity][metallicity][alpha][vsini]
                        pflag = not processed[temp][gravity][metallicity][alpha][vsini]
                        # if pflag:
                        # orderweights = None
                        retdict = Correlate.GetCCF(orders,
                                                   model,
                                                   resolution=resolution,
                                                   vsini=vsini,
                                                   rebin_data=process_data,
                                                   process_model=pflag,
                                                   debug=debug,
                                                   outputdir=output_dir.split("Cross_corr")[0],
                                                   addmode=addmode,
                                                   orderweights=orderweights,
                                                   get_weights=get_weights,
                                                   prim_teff=temperature_dict[fname])
                        corr = retdict["CCF"]
                        if pflag:
                            processed[temp][gravity][metallicity][alpha][vsini] = True
                            modeldict[temp][gravity][metallicity][alpha][vsini] = retdict["model"]
                            # orderweights = retdict['weights']
                        if process_data:
                            datadict[fname] = retdict['data']

                        outfilename = "{0:s}{1:s}.{2:.0f}kps_{3:.1f}K{4:+.1f}{5:+.1f}".format(output_dir, outfilebase,
                                                                                              vsini, temp, gravity,
                                                                                              metallicity)
                        print "Outputting to ", outfilename, "\n"
                        if vbary_correct:
#......... some code omitted here .........
Author: kgullikson88 | Project: General | Lines of code: 103 | Source file: GenericSearch.py

Example 10: Fit

# Required import: import HelperFunctions [as alias]
# Or: from HelperFunctions import ensure_dir [as alias]

#......... some code omitted here .........
    #Make an instance of the model getter
    if mg is None:
        mg = StellarModel.KuruczGetter(modeldir,
                                       T_min=T_min,
                                       T_max=T_max,
                                       logg_min=logg_min,
                                       logg_max=logg_max,
                                       metal_min=metal_min,
                                       metal_max=metal_max,
                                       alpha_min=alpha_min,
                                       alpha_max=alpha_max,
                                       wavemin=350.0)

    # Make the appropriate lmfit model
    fitter = HelperFunctions.ListModel(LM_Model, independent_vars=['x'], model_getter=mg)

    #Set default values
    fitter.set_param_hint("rv", value=rv.value, min=-50, max=50)
    fitter.set_param_hint('vsini', value=vsini.value, vary=True, min=0.0, max=500.0)
    fitter.set_param_hint('temperature', value=temperature, min=T_min, max=T_max, vary=True)
    fitter.set_param_hint('logg', value=logg, min=logg_min, max=logg_max, vary=True)
    fitter.set_param_hint('metal', value=metal, min=metal_min, max=metal_max, vary=True)
    fitter.set_param_hint('alpha', value=0.0, min=alpha_min, max=alpha_max, vary=mg.alpha_varies)

    """
    Here is the main loop over files!
    """
    for filename in file_list:
        # Make output directories
        header = fits.getheader(filename)
        date = header['date-obs'].split("T")[0]
        star = header['object']
        stardir = "{:s}{:s}/".format(output_dir, star.replace(" ", "_"))
        HelperFunctions.ensure_dir(stardir)
        datedir = "{:s}{:s}/".format(stardir, date)
        HelperFunctions.ensure_dir(datedir)
        chain_filename = "{:s}chain.dat".format(datedir)

        # Read the data
        print "Fitting parameters for {}".format(filename)
        all_orders = HelperFunctions.ReadExtensionFits(filename)
        orders = [o[1] for o in enumerate(all_orders) if o[0] in good_orders]

        # Perform the fit
        optdict = {"epsfcn": 1e-2}
        params = fitter.make_params()
        fitparams = {"rv": np.zeros(N_iter),
                     "vsini": np.zeros(N_iter),
                     "temperature": np.zeros(N_iter),
                     "logg": np.zeros(N_iter),
                     "metal": np.zeros(N_iter),
                     "alpha": np.zeros(N_iter)}
        orders_original = [o.copy() for o in orders]
        chainfile = open(chain_filename, "a")
        vbary = GenericSearch.HelCorr(header, observatory="CTIO")
        for n in range(N_iter):
            print "Fitting iteration {:d}/{:d}".format(n + 1, N_iter)
            orders = []
            for order in orders_original:
                o = order.copy()
                o.y += np.random.normal(loc=0, scale=o.err)
                orders.append(o.copy())

            # Make a fast interpolator instance if not the first loop
            #if n > 0:
            #    fast_interpolator = mg.make_vsini_interpolator()
Author: kgullikson88 | Project: Chiron-Scripts | Lines of code: 70 | Source file: FitBstar2.py


Note: The HelperFunctions.ensure_dir examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by their respective developers, and the source code remains the copyright of its original authors. For redistribution and use, please refer to the license of the corresponding project; do not reproduce without permission.