

Python serial.preprocess Function Code Examples

This article collects typical usage examples of the Python function pylearn2.utils.serial.preprocess, gathered from open-source projects. If you are wondering what preprocess does, how to call it, or simply want to see it used in real code, the curated snippets below may help.


The following presents 15 code examples of the preprocess function, sorted by popularity by default.
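
Before the examples, here is a minimal sketch of what serial.preprocess does in the snippets below: it substitutes ${VAR}-style environment variables inside a path string (for example ${PYLEARN2_NI_PATH} or ${PYLEARN2_DATA_PATH}) and returns the expanded path. The variable value and directory names in this sketch are illustrative placeholders, not paths assumed by pylearn2.

import os

from pylearn2.utils import serial

# Point the variable at a local data directory (placeholder value).
os.environ["PYLEARN2_NI_PATH"] = "/data/neuroimaging"

# preprocess expands ${PYLEARN2_NI_PATH} inside the string; if the
# variable were unset, pylearn2 would raise an error instead.
data_path = serial.preprocess("${PYLEARN2_NI_PATH}/smri")
print(data_path)   # /data/neuroimaging/smri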

Example 1: train_nice

def train_nice(args):
    vn = True
    center = True
    # data_path is needed in both branches below (the variance map file
    # lives there), so resolve it before branching.
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    if args.transposed:
        fmri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                  even_input=True)
        input_dim = fmri.X.shape[1]
        del fmri
    else:
        mask_file = path.join(data_path, "mask.npy")
        mask = np.load(mask_file)
        input_dim = (mask == 1).sum()
        if input_dim % 2 == 1:
            input_dim -= 1

    logging.info("Input shape: %d" % input_dim)

    p = path.abspath(path.dirname(__file__))
    yaml_file = path.join(p, "nice_%s.yaml" % args.dataset_name)
    user = path.expandvars("$USER")
    save_file = "nice_%s%s%s" % (args.dataset_name,
                                 "_transposed" if args.transposed else "",
                                 "_logistic" if args.logistic else "")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, save_file))
    variance_map_file = path.join(data_path, "variance_map.npy")
    if not path.isfile(variance_map_file):
        raise ValueError("Variance map file %s not found."
                         % variance_map_file)
    train(yaml_file, save_path, input_dim,
          args.transposed, args.logistic, variance_map_file)
Developer: ecastrow, Project: pl2mind, Lines: 32, Source: train_nice_smri.py

Example 2: test_rbm

def test_rbm():
    save_path = path.join(serial.preprocess("${PYLEARN2_OUTS}"), "tutorials")
    if not path.isdir(serial.preprocess("${PYLEARN2_OUTS}")):
        raise IOError("PYLEARN2_OUTS environment variable not set")
    train_rbm.train_rbm(epochs=1, save_path=save_path)
    mri_analysis.main(path.join(save_path, "rbm_smri.pkl"),
                      save_path, "sz_t")
Developer: ecastrow, Project: pl2mind, Lines: 7, Source: test_rbm_smri.py

Example 3: main

def main(args):
    dataset_name = args.dataset_name

    logger.info("Getting dataset info for %s" % dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + dataset_name)
    mask_file = path.join(data_path, "mask.npy")
    mask = np.load(mask_file)
    input_dim = (mask == 1).sum()

    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, "rbm_simple_test"))

    # File parameters are path specific ones (not model specific).
    file_params = {"save_path": save_path,
                   }

    yaml_template = open(yaml_file).read()
    hyperparams = expand(flatten(experiment.default_hyperparams(input_dim=input_dim)),
                         dict_type=ydict)

    # Set additional hyperparams from command line args
    if args.learning_rate is not None:
        hyperparams["learning_rate"] = args.learning_rate
    if args.batch_size is not None:
        hyperparams["batch_size"] = args.batch_size

    for param in file_params:
        yaml_template = yaml_template.replace("%%(%s)s" % param, file_params[param])

    yaml = yaml_template % hyperparams

    logger.info("Training")
    train = yaml_parse.load(yaml)
    train.main_loop()
Developer: ecastrow, Project: pl2mind, Lines: 35, Source: simple_train.py

Example 4: train_nice

def train_nice(args):
    vn = True
    center = True
    logger.info("Getting dataset info for %s" % args.dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    if args.transposed:
        logger.info("Data in transpose...")
        mri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                 unit_normalize=True,
                                 even_input=True,
                                 apply_mask=True)
        input_dim = mri.X.shape[1]
        variance_map_file = path.join(data_path, "transposed_variance_map.npy")
    else:
        mask_file = path.join(data_path, "mask.npy")
        mask = np.load(mask_file)
        input_dim = (mask == 1).sum()
        if input_dim % 2 == 1:
            input_dim -= 1
        mri = MRI.MRI_Standard(which_set="full",
                               dataset_name=args.dataset_name,
                               unit_normalize=True,
                               even_input=True,
                               apply_mask=True)
        variance_map_file = path.join(data_path, "variance_map.npy")
    save_variance_map(mri, variance_map_file)

    logger.info("Input shape: %d" % input_dim)

    p = path.abspath(path.dirname(__file__))
    yaml_file = path.join(p, "nice_mri.yaml")
    user = path.expandvars("$USER")

    if args.out_name is not None:
        out_name = args.out_name
    else:
        out_name = args.dataset_name
    save_file = "nice_%s%s%s" % (out_name,
                                 "_transposed" if args.transposed else "",
                                 "_logistic" if args.logistic else "")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, save_file))
    if path.isfile(save_path + ".pkl") or path.isfile(save_path + "_best.pkl"):
        answer = None
        while answer not in ["Y", "N", "y", "n"]:
            answer = raw_input("%s already exists, continuing will overwrite."
                               "\nOverwrite? (Y/N)[N]: " % save_path) or "N"
            if answer not in ["Y", "N", "y", "n"]:
                print "Please answer Y or N"
        if answer in ["N", "n"]:
            print "If you want to run without overwrite, consider using the -o option."
            sys.exit()

    logger.info("Saving to prefix %s" % save_path)

    if not path.isfile(variance_map_file):
        raise ValueError("Variance map file %s not found."
                         % variance_map_file)
    train(yaml_file, save_path, input_dim,
          args.transposed, args.logistic, variance_map_file, args.dataset_name)
Developer: ecastrow, Project: pl2mind, Lines: 60, Source: train_nice_mri.py

Example 5: test_rbm

def test_rbm():
    save_path = path.join(serial.preprocess("${PYLEARN2_OUTS}"), "tutorials")
    if not path.isdir(serial.preprocess("${PYLEARN2_OUTS}")):
        raise IOError("PYLEARN2_OUTS environment variable not set")

    train_rbm.train_rbm(epochs=1, save_path=save_path)
    show_weights.show_weights(path.join(save_path, "rbm_mnist.pkl"),
                              out=path.join(save_path, "rbm_mnist_weights.png"))
Developer: ecastrow, Project: pl2mind, Lines: 8, Source: test_rbm_mnist.py

Example 6: load_aod_gts

    def load_aod_gts(self):
        p = path.join(self.dataset_root, "aod_extra/")

        if not(path.isdir(serial.preprocess(p))):
            raise IOError("AOD extras directory %s not found."
                          % serial.preprocess(p))

        targets = np.load(serial.preprocess(p + "targets.npy"))
        novels = np.load(serial.preprocess(p + "novels.npy"))
        return targets, novels
Developer: ecastrow, Project: pl2mind, Lines: 10, Source: MRI.py

Example 7: __init__

    def __init__(self,
                 which_set,
                 data_path=None,
                 center=True,
                 rescale=True,
                 gcn=True):
        self.class_name = ['neg', 'pos']
        # load data
        path = "${PYLEARN2_DATA_PATH}/cin/"
        #datapath = path + 'feature850-2-1.pkl'
        if data_path is None:
            data_path = path + 'feature850-2-1.pkl'
        else:
            data_path = path + data_path
        data_path = serial.preprocess(data_path)
        with open(data_path, 'rb') as f:
            #f = open(datapath, 'rb')
            train_set, valid_set, test_set = cPickle.load(f)
            #f.close()

        self.train_set = train_set
        self.valid_set = valid_set
        self.test_set = test_set
        if which_set == 'train':
            X, Y = self.train_set
        elif which_set == 'valid':
            X, Y = self.valid_set
        else:
            X, Y = self.test_set

        X = X.astype(float)  # astype returns a copy; keep the float version
        axis = 0
        _max = np.max(X, axis=axis)
        _min = np.min(X, axis=axis)
        _mean = np.mean(X, axis=axis)
        _std = np.std(X, axis=axis)
        _scale = _max - _min


        # print _max
        # print _min
        # print _mean
        # print _std

        if gcn:
            X = global_contrast_normalize(X, scale=gcn)
        else:
            if center:
                X[:, ] -= _mean
            if rescale:
                X[:, ] /= _scale

        # topo_view = X.reshape(X.shape[0], X.shape[1], 1, 1)
        # y = np.reshape(Y, (Y.shape[0], 1))
        # y = np.atleast_2d(Y).T
        # Two-column encoding of the binary labels.
        y = np.zeros((Y.shape[0], 2))
        y[:, 0] = Y
        y[:, 1] = 1 - Y
        print X.shape, y.shape
        super(CIN_FEATURE2, self).__init__(X=X, y=y)
Developer: jackal092927, Project: pylearn2_med, Lines: 60, Source: cin_feature2.py

Example 8: __init__

    def __init__(self, jobs, db, name, updater, analyzer, alerter, reload=False):
        self.__dict__.update(locals())

        self.table_dir = serial.preprocess(path.join(args.out_dir,
                                                     self.name))
        self.html = HTMLPage(self.name + " results")

        self.analyzer.start()
        self.updater.start()
Developer: ecastrow, Project: pl2mind, Lines: 9, Source: jobman_analysis.py

Example 9: getFilename

    def getFilename(i):
        base = path + 'snapshot_'
        if i < 10:
            out = base + '00%d.hdf5' % i
        elif i < 100:
            out = base + '0%d.hdf5' % i
        else:
            out = base + '%d.hdf5' % i
        return serial.preprocess(out)
Developer: Samantha-Thrush, Project: pylearn2, Lines: 9, Source: nanoParticle.py

Example 10: test_data

def test_data():
    pylearn2_out_path = path.expandvars("$PYLEARN2_OUTS")
    assert pylearn2_out_path != "", ("PYLEARN2_OUTS environment variable is "
                                     "not set.")

    pylearn2_data_path = path.expandvars("$PYLEARN2_NI_PATH")
    assert pylearn2_data_path != "", ("PYLEARN2_NI_PATH environment"
                                      " variable is not set")

    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/smri/")
    extras_path = serial.preprocess("${PYLEARN2_NI_PATH}/mri_extra/")

    try:
        assert path.isdir(data_path), data_path
        assert path.isdir(extras_path), extras_path
    except AssertionError as e:
        raise IOError("File or directory not found (%s), did you set your "
                      "PYLEARN2_NI_PATH correctly? (%s)" % (e, data_path))
Developer: ecastrow, Project: pl2mind, Lines: 18, Source: test_rbm_smri.py

Example 11: __init__

    def __init__(self, which_set, start=None, stop=None, shuffle=False):
        if which_set not in ['train', 'valid']:
            if which_set == 'test':
                raise ValueError(
                    "Test datasets are currently not supported")
            raise ValueError(
                'Unrecognized which_set value "%s". ' % (which_set,) +
                'Valid values are ["train", "valid"].')

        p = "${PYLEARN2_NI_PATH}/snp/"
        if which_set == 'train':
            data_path = p + 'gen.chr1.npy'
            label_path = p + 'gen.chr1_labels.npy'
        else:
            assert which_set == 'valid'
            data_path = p + 'test.npy'
            label_path = p + 'test_labels.npy'

        data_path = serial.preprocess(data_path)
        label_path = serial.preprocess(label_path)

        print "Loading data"
        topo_view = np.load(data_path)
        y = np.atleast_2d(np.load(label_path)).T
        samples, number_snps = topo_view.shape

        if start is not None:
            stop = stop if (stop <= samples) else samples
            assert 0 <= start < stop
            topo_view = topo_view[start:stop, :]
            y = y[start:stop]

        if shuffle:
            self.shuffle_rng = make_np_rng(None, default_seed=[1, 2, 3],
                                           which_method="shuffle")
            # Re-read the row count in case start/stop sliced the data above.
            samples = topo_view.shape[0]
            for i in xrange(samples):
                j = self.shuffle_rng.randint(samples)
                tmp = topo_view[i].copy()
                topo_view[i] = topo_view[j]
                topo_view[j] = tmp
                tmp = y[i:i+1].copy()
                y[i] = y[j]
                y[j] = tmp

        super(SNP, self).__init__(X=topo_view, y=y, y_labels=np.amax(y)+1)
Developer: ecastrow, Project: pl2mind, Lines: 44, Source: SNP.py

Example 12: __init__

    def __init__(self, which_set, one_hot=False, axes=['b', 0, 1, 'c']):
        """
        .. todo::

            WRITEME
        """
        self.args = locals()

        assert which_set in self.data_split.keys()

        path = serial.preprocess(
            "${PYLEARN2_DATA_PATH}/ocr_letters/letter.data")
        with open(path, 'r') as data_f:
            data = data_f.readlines()
            data = [line.split("\t") for line in data]

        data_x = [map(int, item[6:-1]) for item in data]
        data_letters = [item[1] for item in data]
        data_fold = [int(item[5]) for item in data]

        letters = list(numpy.unique(data_letters))
        data_y = [letters.index(item) for item in data_letters]

        if which_set == 'train':
            split = slice(0, self.data_split['train'])
        elif which_set == 'valid':
            split = slice(self.data_split['train'], self.data_split['train'] +
                          self.data_split['valid'])
        elif which_set == 'test':
            split = slice(self.data_split['train'] + self.data_split['valid'],
                          (self.data_split['train'] +
                           self.data_split['valid'] +
                           self.data_split['test']))

        data_x = numpy.asarray(data_x[split])
        data_y = numpy.asarray(data_y[split])
        data_fold = numpy.asarray(data_fold[split])
        assert data_x.shape[0] == data_y.shape[0]
        assert data_x.shape[0] == self.data_split[which_set]

        self.one_hot = one_hot
        if one_hot:
            one_hot = numpy.zeros(
                (data_y.shape[0], len(letters)), dtype='float32')
            for i in xrange(data_y.shape[0]):
                one_hot[i, data_y[i]] = 1.
            data_y = one_hot

        view_converter = dense_design_matrix.DefaultViewConverter(
            (16, 8, 1), axes)
        super(OCR, self).__init__(
            X=data_x, y=data_y, view_converter=view_converter)

        assert not contains_nan(self.X)
        self.fold = data_fold
Developer: Deathmonster, Project: pylearn2, Lines: 55, Source: ocr.py

Example 13: main

def main(dataset_name="smri"):
    logger.info("Getting dataset info for %s" % args.dataset_name)
    data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)
    mask_file = path.join(data_path, "mask.npy")
    mask = np.load(mask_file)
    input_dim = (mask == 1).sum()
    if input_dim % 2 == 1:
        input_dim -= 1
    mri = MRI.MRI_Standard(which_set="full",
                           dataset_name=dataset_name,
                           unit_normalize=True,
                           even_input=True,
                           apply_mask=True)
    variance_map_file = path.join(data_path, "variance_map.npy")
    mri_nifti.save_variance_map(mri, variance_map_file)

    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/%s"
                                  % (user, "jobman_test"))

    file_params = {"save_path": save_path,
                   "variance_map_file": variance_map_file
                   }

    yaml_template = open(yaml_file).read()
    hyperparams = expand(flatten(mlp_experiment.default_hyperparams(input_dim=input_dim)),
                         dict_type=ydict)

    for param in hyperparams:
        if hasattr(args, param) and getattr(args, param):
            val = getattr(args, param)
            logger.info("Filling %s with %r" % (param, val))
            hyperparams[param] = type(hyperparams[param])(val)

    for param in file_params:
        yaml_template = yaml_template.replace("%%(%s)s" % param, file_params[param])

    yaml = yaml_template % hyperparams
    print yaml
    logger.info("Training")
    train = yaml_parse.load(yaml)
    train.main_loop()
Developer: ecastrow, Project: pl2mind, Lines: 42, Source: simple_train.py

Example 14: get_input_params

    def get_input_params(self, args, hyperparams):
        data_path = serial.preprocess("${PYLEARN2_NI_PATH}/" + args.dataset_name)

        data_class = hyperparams["data_class"]
        variance_normalize = hyperparams.get("variance_normalize", False)
        unit_normalize = hyperparams.get("unit_normalize", False)
        demean = hyperparams.get("demean", False)
        assert not (variance_normalize and unit_normalize)

        logger.info((data_class, variance_normalize, unit_normalize, demean))
        h = hash((data_class, variance_normalize, unit_normalize, demean))

        if self.d.get(h, False):
            return self.d[h]
        else:
            if data_class == "MRI_Transposed":
                assert not variance_normalize
                mri = MRI.MRI_Transposed(dataset_name=args.dataset_name,
                                         unit_normalize=unit_normalize,
                                         demean=demean,
                                         even_input=True,
                                         apply_mask=True)
                input_dim = mri.X.shape[1]
                variance_file_name = ("variance_map_transposed%s%s.npy"
                                      % ("_un" if unit_normalize else "",
                                         "_dm" if demean else ""))

            elif data_class == "MRI_Standard":
                assert not demean
                mask_file = path.join(data_path, "mask.npy")
                mask = np.load(mask_file)
                input_dim = (mask == 1).sum()
                if input_dim % 2 == 1:
                    input_dim -= 1
                mri = MRI.MRI_Standard(which_set="full",
                                       dataset_name=args.dataset_name,
                                       unit_normalize=unit_normalize,
                                       variance_normalize=variance_normalize,
                                       even_input=True,
                                       apply_mask=True)
                variance_file_name = ("variance_map%s%s.npy"
                                      % ("_un" if unit_normalize else "",
                                         "_vn" if variance_normalize else ""))
                logger.info(variance_file_name)
                logger.info((data_class, variance_normalize, unit_normalize, demean))

        variance_map_file = path.join(data_path, variance_file_name)
        if not path.isfile(variance_map_file):
            logger.info("Saving variance file %s" % variance_map_file)
            mri_nifti.save_variance_map(mri, variance_map_file)
        self.d[h] = (input_dim, variance_map_file)
        return self.d[h]
Developer: ecastrow, Project: pl2mind, Lines: 52, Source: load_rbm_experiments.py

Example 15: train_nice

def train_nice():
    vn = True
    center = True
    smri = MRI.MRI_Transposed(dataset_name="smri",
                              even_input=True)
    input_dim = smri.X.shape[1]

    p = path.abspath(path.dirname(__file__))
    yaml_file = path.join(p, "nice_smri_transposed.yaml")
    user = path.expandvars("$USER")
    save_path = serial.preprocess("/export/mialab/users/%s/pylearn2_outs/" % user)
    assert path.isdir(save_path)
    train(yaml_file, save_path, input_dim)
Developer: ecastrow, Project: pl2mind, Lines: 13, Source: train_nice_smri_transposed.py


Note: The pylearn2.utils.serial.preprocess examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and distribution and use are governed by each project's license. Please do not reproduce without permission.