

Python dense_design_matrix.DenseDesignMatrix Class Code Examples

This article collects typical usage examples of the Python class pylearn2.datasets.dense_design_matrix.DenseDesignMatrix. If you have been wondering what the DenseDesignMatrix class does, how to use it, or what real-world usage looks like, the curated class code examples below should help.


The 15 code examples of the DenseDesignMatrix class shown below are sorted by popularity by default.
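
Before diving into the examples, a minimal construction sketch may help. The snippet below is an illustrative sketch rather than code from any of the projects cited here; the array shapes and label range are assumptions. It shows the two usual ways to build a DenseDesignMatrix: from a 2-D design matrix X with one example per row, or from a 4-D topological view with the default ('b', 0, 1, 'c') axes.

import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix

rng = np.random.RandomState(0)

# From a flat design matrix: 100 examples with 3072 features and integer labels.
X = rng.randn(100, 32 * 32 * 3).astype('float32')
y = rng.randint(low=0, high=10, size=(100, 1))
ds = DenseDesignMatrix(X=X, y=y)

# From a topological view: the view is flattened into a design matrix internally
# and can be recovered with get_topological_view().
topo = rng.randn(100, 32, 32, 3).astype('float32')
ds_topo = DenseDesignMatrix(topo_view=topo)

Either form can then be passed to preprocessors or iterated over, as the examples below demonstrate.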

Example 1: get_feats_from_cnn

def get_feats_from_cnn(rows, model=None):
    """
    fprop rows using best trained model and returns activations of the
    penultimate layer
    """
    conf = utils.get_config()
    patch_size = conf['patch_size']
    region_size = conf['region_size']
    batch_size = None
    preds = utils.get_predictor(model=model, return_all=True)
    y = np.zeros(len(rows))
    samples = np.zeros(
        (len(rows), region_size, region_size, 1), dtype=np.float32)
    for i, row in enumerate(rows):
        print 'processing %i-th image: %s' % (i, row['image_filename'])
        try:
            samples[i] = utils.get_samples_from_image(row, False)[0]
        except ValueError as e:
            print '{1} Value error: {0}'.format(str(e), row['image_filename'])
        y[i] = utils.is_positive(row)
    ds = DenseDesignMatrix(topo_view=samples)
    pipeline = utils.get_pipeline(
        ds.X_topo_space.shape, patch_size, batch_size)
    pipeline.apply(ds)
    return preds[-2](ds.get_topological_view()), y
Author: johnarevalo, Project: cnn-bcdr, Lines: 25, Source: fe_extraction.py

Example 2: test_convert_to_one_hot

def test_convert_to_one_hot():
    rng = np.random.RandomState([2013, 11, 14])
    m = 11
    d = DenseDesignMatrix(
        X=rng.randn(m, 4),
        y=rng.randint(low=0, high=10, size=(m,)))
    d.convert_to_one_hot()
Author: AlexArgus, Project: pylearn2, Lines: 7, Source: test_dense_design_matrix.py

Example 3: next

    def next(self):
        next_index = self._subset_iterator.next()

        # convert to boolean selection
        sel = np.zeros(self.num_examples, dtype=bool)
        sel[next_index] = True
        next_index = sel

        rval = []
        for data, fn in safe_izip(self._raw_data, self._convert):
            try:
                this_data = data[next_index]
            except TypeError:
                this_data = data[next_index, :]
            if fn:
                this_data = fn(this_data)
            if self._preprocessor is not None:
                d = DenseDesignMatrix(X=this_data)
                self._preprocessor.apply(d)
                this_data = d.get_design_matrix()
            assert not np.any(np.isnan(this_data))
            rval.append(this_data)
        rval = tuple(rval)
        if not self._return_tuple and len(rval) == 1:
            rval, = rval
        return rval    
Author: everglory99, Project: deepAutoController, Lines: 26, Source: icmc.py

Example 4: apply_ZCA_fast

def apply_ZCA_fast(patches, normalize, zca_preprocessor):
    patches = patches.astype(np.float32)
    if normalize:
        patches /= 255.0
    dataset = DenseDesignMatrix(X=patches.T)
    zca_preprocessor.apply(dataset)
    patches = dataset.get_design_matrix()
    return patches.T
Author: ttblue, Project: human_demos, Lines: 8, Source: create_leveldb_utils.py

Example 5: test

        def test(store_inverse):
            preprocessed_X = copy.copy(self.X)
            preprocessor = ZCA(store_inverse=store_inverse)

            dataset = DenseDesignMatrix(X=preprocessed_X,
                                        preprocessor=preprocessor,
                                        fit_preprocessor=True)

            preprocessed_X = dataset.get_design_matrix()
            assert_allclose(self.X, preprocessor.inverse(preprocessed_X))
Author: ASAPPinc, Project: pylearn2, Lines: 10, Source: test_preprocessing.py

Example 6: make_dataset

    def make_dataset(num_batches):
        m = num_batches*batch_size
        X = rng.randn(m, num_features)
        y = rng.randn(m, num_features)

        rval =  DenseDesignMatrix(X=X, y=y)

        rval.yaml_src = "" # suppress no yaml_src warning

        return rval
Author: 123fengye741, Project: pylearn2, Lines: 10, Source: test_bgd.py

Example 7: __init__

 def __init__(self, which_set, data_path=None, 
              term_range=None, target_type='cluster100'):
     """
     which_set: a string specifying which portion of the dataset
         to load. Valid values are 'train', 'valid' or 'test'
     data_path: a string specifying the directory containing the 
         webcluster data. If None (default), use environment 
         variable WEBCLUSTER_DATA_PATH.
     term_range: a tuple for taking only a slice of the available
         terms. Default is to use all 6275. For example, an input
         range of (10,2000) will truncate the 10 most frequent terms
         and the 6275-2000=4275 least frequent terms, where by frequency
         we mean how many unique documents each term appears in.
     target_type: the type of targets to use. Valid options are 
         'cluster[10,100,1000]'
     """
     self.__dict__.update(locals())
     del self.self
     
     self.corpus_terms = None
     self.doc_info = None
     
     print "loading WebCluster DDM. which_set =", self.which_set
     
     if self.data_path is None:
         self.data_path \
             = string_utils.preprocess('${WEBCLUSTER_DATA_PATH}')
     
     fname = os.path.join(self.data_path, which_set+'_doc_inputs.npy')
     X = np.load(fname)
     if self.term_range is not None:
         X = X[:,self.term_range[0]:self.term_range[1]]
         X = X/X.sum(1).reshape(X.shape[0],1)
     print X.sum(1).mean()
     
     fname = os.path.join(self.data_path, which_set+'_doc_targets.npy')
     # columns: 0:cluster10s, 1:cluster100s, 2:cluster1000s
     self.cluster_hierarchy = np.load(fname)
     
     y = None
     if self.target_type == 'cluster10':
         y = self.cluster_hierarchy[:,0]
     elif self.target_type == 'cluster100':
         y = self.cluster_hierarchy[:,1]
     elif self.target_type == 'cluster1000':
         y = self.cluster_hierarchy[:,2]
     elif self.target_type is None:
         pass
     else:
         raise NotImplementedError()
     
     DenseDesignMatrix.__init__(self, X=X, y=y)
     
     print "... WebCluster ddm loaded"
Author: nicholas-leonard, Project: delicious, Lines: 54, Source: webcluster.py
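
As a hedged usage sketch building on the __init__ above (not part of the original project): assuming the subclass is named WebCluster, as the loading message suggests, and that the data directory contains the <which_set>_doc_inputs.npy and <which_set>_doc_targets.npy files the constructor reads, it could be instantiated as follows.

# Illustrative only; the class name WebCluster and the data path are assumptions.
train_set = WebCluster(which_set='train',
                       data_path='/path/to/webcluster',
                       target_type='cluster100')
valid_set = WebCluster(which_set='valid',
                       data_path='/path/to/webcluster',
                       term_range=(10, 2000),
                       target_type='cluster10')
print train_set.X.shape  # (number of documents, number of terms after slicing)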

Example 8: test

    def test(store_inverse):
        rng = np.random.RandomState([1, 2, 3])
        X = as_floatX(rng.randn(15, 10))
        preprocessed_X = copy.copy(X)
        preprocessor = ZCA(store_inverse=store_inverse)

        dataset = DenseDesignMatrix(X=preprocessed_X,
                                    preprocessor=preprocessor,
                                    fit_preprocessor=True)

        preprocessed_X = dataset.get_design_matrix()

        assert_allclose(X, preprocessor.inverse(preprocessed_X))
Author: JesseLivezey, Project: pylearn2, Lines: 13, Source: test_preprocessing.py

Example 9: convert_to_dataset

     def convert_to_dataset(X, y):
         X = np.vstack(X)
         y = np.vstack(y)

         # convert labels
         y = self.label_converter.get_labels(y, self.label_mode)
         y = np.hstack(y)

         one_hot_y = one_hot(y)

         dataset = DenseDesignMatrix(X=X, y=one_hot_y)
         dataset.labels = y  # for confusion matrix

         return dataset
Author: sarikayamehmet, Project: ismir2014-deepbeat, Lines: 14, Source: EEGDatasetLoader.py

Example 10: make_dataset

        def make_dataset(num_batches):
            disturb_mem.disturb_mem()
            m = num_batches*batch_size
            X = rng.randn(m, num_features)
            y = np.zeros((m,1))
            y[:,0] = np.dot(X, w) > 0.

            rval =  DenseDesignMatrix(X=X, y=y)

            rval.yaml_src = "" # suppress no yaml_src warning

            X = rval.get_batch_design(batch_size)
            assert X.shape == (batch_size, num_features)

            return rval
Author: mathewsbabu, Project: pylearn, Lines: 15, Source: test_sgd.py

Example 11: test_zero_vector

    def test_zero_vector(self):
        """ Test that passing in the zero vector does not result in
            a divide by 0 """

        dataset = DenseDesignMatrix(X=as_floatX(np.zeros((1, 1))))

        # the settings of subtract_mean and use_std are not relevant to
        # the test
        # sqrt_bias = 0.0 is the only value for which there should be a risk
        # of failure occurring
        preprocessor = GlobalContrastNormalization(subtract_mean=True,
                                                   sqrt_bias=0.0,
                                                   use_std=True)

        dataset.apply_preprocessor(preprocessor)

        result = dataset.get_design_matrix()

        assert not np.any(np.isnan(result))
        assert not np.any(np.isinf(result))
Author: sonu5623, Project: pylearn2, Lines: 18, Source: test_preprocessing.py

Example 12: test_finitedataset_source_check

def test_finitedataset_source_check():
    """
    Check that the FiniteDatasetIterator returns sensible
    errors when there is a missing source in the dataset.
    """
    dataset = DenseDesignMatrix(X=np.random.rand(20,15).astype(theano.config.floatX),
                                y=np.random.rand(20,5).astype(theano.config.floatX))
    assert_raises(ValueError,
                  dataset.iterator,
                  mode='sequential',
                  batch_size=5,
                  data_specs=(VectorSpace(15),'featuresX'))
    try:
        dataset.iterator(mode='sequential',
                         batch_size=5,
                         data_specs=(VectorSpace(15),'featuresX'))
    except ValueError as e:
        assert 'featuresX' in str(e)
开发者ID:JesseLivezey,项目名称:pylearn2,代码行数:18,代码来源:test_iteration.py

Example 13: test_random_image

    def test_random_image(self):
        """
        Test on a random image that the preprocessor loads and works without
        any error and does not produce any nan or inf values.

        """

        rng = np.random.RandomState([1, 2, 3])
        X = as_floatX(rng.randn(5, 32 * 32 * 3))

        axes = ["b", 0, 1, "c"]
        view_converter = dense_design_matrix.DefaultViewConverter((32, 32, 3), axes)
        dataset = DenseDesignMatrix(X=X, view_converter=view_converter)
        dataset.axes = axes
        preprocessor = LeCunLCN(img_shape=[32, 32])
        dataset.apply_preprocessor(preprocessor)
        result = dataset.get_design_matrix()

        assert not np.any(np.isnan(result))
        assert not np.any(np.isinf(result))
Author: sonu5623, Project: pylearn2, Lines: 20, Source: test_preprocessing.py

Example 14: test_split_nfold_datasets

def test_split_nfold_datasets():
    #Load and create ddm from cifar100
    path = "/data/lisa/data/cifar100/cifar-100-python/train"
    obj = serial.load(path)
    X = obj['data']

    assert X.max() == 255.
    assert X.min() == 0.

    X = np.cast['float32'](X)
    y = None  # not implemented yet

    view_converter = DefaultViewConverter((32, 32, 3))

    ddm = DenseDesignMatrix(X=X, y=y, view_converter=view_converter)

    assert not np.any(np.isnan(ddm.X))
    ddm.y_fine = np.asarray(obj['fine_labels'])
    ddm.y_coarse = np.asarray(obj['coarse_labels'])
    folds = ddm.split_dataset_nfolds(10)
    print folds[0].shape
Author: caglar, Project: pylearn_old, Lines: 21, Source: test_dense_design_matrix.py

Example 15: __init__

 def __init__(self, filename, X=None, topo_view=None, y=None,
              load_all=False, **kwargs):
     if 'preprocessor' in kwargs:
         if ('fit_preprocessor' in kwargs and 
             kwargs['fit_preprocessor'] is False) or ('fit_preprocessor' 
                                                      not in kwargs):
             self._preprocessor = kwargs['preprocessor']
             kwargs['preprocessor'] = None
     else:
         self._preprocessor = None
     self.load_all = load_all
     if h5py is None:
         raise RuntimeError("Could not import h5py.")
     self._file = h5py.File(filename)
     if X is not None:
         X = self.get_dataset(X, load_all)
     if topo_view is not None:
         topo_view = self.get_dataset(topo_view, load_all)
     if y is not None:
         y = self.get_dataset(y, load_all)
     DenseDesignMatrix.__init__(self, X=X, topo_view=topo_view, y=y,
                                **kwargs)
Author: everglory99, Project: deepAutoController, Lines: 22, Source: icmc.py


Note: The pylearn2.datasets.dense_design_matrix.DenseDesignMatrix class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective authors, and copyright remains with the original authors; refer to each project's license before distributing or using the code. Do not reproduce without permission.