

Python control.get_load_data Function Code Examples

This article collects typical usage examples of the Python function pylearn2.datasets.control.get_load_data. If you are unsure what get_load_data does, how to call it, or what it looks like in real code, the curated examples below should help.


The following presents 15 code examples of the get_load_data function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
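
Before diving into the examples, a quick note on what the function does: pylearn2.datasets.control.get_load_data() returns a global flag that dataset constructors consult to decide whether to actually read data from disk. The minimal sketch below (assuming the companion functions push_load_data and pop_load_data that the module's call sites suggest) shows how the flag is typically toggled, e.g. to smoke-test model configuration on a machine without the datasets installed. All 15 examples below branch on exactly this flag.

    # Minimal sketch: temporarily disable real data loading.
    from pylearn2.datasets import control

    control.push_load_data(False)          # assumed companion of get_load_data
    try:
        assert control.get_load_data() is False
        # Dataset constructors that guard on control.get_load_data(), like
        # the examples below, will now skip reading files and typically
        # leave self.X as None or fill it with synthetic values.
    finally:
        control.pop_load_data()            # restore the previous setting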

Example 1: __init__

    def __init__(self,
                 preprocessed_dataset,
                 preprocessor,
                 convert_to_one_hot=True,
                 start=None,
                 stop=None,
                 axes=['b', 0, 1, 'c']):

        self.args = locals()

        self.preprocessed_dataset = preprocessed_dataset
        self.preprocessor = preprocessor
        self.rng = self.preprocessed_dataset.rng
        self.data_specs = preprocessed_dataset.data_specs
        self.X_space = preprocessed_dataset.X_space
        self.X_topo_space = preprocessed_dataset.X_topo_space
        self.view_converter = preprocessed_dataset.view_converter

        self.y = preprocessed_dataset.y
        if convert_to_one_hot:
            if not (self.y.min() == 0):
                raise AssertionError("Expected y.min == 0 but y.min == %g" %
                                     self.y.min())
            nclass = self.y.max() + 1
            y = np.zeros((self.y.shape[0], nclass), dtype='float32')
            for i in xrange(self.y.shape[0]):
                y[i, self.y[i]] = 1.
            self.y = y
            assert self.y is not None
            space, source = self.data_specs
            space.components[source.index('targets')].dim = nclass

        if control.get_load_data():
            if start is not None:
                self.X = preprocessed_dataset.X[start:stop, :]
                if self.y is not None:
                    self.y = self.y[start:stop, :]
                assert self.X.shape[0] == stop-start
            else:
                self.X = preprocessed_dataset.X
        else:
            self.X = None
        if self.X is not None:
            if self.y is not None:
                assert self.y.shape[0] == self.X.shape[0]

        #self.mn = self.X.min()
        #self.mx = self.X.max()

        if getattr(preprocessor, "inv_P_", None) is None:
            warnings.warn("ZCA preprocessor.inv_P_ was none. Computing "
                          "inverse of preprocessor.P_ now. This will take "
                          "some time. For efficiency, it is recommended that "
                          "in the future you compute the inverse in ZCA.fit() "
                          "instead, by passing it compute_inverse=True.")
            logger.info('inverting...')
            preprocessor.inv_P_ = np.linalg.inv(preprocessor.P_)
            logger.info('...done inverting')

        self.view_converter.set_axes(axes)
Developer: BloodNg, Project: pylearn2, Lines: 60, Source: zca_dataset.py
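
A side note on Example 1: the per-row loop that builds the one-hot targets can be replaced by a single vectorized assignment. A small sketch, where labels stands in for the integer vector self.y above:

    import numpy as np

    labels = np.array([0, 2, 1, 2])                   # stand-in for self.y
    nclass = labels.max() + 1
    one_hot = np.zeros((labels.shape[0], nclass), dtype='float32')
    one_hot[np.arange(labels.shape[0]), labels] = 1.  # row i gets a 1 in column labels[i]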

Example 2: __setstate__

    def __setstate__(self, d):
        """
        .. todo::

            WRITEME
        """

        if d['design_loc'] is not None:
            if control.get_load_data():
                d['X'] = np.load(d['design_loc'])
            else:
                d['X'] = None

        if d['compress']:
            X = d['X']
            mx = d['compress_max']
            mn = d['compress_min']
            del d['compress_max']
            del d['compress_min']
            d['X'] = 0
            self.__dict__.update(d)
            if X is not None:
                self.X = np.cast['float32'](X) * mx / 255. + mn
            else:
                self.X = None
        else:
            self.__dict__.update(d)

        # To be able to unpickle older data after the addition of
        # the data_specs mechanism
        if not all(m in d for m in ('data_specs', 'X_space',
                                    '_iter_data_specs', 'X_topo_space')):
            X_space = VectorSpace(dim=self.X.shape[1])
            X_source = 'features'
            if self.y is None:
                space = X_space
                source = X_source
            else:
                y_space = VectorSpace(dim=self.y.shape[-1])
                y_source = 'targets'

                space = CompositeSpace((X_space, y_space))
                source = (X_source, y_source)

            self.data_specs = (space, source)
            self.X_space = X_space
            self._iter_data_specs = (X_space, X_source)

            view_converter = d.get('view_converter', None)
            if view_converter is not None:
                # Get the topo_space from the view_converter
                if not hasattr(view_converter, 'topo_space'):
                    raise NotImplementedError("Not able to get a topo_space "
                            "from this converter: %s"
                            % view_converter)

                # self.X_topo_space stores a "default" topological space that
                # will be used only when self.iterator is called without a
                # data_specs, and with "topo=True", which is deprecated.
                self.X_topo_space = view_converter.topo_space
Developer: JakeMick, Project: pylearn2, Lines: 60, Source: dense_design_matrix.py
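
Example 2 shows why get_load_data() matters during unpickling: when the design matrix was saved to a separate file recorded in design_loc, it is reloaded only if the flag is on. A sketch of exercising that path (the pickle filename is hypothetical, and push_load_data/pop_load_data are assumed to be the flag's companion setters):

    from pylearn2.datasets import control
    from pylearn2.utils import serial

    control.push_load_data(False)                  # do not reload the design matrix
    try:
        dataset = serial.load('my_dataset.pkl')    # hypothetical pickle written with design_loc set
        assert dataset.X is None                   # __setstate__ above left X empty
    finally:
        control.pop_load_data()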

Example 3: from_dataset

def from_dataset(dataset, num_examples):
    """
    .. todo::

        WRITEME
    """
    try:

        V, y = dataset.get_batch_topo(num_examples, True)

    except:

        # This patches a case where control.get_load_data() is false so
        # dataset.X is None This logic should be removed whenever we implement
        # lazy loading

        if isinstance(dataset, DenseDesignMatrix) and dataset.X is None and not control.get_load_data():
            warnings.warn("from_dataset wasn't able to make subset of " "dataset, using the whole thing")
            return DenseDesignMatrix(X=None, view_converter=dataset.view_converter)
        raise

    rval = DenseDesignMatrix(topo_view=V, y=y)
    rval.adjust_for_viewer = dataset.adjust_for_viewer

    return rval
Developer: Rt0220, Project: pylearn2, Lines: 25, Source: dense_design_matrix.py
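
For context, a hypothetical call to from_dataset on a toy dataset; the except branch above only matters when data loading is disabled and dataset.X is None, in which case the whole (empty) dataset is returned instead of a random subset.

    import numpy as np
    from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix, from_dataset

    # Toy dataset with a 28x28x1 topological view (shapes are made up).
    full = DenseDesignMatrix(topo_view=np.random.rand(1000, 28, 28, 1).astype('float32'))
    subset = from_dataset(full, num_examples=100)   # random 100-example subset
    print(subset.X.shape)                           # (100, 784)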

Example 4: __init__

    def __init__(self,
                 preprocessed_dataset,
                 preprocessor,
                 start=None,
                 stop=None,
                 axes=None):

        if axes is not None:
            warnings.warn("The axes argument to ZCA_Dataset no longer has "
                          "any effect. Its role is now carried out by the "
                          "Space you pass to Dataset.iterator. You should "
                          "remove 'axes' arguments from calls to "
                          "ZCA_Dataset. This argument may be removed from "
                          "the library after 2015-05-05.")
        self.args = locals()

        self.preprocessed_dataset = preprocessed_dataset
        self.preprocessor = preprocessor
        self.rng = self.preprocessed_dataset.rng
        self.data_specs = preprocessed_dataset.data_specs
        self.X_space = preprocessed_dataset.X_space
        self.X_topo_space = preprocessed_dataset.X_topo_space
        self.view_converter = preprocessed_dataset.view_converter

        self.y = preprocessed_dataset.y
        self.y_labels = preprocessed_dataset.y_labels

        # Defined up here because PEP8 requires excessive indenting if defined
        # where it is used.
        msg = ("Expected self.y to have dim 2, but it has %d. Maybe you are "
               "loading from an outdated pickle file?")
        if control.get_load_data():
            if start is not None:
                self.X = preprocessed_dataset.X[start:stop, :]
                if self.y is not None:
                    if self.y.ndim != 2:
                        raise ValueError(msg % self.y.ndim)
                    self.y = self.y[start:stop, :]
                assert self.X.shape[0] == stop - start
            else:
                self.X = preprocessed_dataset.X
        else:
            self.X = None
        if self.X is not None:
            if self.y is not None:
                assert self.y.shape[0] == self.X.shape[0]

        if getattr(preprocessor, "inv_P_", None) is None:
            warnings.warn("ZCA preprocessor.inv_P_ was none. Computing "
                          "inverse of preprocessor.P_ now. This will take "
                          "some time. For efficiency, it is recommended that "
                          "in the future you compute the inverse in ZCA.fit() "
                          "instead, by passing it compute_inverse=True.")
            logger.info('inverting...')
            preprocessor.inv_P_ = np.linalg.inv(preprocessor.P_)
            logger.info('...done inverting')
Developer: DevSinghSachan, Project: pylearn2, Lines: 56, Source: zca_dataset.py

Example 5: __init__

    def __init__(self,
            preprocessed_dataset,
            preprocessor,
            convert_to_one_hot = True,
            start = None,
            stop = None,
            axes = ['b', 0, 1, 'c']):

        self.args = locals()

        self.preprocessed_dataset = preprocessed_dataset
        self.preprocessor = preprocessor
        self.rng = self.preprocessed_dataset.rng
        self.data_specs = preprocessed_dataset.data_specs
        self.X_space = preprocessed_dataset.X_space
        self.X_topo_space = preprocessed_dataset.X_topo_space
        self.view_converter = preprocessed_dataset.view_converter

        self.y = preprocessed_dataset.y
        if convert_to_one_hot:
            if not ( self.y.min() == 0):
                raise AssertionError("Expected y.min == 0 but y.min == "+str(self.y.min()))
            nclass = self.y.max() + 1
            y = np.zeros((self.y.shape[0], nclass), dtype='float32')
            for i in xrange(self.y.shape[0]):
                y[i,self.y[i]] = 1.
            self.y = y
            assert self.y is not None
            space, source = self.data_specs
            space.components[source.index('targets')].dim = nclass

        if control.get_load_data():
            if start is not None:
                self.X = preprocessed_dataset.X[start:stop,:]
                if self.y is not None:
                    self.y = self.y[start:stop,:]
                assert self.X.shape[0] == stop-start
            else:
                self.X = preprocessed_dataset.X
        else:
            self.X = None
        if self.X is not None:
            if self.y is not None:
                assert self.y.shape[0] == self.X.shape[0]


        #self.mn = self.X.min()
        #self.mx = self.X.max()
        print 'inverting...'
        preprocessor.invert()
        print '...done inverting'

        self.view_converter.axes = axes
Developer: coodoing, Project: pylearn2, Lines: 53, Source: zca_dataset.py

Example 6: __init__

    def __init__(self,
                 preprocessed_dataset,
                 preprocessor,
                 start=None,
                 stop=None,
                 axes=['b', 0, 1, 'c']):
        """
        .. todo::

            WRITEME
        """
        self.args = locals()

        self.preprocessed_dataset = preprocessed_dataset
        self.preprocessor = preprocessor
        self.rng = self.preprocessed_dataset.rng
        self.data_specs = preprocessed_dataset.data_specs
        self.X_space = preprocessed_dataset.X_space
        self.X_topo_space = preprocessed_dataset.X_topo_space
        self.view_converter = preprocessed_dataset.view_converter

        self.y = preprocessed_dataset.y
        self.y_labels = preprocessed_dataset.y_labels

        if control.get_load_data():
            if start is not None:
                self.X = preprocessed_dataset.X[start:stop, :]
                if self.y is not None:
                    self.y = self.y[start:stop, :]
                assert self.X.shape[0] == stop - start
            else:
                self.X = preprocessed_dataset.X
        else:
            self.X = None
        if self.X is not None:
            if self.y is not None:
                assert self.y.shape[0] == self.X.shape[0]

        # self.mn = self.X.min()
        # self.mx = self.X.max()

        if getattr(preprocessor, "inv_P_", None) is None:
            warnings.warn("ZCA preprocessor.inv_P_ was none. Computing "
                          "inverse of preprocessor.P_ now. This will take "
                          "some time. For efficiency, it is recommended that "
                          "in the future you compute the inverse in ZCA.fit() "
                          "instead, by passing it compute_inverse=True.")
            logger.info('inverting...')
            preprocessor.inv_P_ = np.linalg.inv(preprocessor.P_)
            logger.info('...done inverting')

        self.view_converter.set_axes(axes)
开发者ID:jerrymouse333,项目名称:pylearn2,代码行数:52,代码来源:zca_dataset.py

Example 7: __init__

 def __init__(self, which_set='train', center=False, start=None, stop=None,
              axes=['b', 'c', 0, 1], preprocessor=None,
              fit_preprocessor=False, fit_test_preprocessor=False):
     self.shape = (8, 35, 57)
     self.size = {'train': 2849, 'valid': 2849, 'test': 2849}
     self.range = (-10, 10)
     self.path = "${PYLEARN2_DATA_PATH}/ecmwf/"
     self.set_path = {'train': 'ecmwf.train', 'valid': 'ecmwf.val', 'test': 'ecmwf.test'}
     self.args = locals()
     if which_set not in ['train', 'valid', 'test']:
         raise ValueError(
             'Unrecognized which_set value "%s".' % (which_set,) +
             '". Valid values are ["train","valid","test"].')
     path = self.path + self.set_path[which_set]
     if control.get_load_data():
         path = serial.preprocess(path)
         datasetCache = cache.datasetCache
         path = datasetCache.cache_file(path)
         X, topo_view, y = self._read_ecmwf(path, which_set)
     else:
         X = np.random.rand(self.size[which_set], np.prod(self.shape))
         # Give the synthetic stand-in the full topological shape so the
         # (m, v, r, c) unpacking below also works when data loading is off.
         topo_view = np.random.rand(self.size[which_set], *self.shape)
         y = np.random.randint(self.range[0], self.range[1], (self.size[which_set], 1))
     (m, v, r, c) = topo_view.shape
     if center:
         topo_view -= topo_view.mean(axis=0)
     super(ECMWF, self).__init__(X=X, topo_view=topo_view, y=y, axes=axes)
     assert not np.any(np.isnan(self.X))
     if start is not None:
         assert start >= 0
         if stop > self.X.shape[0]:
             raise ValueError('stop=' + str(stop) + '>' +
                              'm=' + str(self.X.shape[0]))
         assert stop > start
         self.X = self.X[start:stop, :]
         if self.X.shape[0] != stop - start:
             raise ValueError("X.shape[0]: %d. start: %d stop: %d"
                              % (self.X.shape[0], start, stop))
         if len(self.y.shape) > 1:
             self.y = self.y[start:stop, :]
         else:
             self.y = self.y[start:stop]
         assert self.y.shape[0] == stop - start
     if which_set == 'test':
         assert fit_test_preprocessor is None or \
             (fit_preprocessor == fit_test_preprocessor)
     if self.X is not None and preprocessor:
         preprocessor.apply(self, fit_preprocessor)
Developer: daviddiazvico, Project: pylearn2, Lines: 48, Source: ecmwf.py
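
Example 7 illustrates the common fallback of substituting random data when loading is disabled, so shapes, preprocessing, and YAML parsing can still be exercised without the real files. A stripped-down, hypothetical dataset class following the same pattern:

    import numpy as np
    from pylearn2.datasets import control
    from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix

    class ToyDataset(DenseDesignMatrix):
        """Hypothetical dataset: real data when the flag is on, noise otherwise."""

        def __init__(self, path, num_examples=100, dim=20):
            if control.get_load_data():
                X = np.load(path).astype('float32')                       # real design matrix
            else:
                X = np.random.rand(num_examples, dim).astype('float32')   # placeholder values
            super(ToyDataset, self).__init__(X=X)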

Example 8: __init__

    def __init__(self, which_set, center = False,
            one_hot = False, binarize = False,
            axes=['b', 0, 1, 'c'],
            preprocessor = ZCA(),
            fit_preprocessor = False,
            fit_test_preprocessor = False):

        self.args = locals()
        print "==========IIII LOVVEEEE YOOOOOUU========"
        def dimshuffle(b01c):
            default = ('b', 0, 1, 'c')
            return b01c.transpose(*[default.index(axis) for axis in axes])

        if control.get_load_data():
            path = os.environ['PYLEARN2_DATA_PATH'] + '/faceEmo/'

            if which_set == 'train':
                X = np.load(path + 'train_X.npy').astype('float32')
                y = np.load(path + 'train_y.npy').astype('float32')
            else:
#                 import pdb
#                 pdb.set_trace()
                assert which_set == 'test'
                X = np.load(path + 'test_X.npy').astype('float32')
                y = np.load(path + 'test_y.npy').astype('float32')
            
            if binarize:
                X = (X > 0.5).astype('float32')

            self.one_hot = one_hot
            if one_hot:
                one_hot = np.zeros((y.shape[0],3),dtype='float32')
                for i in xrange(y.shape[0]):
                    one_hot[i,y[i]] = 1.
                y = one_hot

            if center:
                X -= X.mean(axis=0)

            super(FaceEmo,self).__init__(X = X, y = y)

        if which_set == 'test':
            assert fit_test_preprocessor is None or (fit_preprocessor == fit_test_preprocessor)

        if self.X is not None and preprocessor:
            preprocessor.fit(self.X)
            preprocessor.apply(self, fit_preprocessor)
Developer: hycis, Project: Conv2d, Lines: 47, Source: faceEmo.py

Example 9: __init__

    def __init__(self,
                 preprocessed_dataset,
                 preprocessor,
                 start=None,
                 stop=None,
                 axes=None):

        if axes is not None:
            warnings.warn("The axes argument to ZCA_Dataset no longer has "
                          "any effect. Its role is now carried out by the "
                          "Space you pass to Dataset.iterator. You should "
                          "remove 'axes' arguments from calls to "
                          "ZCA_Dataset. This argument may be removed from "
                          "the library after 2015-05-05.")
        self.args = locals()

        self.preprocessed_dataset = preprocessed_dataset
        self.preprocessor = preprocessor
        self.rng = self.preprocessed_dataset.rng
        self.data_specs = preprocessed_dataset.data_specs
        self.X_space = preprocessed_dataset.X_space
        self.X_topo_space = preprocessed_dataset.X_topo_space
        self.view_converter = preprocessed_dataset.view_converter

        self.y = preprocessed_dataset.y
        self.y_labels = preprocessed_dataset.y_labels

        # Defined up here because PEP8 requires excessive indenting if defined
        # where it is used.
        msg = ("Expected self.y to have dim 2, but it has %d. Maybe you are "
               "loading from an outdated pickle file?")
        if control.get_load_data():
            if start is not None:
                self.X = preprocessed_dataset.X[start:stop, :]
                if self.y is not None:
                    if self.y.ndim != 2:
                        raise ValueError(msg % self.y.ndim)
                    self.y = self.y[start:stop, :]
                assert self.X.shape[0] == stop - start
            else:
                self.X = preprocessed_dataset.X
        else:
            self.X = None
        if self.X is not None:
            if self.y is not None:
                assert self.y.shape[0] == self.X.shape[0]
Developer: 123fengye741, Project: pylearn2, Lines: 46, Source: zca_dataset.py

Example 10: from_dataset

def from_dataset(dataset, num_examples):
    # This function does not support the tags attribute.
    try:

        V, y = dataset.get_batch_topo(num_examples, True)

    except:

        if isinstance(dataset, DenseDesignMatrix) and dataset.X is None and not control.get_load_data():
            # This patches a case where control.get_load_data() is false so
            # dataset.X is None. This logic should be removed whenever we
            # implement lazy loading.
            warnings.warn("from_dataset wasn't able to make subset of "
                          "dataset, using the whole thing")
            return DenseDesignMatrix(X=None,
                                     view_converter=dataset.view_converter)
        raise

    rval =  DenseDesignMatrix(topo_view=V, y=y)
    rval.adjust_for_viewer = dataset.adjust_for_viewer

    return rval
Developer: capybaralet, Project: current, Lines: 19, Source: dense_design_matrix.py

Example 11: _load_path

    def _load_path(self, which_set, which_targets, word2vec_dict={}):
        if which_targets not in ['fine', 'coarse']:
            raise ValueError(
                'Unrecognized which_targets value "%s". '
                'Valid values are ["fine","coarse"].' % (which_targets,))

        if which_set not in ['train', 'test']:
            raise ValueError(
                'Unrecognized which_set value "%s".' % (which_set,) +
                '". Valid values are ["train","test"].')

        if control.get_load_data():
            path = "${PYLEARN2_DATA_PATH}/TREC_question_type_data/"
            if which_set == 'train':
                data_path = path + 'trecqc.train_5500.label.txt'
            else:
                assert which_set == 'test'
                data_path = path + 'trecqc.test_500.label.txt'
            data_path = serial.preprocess(data_path)
        self.path = path
        return data_path
Developer: nmi-lab-ucsd, Project: spiking_QC, Lines: 21, Source: QC_dataset.py
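
The "${PYLEARN2_DATA_PATH}/..." strings used above are expanded by serial.preprocess, which substitutes the environment variable before the file is opened. A small sketch (the directory value is made up):

    import os
    from pylearn2.utils import serial

    os.environ['PYLEARN2_DATA_PATH'] = '/tmp/pylearn2_data'    # hypothetical location
    path = serial.preprocess("${PYLEARN2_DATA_PATH}/TREC_question_type_data/")
    print(path)    # /tmp/pylearn2_data/TREC_question_type_data/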

Example 12: __setstate__

    def __setstate__(self, d):

        if d["design_loc"] is not None:
            if control.get_load_data():
                d["X"] = N.load(d["design_loc"])
            else:
                d["X"] = None

        if d["compress"]:
            X = d["X"]
            mx = d["compress_max"]
            mn = d["compress_min"]
            del d["compress_max"]
            del d["compress_min"]
            d["X"] = 0
            self.__dict__.update(d)
            if X is not None:
                self.X = N.cast["float32"](X) * mx / 255.0 + mn
            else:
                self.X = None
        else:
            self.__dict__.update(d)
Developer: gdesjardins, Project: pylearn, Lines: 22, Source: dense_design_matrix.py

Example 13: __setstate__

    def __setstate__(self, d):

        if d['design_loc'] is not None:
            if control.get_load_data():
                d['X'] = N.load(d['design_loc'])
            else:
                d['X'] = None

        if d['compress']:
            X = d['X']
            mx = d['compress_max']
            mn = d['compress_min']
            del d['compress_max']
            del d['compress_min']
            d['X'] = 0
            self.__dict__.update(d)
            if X is not None:
                self.X = N.cast['float32'](X) * mx / 255. + mn
            else:
                self.X = None
        else:
            self.__dict__.update(d)
Developer: doorjuice, Project: pylearn, Lines: 22, Source: dense_design_matrix.py

Example 14: __init__

    def __init__(self, which_set, start=None, stop=None, nParticles = None):

        self.args = locals()

        if which_set not in ['train','valid', 'test']:
            raise ValueError(
            'Unrecognized which_set value "%s".' % (which_set,) +
            '". Valid values are ["train","test", "valid"].')
        if control.get_load_data():

            path = "${PYLEARN2_DATA_PATH}/nanoParticle/"
            #have to load in the whole array from the start anyway
            import h5py
            import numpy as np
            totParticles = 262144
            if nParticles is  None:
                nParticles = totParticles

            trainingFrac = .8
            validFrac = .1

            np.random.seed(0)

            idxs = np.random.choice(totParticles, nParticles)

            #if which_set == 'train':
            slice = np.s_[idxs, :]

            #elif which_set == 'valid':
            #    slice = np.s_[int(totParticles*trainingFrac):int(totParticles*trainingFrac)+nParticles, :]
            #else:
            #    assert which_set == 'test'
            #    slice = np.s_[int(totParticles*(trainingFrac+validFrac)):int(totParticles*(trainingFrac+validFrac))+nParticles, :]

            X = np.zeros((100, nParticles*6))
            y = np.zeros((100, nParticles*3))

            colors = ['b', 'g','r','y','k','m']

            def getFilename(i):
                base = path+'snapshot_'
                if i<10:
                    out= base+'00%d.hdf5'%i
                elif i<100:
                    out= base+'0%d.hdf5'%i
                else:
                    out= base+'%d.hdf5'%i
                return serial.preprocess(out)

            absMinVel, absMaxVal = 0,0
            maxCoord= 10000 #particles in a 0-10000 cube
            for i in xrange(101):
                fname = getFilename(i)
                f = h5py.File(fname, 'r')
                ids = f['PartType1']['ParticleIDs'][()]
                sorter = ids.argsort()

                coords = f['PartType1']['Coordinates'][()]
                coords = coords[sorter]#sort by ids

                #normalize
                #coordinates are all >=0, so just divide by max
                coords/=maxCoord

                #from matplotlib import pyplot as plt
                #plt.scatter(coords[0, 0], coords[0,1], c = colors[i%len(colors)])

                coords = coords[slice]

                #if i == 100:
                #    print which_set
                #    plt.show()

                if i!=0:
                    y[i-1,:] = coords.flatten()
                #if i == 100:
                #    continue
                #y[i,:] = coords.flatten()
                if i!=100:
                    vels = f['PartType1']['Velocities'][()]
                    vels = vels[sorter]

                    minVel, maxVel = vels.min(), vels.max()
                    if minVel < absMinVel:
                        absMinVel = minVel

                    if maxVel > absMaxVal:
                        absMaxVal = maxVel

                    vels = vels[slice]
                    data = np.concatenate((coords, vels), axis = 1).flatten()

                    X[i,:] = data
                    del data

                del coords
                f.close()

            #normalize the velocity columns
            for n in xrange(nParticles):
#......... (remaining code omitted) .........
Developer: Samantha-Thrush, Project: pylearn2, Lines: 101, Source: nanoParticle.py
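
Example 14 streams particle snapshots from HDF5 files. A minimal sketch of the h5py access pattern it relies on, with the group and dataset names taken from the example (the snapshot file itself is hypothetical):

    import h5py

    with h5py.File('snapshot_000.hdf5', 'r') as f:         # hypothetical snapshot file
        ids = f['PartType1']['ParticleIDs'][()]            # read the full dataset into memory
        coords = f['PartType1']['Coordinates'][()]
        coords = coords[ids.argsort()]                     # align rows by particle id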

Example 15: __init__

    def __init__(
        self,
        which_set,
        center=False,
        shuffle=False,
        one_hot=False,
        binarize=False,
        start=None,
        stop=None,
        axes=["b", 0, 1, "c"],
        preprocessor=None,
        fit_preprocessor=False,
        fit_test_preprocessor=False,
    ):

        self.args = locals()

        if which_set not in ["train", "test"]:
            if which_set == "valid":
                raise ValueError(
                    "There is no such thing as the MNIST validation set. MNIST"
                    "consists of 60,000 train examples and 10,000 test"
                    "examples. If you wish to use a validation set you should"
                    "divide the train set yourself. The pylearn2 dataset"
                    "implements and will only ever implement the standard"
                    "train / test split used in the literature."
                )
            raise ValueError(
                'Unrecognized which_set value "%s".' % (which_set,) + '". Valid values are ["train","test"].'
            )

        def dimshuffle(b01c):
            default = ("b", 0, 1, "c")
            return b01c.transpose(*[default.index(axis) for axis in axes])

        if control.get_load_data():
            path = "${VIDTIMIT}/data/"
            if which_set == "train":
                im_path = path + "train.npy"
                label_path = path + "train-labels.npy"
            else:
                assert which_set == "test"
                im_path = path + "test.npy"
                label_path = path + "test-labels.npy"
            # Path substitution done here in order to make the lower-level
            # mnist_ubyte.py as stand-alone as possible (for reuse in, e.g.,
            # the Deep Learning Tutorials, or in another package).
            im_path = serial.preprocess(im_path)
            label_path = serial.preprocess(label_path)
            topo_view = np.load(im_path)
            y = np.load(label_path)

            if binarize:
                topo_view = (topo_view > 0.5).astype("float32")

            self.one_hot = one_hot
            if one_hot:
                one_hot = N.zeros((y.shape[0], 36), dtype="float32")
                for i in xrange(y.shape[0]):
                    one_hot[i, y[i]] = 1.0
                y = one_hot
                max_labels = None
            else:
                max_labels = 36

            m, r, c = topo_view.shape
            assert r == 32
            assert c == 32
            topo_view = topo_view.reshape(m, r, c, 1)

            if which_set == "train":
                assert m == 27280
            elif which_set == "test":
                assert m == 10929
            else:
                assert False

            if center:
                topo_view -= topo_view.mean(axis=0)

            if shuffle:
                self.shuffle_rng = make_np_rng(None, [1, 2, 3], which_method="shuffle")
                for i in xrange(topo_view.shape[0]):
                    j = self.shuffle_rng.randint(m)
                    # Copy ensures that memory is not aliased.
                    tmp = topo_view[i, :, :, :].copy()
                    topo_view[i, :, :, :] = topo_view[j, :, :, :]
                    topo_view[j, :, :, :] = tmp
                    # Note: slicing with i:i+1 works for one_hot=True/False
                    tmp = y[i : i + 1].copy()
                    y[i] = y[j]
                    y[j] = tmp

            super(VidTIMIT, self).__init__(topo_view=dimshuffle(topo_view), y=y, axes=axes, max_labels=max_labels)

            assert not N.any(N.isnan(self.X))

            if start is not None:
                assert start >= 0
                if stop > self.X.shape[0]:
#......... (remaining code omitted) .........
Developer: EderSantana, Project: mdpcn, Lines: 101, Source: old_vidtimit.py
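
The dimshuffle helper in Example 15 reorders the topological axes from the canonical ('b', 0, 1, 'c') layout into whatever order the caller requests. A small numeric sketch of the same idea:

    import numpy as np

    def dimshuffle(b01c, axes):
        default = ('b', 0, 1, 'c')
        return b01c.transpose(*[default.index(axis) for axis in axes])

    batch = np.zeros((128, 32, 32, 1))                   # ('b', 0, 1, 'c') layout
    print(dimshuffle(batch, ['b', 'c', 0, 1]).shape)     # (128, 1, 32, 32)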


Note: The pylearn2.datasets.control.get_load_data examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets come from open-source projects contributed by various authors; the source code remains the copyright of its original authors, and any redistribution or use should follow the corresponding project's license. Do not reproduce without permission.