

Python MultiEncoder.addEncoder Method Code Examples

This article collects typical usage examples of the Python method nupic.encoders.MultiEncoder.addEncoder. If you are wondering what MultiEncoder.addEncoder does, how to call it, or are looking for working code, the hand-picked examples below should help. You can also explore further usage examples of nupic.encoders.MultiEncoder, the class this method belongs to.


The following presents 10 code examples of MultiEncoder.addEncoder, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python code examples.
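Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share: build a MultiEncoder, register one sub-encoder per input field with addEncoder(fieldName, encoder), and the combined encoding width is the sum of the sub-encoder widths. The field name and parameters below are illustrative only, not taken from any particular example.

from nupic.encoders import MultiEncoder, ScalarEncoder

encoder = MultiEncoder()
# Illustrative field name and encoder parameters
encoder.addEncoder("consumption",
                   ScalarEncoder(w=21, minval=0.0, maxval=100.0, n=50,
                                 name="consumption", clipInput=True))
print "combined width:", encoder.getWidth()  # 50: only the one sub-encoder registered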

Example 1: createEncoder

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def createEncoder():
    """Create the encoder instance for our test and return it."""
    consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption", clipInput=True)
    time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")

    encoder = MultiEncoder()
    encoder.addEncoder("consumption", consumption_encoder)
    encoder.addEncoder("timestamp", time_encoder)

    return encoder
Author: nickolyamba, Project: nupic, Lines: 12, Source: network_api_demo.py
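As a follow-up to example 1, the sketch below (not part of network_api_demo.py; sample values made up) shows how the returned encoder could be used directly: MultiEncoder.encode() accepts a dict keyed by the field names passed to addEncoder().

import datetime

encoder = createEncoder()
# Hypothetical record: one entry per field registered with addEncoder()
record = {"consumption": 42.0,
          "timestamp": datetime.datetime(2015, 3, 25, 14, 30)}
sdr = encoder.encode(record)  # numpy array of width encoder.getWidth()
print "width:", encoder.getWidth(), "active bits:", int(sdr.sum())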

Example 2: createScalarEncoder

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def createScalarEncoder():
  scalarEncoder = ScalarEncoder(SCALAR_ENCODER_PARAMS['w'], 
                       SCALAR_ENCODER_PARAMS['minval'], 
                       SCALAR_ENCODER_PARAMS['maxval'], 
                       n=SCALAR_ENCODER_PARAMS['n'], 
                       name=SCALAR_ENCODER_PARAMS['name'])
  
  # NOTE: we don't want to encode the category along with the scalar input. 
  # The category will be fed separately to the classifier later, during the training phase.
  #categoryEncoder = CategoryEncoder(CATEGORY_ENCODER_PARAMS['w'],
  #                                  CATEGORY_ENCODER_PARAMS['categoryList'],
  #                                  name=CATEGORY_ENCODER_PARAMS['name'])
  encoder = MultiEncoder()
  encoder.addEncoder(SCALAR_ENCODER_PARAMS['name'], scalarEncoder)
  
  return encoder
Author: pford68, Project: nupic.research, Lines: 18, Source: classify_active_cells.py
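Example 2 relies on module-level parameter dicts defined elsewhere in classify_active_cells.py and not shown here. A hypothetical set of values with the shape the code expects (an assumption for illustration, not the project's actual settings):

SCALAR_ENCODER_PARAMS = {
    "name": "white_noise",   # hypothetical field name
    "w": 21,
    "minval": -10.0,
    "maxval": 10.0,
    "n": 256,
}

CATEGORY_ENCODER_PARAMS = {
    "name": "label",         # hypothetical field name
    "w": 21,
    "categoryList": ["classA", "classB"],
}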

Example 3: Sensor

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder

#......... part of the code omitted here .........
      """
      Initialize this node by opening the file and placing the cursor on the first record.
      """

      # If the file name provided is a relative path, resolve it against the project file path
      if self.fileName != '' and os.path.dirname(self.fileName) == '':
        fullFileName = os.path.dirname(Global.project.fileName) + '/' + self.fileName
      else:
        fullFileName = self.fileName

      # Check if file really exists
      if not os.path.isfile(fullFileName):
        QtGui.QMessageBox.warning(None, "Warning", "Input stream file '" + fullFileName + "' was not found or specified.", QtGui.QMessageBox.Ok)
        return

      # Create a data source to read the file
      self.dataSource = FileRecordStream(fullFileName)

    elif self.dataSourceType == DataSourceType.database:
      pass

    self.encoder = MultiEncoder()
    for encoding in self.encodings:
      encoding.initialize()

      # Create an instance class for an encoder given its module, class and constructor params
      encoding.encoder = getInstantiatedClass(encoding.encoderModule, encoding.encoderClass, encoding.encoderParams)

      # Take the first part of encoder field name as encoder name
      # Ex: timestamp_weekend.weekend => timestamp_weekend
      encoding.encoder.name = encoding.encoderFieldName.split('.')[0]

      # Add sub-encoder to multi-encoder list
      self.encoder.addEncoder(encoding.dataSourceFieldName, encoding.encoder)

    # If the encoder width exceeds the sensor size, warn and abort
    encoderSize = self.encoder.getWidth()
    sensorSize = self.width * self.height
    if encoderSize > sensorSize:
      QtGui.QMessageBox.warning(None, "Warning", "'" + self.name + "': Encoder size (" + str(encoderSize) + ") is different from sensor size (" + str(self.width) + " x " + str(self.height) + " = " + str(sensorSize) + ").", QtGui.QMessageBox.Ok)
      return

    return True

  def nextStep(self):
    """
    Performs actions related to time step progression.
    """

    # Update the state machines by removing the first element and adding a new one at the end
    for encoding in self.encodings:
      encoding.currentValue.rotate()
      if encoding.enableInference:
        encoding.predictedValues.rotate()
        encoding.bestPredictedValue.rotate()

    Node.nextStep(self)
    for bit in self.bits:
      bit.nextStep()

    # Get record value from data source
    # If the last record was reached just rewind it
    data = self.dataSource.getNextRecordDict()
    if not data:
      self.dataSource.rewind()
      data = self.dataSource.getNextRecordDict()
Author: Amazer, Project: nupic.studio, Lines: 70, Source: node_sensor.py
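Example 3 iterates over self.encodings, whose entries are defined elsewhere in nupic.studio. The hypothetical stand-in below lists only the attributes the loop above reads, with illustrative values, to make the expected structure explicit:

class Encoding(object):
    # Minimal stand-in; the real nupic.studio class also provides initialize()
    # and per-step state (currentValue, predictedValues, ...) used in nextStep().
    def __init__(self):
        self.dataSourceFieldName = "timestamp"               # column in the input file
        self.encoderModule = "nupic.encoders.date"           # module containing the encoder class
        self.encoderClass = "DateEncoder"                    # encoder class to instantiate
        self.encoderParams = {"weekend": 21}                 # constructor keyword arguments
        self.encoderFieldName = "timestamp_weekend.weekend"  # first part becomes encoder.name
        self.enableInference = True
        self.encoder = None                                  # filled in by the initialize() loop above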

Example 4: getDescription

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def getDescription(datasets):

    # ========================================================================
    # Encoder for the sensor
    encoder = MultiEncoder()

    if config["encodingFieldStyleA"] == "contiguous":
        encoder.addEncoder(
            "fieldA",
            ScalarEncoder(
                w=config["encodingOnBitsA"],
                n=config["encodingFieldWidthA"],
                minval=0,
                maxval=config["numAValues"],
                periodic=True,
                name="fieldA",
            ),
        )
    elif config["encodingFieldStyleA"] == "sdr":
        encoder.addEncoder(
            "fieldA",
            SDRCategoryEncoder(
                w=config["encodingOnBitsA"],
                n=config["encodingFieldWidthA"],
                categoryList=range(config["numAValues"]),
                name="fieldA",
            ),
        )
    else:
        assert False

    if config["encodingFieldStyleB"] == "contiguous":
        encoder.addEncoder(
            "fieldB",
            ScalarEncoder(
                w=config["encodingOnBitsB"],
                n=config["encodingFieldWidthB"],
                minval=0,
                maxval=config["numBValues"],
                periodic=True,
                name="fieldB",
            ),
        )
    elif config["encodingFieldStyleB"] == "sdr":
        encoder.addEncoder(
            "fieldB",
            SDRCategoryEncoder(
                w=config["encodingOnBitsB"],
                n=config["encodingFieldWidthB"],
                categoryList=range(config["numBValues"]),
                name="fieldB",
            ),
        )
    else:
        assert False

    # ========================================================================
    # Network definition

    # ------------------------------------------------------------------
    # Node params
    # The inputs are long, horizontal vectors
    inputShape = (1, encoder.getWidth())

    # Layout the coincidences vertically stacked on top of each other, each
    # looking at the entire input field.
    coincidencesShape = (config["spCoincCount"], 1)
    inputBorder = inputShape[1] / 2
    if inputBorder * 2 >= inputShape[1]:
        inputBorder -= 1

    sensorParams = dict(
        # encoder/datasource are not parameters so don't include here
        verbosity=config["sensorVerbosity"]
    )

    CLAParams = dict(
        inputShape=inputShape,
        inputBorder=inputBorder,
        coincidencesShape=coincidencesShape,
        coincInputRadius=inputShape[1] / 2,
        coincInputPoolPct=1.0,
        gaussianDist=0,
        commonDistributions=0,  # should be False if possibly not training
        localAreaDensity=-1,  # 0.05,
        numActivePerInhArea=config["spNumActivePerInhArea"],
        dutyCyclePeriod=1000,
        stimulusThreshold=1,
        synPermInactiveDec=config["spSynPermInactiveDec"],
        synPermActiveInc=0.02,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec=0.0,
        minPctDutyCycleBeforeInh=0.001,
        minPctDutyCycleAfterInh=config["spMinPctDutyCycleAfterInh"],
        minDistance=0.05,
        computeTopDown=1,
        spVerbosity=config["spVerbosity"],
        spSeed=1,
        printPeriodicStats=int(config["spPeriodicStats"]),
        # TP params
#......... part of the code omitted here .........
Author: srecioe, Project: nupic, Lines: 103, Source: description.py

Example 5: getDescription

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def getDescription(datasets):

  # ========================================================================
  # Encoder for the sensor
  encoder = MultiEncoder()

  if config['encodingFieldStyleA'] == 'contiguous':
    encoder.addEncoder('fieldA', ScalarEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'], minval=0,
                        maxval=config['numAValues'], periodic=True, name='fieldA'))
  elif config['encodingFieldStyleA'] == 'sdr':
    encoder.addEncoder('fieldA', SDRCategoryEncoder(w=config['encodingOnBitsA'],
                        n=config['encodingFieldWidthA'],
                        categoryList=range(config['numAValues']), name='fieldA'))
  else:
    assert False


  if config['encodingFieldStyleB'] == 'contiguous':
    encoder.addEncoder('fieldB', ScalarEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], minval=0, 
                      maxval=config['numBValues'], periodic=True, name='fieldB'))
  elif config['encodingFieldStyleB'] == 'sdr':
    encoder.addEncoder('fieldB', SDRCategoryEncoder(w=config['encodingOnBitsB'], 
                      n=config['encodingFieldWidthB'], 
                      categoryList=range(config['numBValues']), name='fieldB'))
  else:
    assert False



  # ========================================================================
  # Network definition


  # ------------------------------------------------------------------
  # Node params
  # The inputs are long, horizontal vectors
  inputDimensions = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field. 
  columnDimensions = (config['spCoincCount'], 1)

  sensorParams = dict(
    # encoder/datasource are not parameters so don't include here
    verbosity=config['sensorVerbosity']
  )

  CLAParams = dict(
    inputDimensions = inputDimensions,
    columnDimensions = columnDimensions,
    potentialRadius = inputDimensions[1]/2,
    potentialPct = 1.0,
    gaussianDist = 0,
    commonDistributions = 0,    # should be False if possibly not training
    localAreaDensity = -1, #0.05, 
    numActiveColumnsPerInhArea = config['spNumActivePerInhArea'],
    dutyCyclePeriod = 1000,
    stimulusThreshold = 1,
    synPermInactiveDec = config['spSynPermInactiveDec'],
    synPermActiveInc = 0.02,
    synPermActiveSharedDec=0.0,
    synPermOrphanDec = 0.0,
    minPctDutyCycleBeforeInh = 0.001,
    minPctDutyCycleAfterInh = config['spMinPctDutyCycleAfterInh'],
    minDistance = 0.05,
    computeTopDown = 1,
    spVerbosity = config['spVerbosity'],
    spSeed = 1,
    printPeriodicStats = int(config['spPeriodicStats']),

    # TP params
    disableTemporal = 1,

    # General params
    trainingStep = 'spatial',
    )

  trainingDataSource = FileRecordStream(datasets['trainingFilename'])


  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = trainingDataSource,
      sensorEncoder = encoder, 
      sensorParams = sensorParams,

      CLAType = 'py.CLARegion',
      CLAParams = CLAParams,

      classifierType = None,
      classifierParams = None),

  )

#......... part of the code omitted here .........
Author: runt18, Project: nupic, Lines: 103, Source: description.py

Example 6: getDescription

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def getDescription(datasets):
  encoder = MultiEncoder()
  encoder.addEncoder("date", DateEncoder(timeOfDay=3))
  encoder.addEncoder("amount", LogEncoder(name="amount", maxval=1000))
  for i in xrange(0, nRandomFields):
    s = ScalarEncoder(name="scalar", minval=0, maxval=randomFieldWidth, resolution=1, w=3)
    encoder.addEncoder("random%d" % i, s)

  dataSource = FunctionSource(generateFunction, dict(nRandomFields=nRandomFields,
                                                 randomFieldWidth=randomFieldWidth))

  inputShape = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field.
  coincidencesShape = (nCoincidences, 1)
  # TODO: why do we need input border?
  inputBorder = inputShape[1]/2
  if inputBorder*2 >= inputShape[1]:
    inputBorder -= 1


  nodeParams = dict()

  spParams = dict(
        commonDistributions=0,
        inputShape = inputShape,
        inputBorder = inputBorder,
        coincidencesShape = coincidencesShape,
        coincInputRadius = inputShape[1]/2,
        coincInputPoolPct = 0.75,
        gaussianDist = 0,
        localAreaDensity = 0.10,
        # localAreaDensity = 0.04,
        numActivePerInhArea = -1,
        dutyCyclePeriod = 1000,
        stimulusThreshold = 5,
        synPermInactiveDec=0.08,
        # synPermInactiveDec=0.02,
        synPermActiveInc=0.02,
        synPermActiveSharedDec=0.0,
        synPermOrphanDec = 0.0,
        minPctDutyCycleBeforeInh = 0.05,
        # minPctDutyCycleAfterInh = 0.1,
        # minPctDutyCycleBeforeInh = 0.05,
        minPctDutyCycleAfterInh = 0.05,
        # minPctDutyCycleAfterInh = 0.4,
        seed = 1,
  )

  otherParams = dict(
    disableTemporal=1,
    trainingStep='spatial',

  )

  nodeParams.update(spParams)
  nodeParams.update(otherParams)

  def mySetupCallback(experiment):
    print "Setup function called"

  description = dict(
    options = dict(
      logOutputsDuringInference = False,
    ),

    network = dict(
      sensorDataSource = dataSource,
      sensorEncoder = encoder,
      CLAType = "py.CLARegion",
      CLAParams = nodeParams,
      classifierType = None,
      classifierParams = None),

    # step
    spTrain = dict(
      name="phase1",
      setup=mySetupCallback,
      iterationCount=5000,
      #iter=displaySPCoincidences(100),
      finish=printSPCoincidences()),

    tpTrain = None,        # same format as sptrain if non-empty

    infer = None,          # same format as sptrain if non-empty

  )

  return description
Author: AI-Cdrone, Project: nupic, Lines: 92, Source: description.py
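Because example 6 feeds the MultiEncoder from a FunctionSource, every record the generator yields must be a dict containing one entry per field registered with addEncoder(). A hypothetical record matching the fields above, assuming nRandomFields = 2 and randomFieldWidth = 100 (values made up):

import datetime

# Hypothetical record for the encoder built in example 6
record = {
    "date": datetime.datetime(2012, 6, 1, 9, 0),  # consumed by the DateEncoder
    "amount": 250.0,                              # LogEncoder encodes the log of the value
    "random0": 37,
    "random1": 81,
}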

Example 7: createTemporalAnomaly

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def createTemporalAnomaly(recordParams, spatialParams=_SP_PARAMS,
                          temporalParams=_TP_PARAMS,
                          verbosity=_VERBOSITY):
  """Generates a Network with connected RecordSensor, SP, TP.

  This function takes care of generating regions and the canonical links.
  The network has a sensor region reading data from a specified input and
  passing the encoded representation to an SPRegion.
  The SPRegion output is passed to a TPRegion.

  Note: this function returns a network that needs to be initialized. This
  allows the user to extend the network by adding further regions and
  connections.

  :param recordParams: a dict with parameters for creating RecordSensor region.
  :param spatialParams: a dict with parameters for creating SPRegion.
  :param temporalParams: a dict with parameters for creating TPRegion.
  :param verbosity: an integer representing how chatty the network will be.
  """
  inputFilePath = recordParams["inputFilePath"]
  scalarEncoderArgs = recordParams["scalarEncoderArgs"]
  dateEncoderArgs = recordParams["dateEncoderArgs"]

  scalarEncoder = ScalarEncoder(**scalarEncoderArgs)
  dateEncoder = DateEncoder(**dateEncoderArgs)

  encoder = MultiEncoder()
  encoder.addEncoder(scalarEncoderArgs["name"], scalarEncoder)
  encoder.addEncoder(dateEncoderArgs["name"], dateEncoder)

  network = Network()

  network.addRegion("sensor", "py.RecordSensor",
                    json.dumps({"verbosity": verbosity}))

  sensor = network.regions["sensor"].getSelf()
  sensor.encoder = encoder
  sensor.dataSource = FileRecordStream(streamID=inputFilePath)

  # Create the spatial pooler region
  spatialParams["inputWidth"] = sensor.encoder.getWidth()
  network.addRegion("spatialPoolerRegion", "py.SPRegion",
                    json.dumps(spatialParams))

  # Link the SP region to the sensor input
  network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
  network.link("sensor", "spatialPoolerRegion", "UniformLink", "",
               srcOutput="resetOut", destInput="resetIn")
  network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
               srcOutput="spatialTopDownOut", destInput="spatialTopDownIn")
  network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
               srcOutput="temporalTopDownOut", destInput="temporalTopDownIn")

  # Add the TPRegion on top of the SPRegion
  network.addRegion("temporalPoolerRegion", "py.TPRegion",
                    json.dumps(temporalParams))

  network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink", "")
  network.link("temporalPoolerRegion", "spatialPoolerRegion", "UniformLink", "",
               srcOutput="topDownOut", destInput="topDownIn")


  spatialPoolerRegion = network.regions["spatialPoolerRegion"]

  # Make sure learning is enabled
  spatialPoolerRegion.setParameter("learningMode", True)
  # We want temporal anomalies so disable anomalyMode in the SP. This mode is
  # used for computing anomalies in a non-temporal model.
  spatialPoolerRegion.setParameter("anomalyMode", False)

  temporalPoolerRegion = network.regions["temporalPoolerRegion"]

  # Enable topDownMode to get the predicted columns output
  temporalPoolerRegion.setParameter("topDownMode", True)
  # Make sure learning is enabled (this is the default)
  temporalPoolerRegion.setParameter("learningMode", True)
  # Enable inference mode so we get predictions
  temporalPoolerRegion.setParameter("inferenceMode", True)
  # Enable anomalyMode to compute the anomaly score.
  temporalPoolerRegion.setParameter("anomalyMode", True)

  return network
Author: mewbak, Project: nupic, Lines: 84, Source: __init__.py
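The docstring of createTemporalAnomaly() notes that the returned network still has to be initialized. Below is a hedged sketch of how a caller might finish setup and run it; the recordParams contents and the CSV path are assumptions, not taken from the original module.

recordParams = {
    "inputFilePath": "rec-center-hourly.csv",   # hypothetical input file
    "scalarEncoderArgs": {"name": "consumption", "w": 21, "n": 50,
                          "minval": 0.0, "maxval": 100.0, "clipInput": True},
    "dateEncoderArgs": {"name": "timestamp", "timeOfDay": (21, 9.5)},
}

network = createTemporalAnomaly(recordParams)
network.initialize()

temporalPoolerRegion = network.regions["temporalPoolerRegion"]
for _ in xrange(100):
    network.run(1)                              # one record per step: sensor -> SP -> TP
anomalyScore = temporalPoolerRegion.getOutputData("anomalyScore")[0]
print "anomaly score after 100 records:", anomalyScore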

Example 8: ScalarEncoder

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
        self.quarterOfYear = month.quarter
        self.halfOfYear = month.half


if __name__ == "__main__":
    day_of_week_enc = ScalarEncoder(w=3, minval=0, maxval=7, radius=1.5, periodic=True, name="dayOfWeek", forced=True)
    day_of_month_enc = ScalarEncoder(w=3, minval=1, maxval=31, radius=1.5, periodic=False, name="dayOfMonth", forced=True)
    first_last_of_month_enc = ScalarEncoder(w=1, minval=0, maxval=2, radius=1, periodic=False, name="firstLastOfMonth", forced=True)
    week_of_month_enc = ScalarEncoder(w=3, minval=0, maxval=6, radius=1.5, periodic=True, name="weekOfMonth", forced=True)
    month_of_year_enc = ScalarEncoder(w=3, minval=1, maxval=13, radius=1.5, periodic=True, name="monthOfYear", forced=True)
    quarter_of_year_enc = ScalarEncoder(w=3, minval=0, maxval=4, radius=1.5, periodic=True, name="quarterOfYear", forced=True)
    half_of_year_enc = ScalarEncoder(w=1, minval=0, maxval=2, radius=1, periodic=True, name="halfOfYear", forced=True)
    year_of_decade_enc = ScalarEncoder(w=3, minval=0, maxval=10, radius=1.5, periodic=True, name="yearOfDecade", forced=True)

    date_enc = MultiEncoder()
    date_enc.addEncoder(day_of_week_enc.name, day_of_week_enc)
    date_enc.addEncoder(day_of_month_enc.name, day_of_month_enc)
    date_enc.addEncoder(first_last_of_month_enc.name, first_last_of_month_enc)
    date_enc.addEncoder(week_of_month_enc.name, week_of_month_enc)
    date_enc.addEncoder(year_of_decade_enc.name, year_of_decade_enc)
    date_enc.addEncoder(month_of_year_enc.name, month_of_year_enc)
    date_enc.addEncoder(quarter_of_year_enc.name, quarter_of_year_enc)
    date_enc.addEncoder(half_of_year_enc.name, half_of_year_enc)

    if os.path.isfile('tp.p'):
	print "loading TP from tp.p and tp.tp"
	with open("tp.p", "r") as f:
	    tp = pickle.load(f)
	tp.loadFromFile("tp.tp")
    else:
	tp = TP(numberOfCols=date_enc.width, cellsPerColumn=1795,
Author: aviyashchin, Project: hacks, Lines: 33, Source: train_tp.py
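Continuing example 8, the sketch below (an assumption, not part of train_tp.py) shows the kind of record date_enc expects: a dict keyed by the sub-encoder names, with each calendar feature reduced to a number inside its encoder's range. The derivations are illustrative.

import datetime

d = datetime.date(2015, 7, 14)
record = {
    "dayOfWeek": d.weekday(),                # 0-6
    "dayOfMonth": d.day,                     # 1-31
    "firstLastOfMonth": 0,                   # hypothetical flag: 0 = neither first nor last
    "weekOfMonth": (d.day - 1) // 7,         # 0-4
    "monthOfYear": d.month,                  # 1-12
    "quarterOfYear": (d.month - 1) // 3,     # 0-3
    "halfOfYear": 0 if d.month <= 6 else 1,  # 0 or 1
    "yearOfDecade": d.year % 10,             # 0-9
}
sdr = date_enc.encode(record)
print "encoded width:", date_enc.width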

Example 9: _get_encoder

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
    def _get_encoder(self):
        # date encoding
        #date_enc = DateEncoder(name='date', forced=True)
        day_of_week_enc = ScalarEncoder(w=21, minval=0, maxval=7, radius=1.5,
                                        periodic=True, name=COL_DAY_OF_WEEK, forced=True)
        day_of_month_enc = ScalarEncoder(w=21, minval=1, maxval=31, radius=1.5,
                                         periodic=False, name=COL_DAY_OF_MONTH, forced=True)
        first_last_of_month_enc = ScalarEncoder(w=21, minval=0, maxval=2, radius=1, periodic=False,
                                                name=COL_FIRST_LAST_MONTH, forced=True)
        week_of_month_enc = ScalarEncoder(w=21, minval=0, maxval=6, radius=1.5,
                                          periodic=True, name=COL_WEEK_OF_MONTH, forced=True)
        month_of_year_enc = ScalarEncoder(w=21, minval=1, maxval=13, radius=1.5,
                                          periodic=True, name=COL_MONTH_OF_YEAR, forced=True)
        quarter_of_year_enc = ScalarEncoder(w=21, minval=0, maxval=4, radius=1.5,
                                            periodic=True, name=COL_QUART_OF_YEAR, forced=True)
        half_of_year_enc = ScalarEncoder(w=21, minval=0, maxval=2,
                                         radius=1, periodic=True, name=COL_HALF_OF_YEAR, forced=True)
        year_of_decade_enc = ScalarEncoder(w=21, minval=0, maxval=10, radius=1.5,
                                           periodic=True, name=COL_YEAR_OF_DECADE, forced=True)

        # semantics encoder
        stoch_rsi_enc = ScalarEncoder(w=21, minval=0, maxval=1,
                                      radius=0.05, periodic=False, name=COL_STOCH_RSI, forced=True)
        # symbol_enc = ScalarEncoder(w=21, minval=0, maxval=1, radius=0.1, periodic=False, name=COL_SYMBOL, forced=True)
        candlestick_enc = PassThroughEncoder(50, name=COL_CANDLESTICK, forced=True)

        encoder = MultiEncoder()
        encoder.addEncoder(day_of_week_enc.name, day_of_week_enc)
        encoder.addEncoder(day_of_month_enc.name, day_of_month_enc)
        encoder.addEncoder(first_last_of_month_enc.name, first_last_of_month_enc)
        encoder.addEncoder(week_of_month_enc.name, week_of_month_enc)
        encoder.addEncoder(year_of_decade_enc.name, year_of_decade_enc)
        encoder.addEncoder(month_of_year_enc.name, month_of_year_enc)
        encoder.addEncoder(quarter_of_year_enc.name, quarter_of_year_enc)
        encoder.addEncoder(half_of_year_enc.name, half_of_year_enc)

        encoder.addEncoder(stoch_rsi_enc.name, stoch_rsi_enc)
        # encoder.addEncoder(symbol_enc.name, symbol_enc)
        encoder.addEncoder(candlestick_enc.name, candlestick_enc)

        return encoder
Author: aviyashchin, Project: hacks, Lines: 43, Source: encoder.py
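One detail of example 9 worth spelling out: PassThroughEncoder does not compute an SDR itself; it expects the candlestick field to arrive as an already-binarized array of the declared width and simply copies it into the output. A small self-contained sketch of that assumption (field names and bit positions made up):

import numpy

from nupic.encoders import MultiEncoder, ScalarEncoder, PassThroughEncoder

encoder = MultiEncoder()
encoder.addEncoder("stochRSI", ScalarEncoder(w=21, minval=0, maxval=1, radius=0.05,
                                             periodic=False, name="stochRSI", forced=True))
encoder.addEncoder("candlestick", PassThroughEncoder(50, name="candlestick"))

candlestick_bits = numpy.zeros(50, dtype=numpy.uint8)
candlestick_bits[[3, 17, 42]] = 1                 # hypothetical pre-computed bit pattern

sdr = encoder.encode({"stochRSI": 0.35, "candlestick": candlestick_bits})
print "total width:", encoder.getWidth()          # scalar width + 50 pass-through bits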

Example 10: getDescription

# Required import: from nupic.encoders import MultiEncoder
# Or: from nupic.encoders.MultiEncoder import addEncoder
def getDescription(datasets):

  # ========================================================================
  # Network definition

  # Encoder for the sensor
  encoder = MultiEncoder()  
  if 'filenameCategory' in datasets:
    categories = [x.strip() for x in 
                              open(datasets['filenameCategory']).xreadlines()]
  else:
    categories = [chr(x+ord('a')) for x in range(26)]

  if config['overlappingPatterns']:
    encoder.addEncoder("name", SDRCategoryEncoder(n=200, 
      w=config['spNumActivePerInhArea'], categoryList=categories, name="name"))
  else:
    encoder.addEncoder("name", CategoryEncoder(w=config['spNumActivePerInhArea'], 
                        categoryList=categories, name="name"))


  # ------------------------------------------------------------------
  # Node params
  # The inputs are long, horizontal vectors
  inputDimensions = (1, encoder.getWidth())

  # Layout the coincidences vertically stacked on top of each other, each
  # looking at the entire input field. 
  columnDimensions = (config['spCoincCount'], 1)

  # If we have disableSpatial, then set the number of "coincidences" to be the
  #  same as the encoder width
  if config['disableSpatial']:
    columnDimensions = (encoder.getWidth(), 1)
    config['trainSP'] = 0

  sensorParams = dict(
    # encoder/datasource are not parameters so don't include here
    verbosity=config['sensorVerbosity']
  )

  CLAParams = dict(
    # SP params
    disableSpatial = config['disableSpatial'],
    inputDimensions = inputDimensions,
    columnDimensions = columnDimensions,
    potentialRadius = inputDimensions[1]/2,
    potentialPct = 1.00,
    gaussianDist = 0,
    commonDistributions = 0,    # should be False if possibly not training
    localAreaDensity = -1, #0.05, 
    numActiveColumnsPerInhArea = config['spNumActivePerInhArea'], 
    dutyCyclePeriod = 1000,
    stimulusThreshold = 1,
    synPermInactiveDec=0.11,
    synPermActiveInc=0.11,
    synPermActiveSharedDec=0.0,
    synPermOrphanDec = 0.0,
    minPctDutyCycleBeforeInh = 0.001,
    minPctDutyCycleAfterInh = 0.001,
    spVerbosity = config['spVerbosity'],
    spSeed = 1,
    printPeriodicStats = int(config['spPrintPeriodicStats']),


    # TP params
    tpSeed = 1,
    disableTemporal = 0 if config['trainTP'] else 1,
    temporalImp = config['temporalImp'],
    nCellsPerCol = config['tpNCellsPerCol'] if config['trainTP'] else 1,

    collectStats = 1,
    burnIn = 2,
    verbosity = config['tpVerbosity'],

    newSynapseCount = config['spNumActivePerInhArea'],
    minThreshold = config['spNumActivePerInhArea'],
    activationThreshold = config['spNumActivePerInhArea'],

    initialPerm = config['tpInitialPerm'],
    connectedPerm = 0.5,
    permanenceInc = config['tpPermanenceInc'],
    permanenceDec = config['tpPermanenceDec'],  # perhaps tune this
    globalDecay = config['tpGlobalDecay'],

    pamLength = config['tpPAMLength'],
    maxSeqLength = config['tpMaxSeqLength'],
    maxAge = config['tpMaxAge'],


    # General params
    computeTopDown = config['computeTopDown'],
    trainingStep = 'spatial',
    )


  dataSource = FileRecordStream(datasets['filenameTrain'])

  description = dict(
    options = dict(
#......... part of the code omitted here .........
Author: NunoEdgarGub1, Project: nupic, Lines: 103, Source: description.py
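Example 10 switches between two category encoders depending on config['overlappingPatterns']. The short sketch below (editorial, not part of description.py; w=21 is an illustrative stand-in for config['spNumActivePerInhArea']) contrasts the two choices: CategoryEncoder allocates a disjoint block of w bits per category, so its width grows with the category list, while SDRCategoryEncoder draws overlapping random SDRs inside a fixed n-bit space.

from nupic.encoders import CategoryEncoder, SDRCategoryEncoder

categories = [chr(x + ord('a')) for x in range(26)]

blockEncoder = CategoryEncoder(w=21, categoryList=categories, name="name")
sdrEncoder = SDRCategoryEncoder(n=200, w=21, categoryList=categories, name="name")

print "CategoryEncoder width:   ", blockEncoder.getWidth()  # w * (len(categories) + 1) = 567
print "SDRCategoryEncoder width:", sdrEncoder.getWidth()    # fixed at n = 200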


Note: The nupic.encoders.MultiEncoder.addEncoder examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets are drawn from open-source projects contributed by their respective authors, and the copyright remains with them; please follow each project's license when using or redistributing the code. Do not reproduce this article without permission.