

C# IActivationFunction Class Code Examples

This article collects typical usage examples of the C# IActivationFunction class. If you are looking for concrete answers to questions such as how the IActivationFunction class is used in C#, or what working examples look like, the curated code samples below may help.


The namespace the IActivationFunction class belongs to is not stated in the source. Fifteen code examples of the class are shown below, ordered by popularity by default.
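
The examples are drawn from several different libraries (Encog, AForge.NET, SharpNEAT, AIFH), each of which declares its own IActivationFunction. As a rough point of reference, AForge.NET's version of the interface looks approximately like the sketch below; treat it as an illustration rather than the verbatim declaration, and check the library you are actually using.

    // Sketch of AForge.NET's AForge.Neuro.IActivationFunction (approximate).
    public interface IActivationFunction
    {
        double Function(double x);     // value of the activation function at x
        double Derivative(double x);   // derivative, computed from the input x
        double Derivative2(double y);  // derivative, computed from the output value y
    }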

Example 1: NormalizeOneDesiredOutputInPlace

        /// <summary>
        ///   Normalizes the desired output in place
        /// </summary>
        /// <param name = "function">Activation function used</param>
        /// <param name = "output">Normalize output</param>
        /// <returns>Normalized output</returns>
        public static double[] NormalizeOneDesiredOutputInPlace(IActivationFunction function, double[] output)
        {
            if (function is ActivationSigmoid)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0 ? 0.8 : 0.2);
                }
            }
            else if (function is ActivationTANH)
            {
                for (int i = 0, n = output.Length; i < n; i++)
                {
                    output[i] = (output[i] > 0.5 ? 0.5 : -0.5);
                }
            }
            else if (function is ActivationLinear)
            {
                /*do nothing*/
            }
            else
            {
                throw new ArgumentException("Unknown activation function");
            }

            return output;
        }
Developer ID: jorik041, Project: soundfingerprinting, Lines of code: 33, Source file: NormalizeUtils.cs
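
A minimal usage sketch for the method above. ActivationSigmoid is the Encog activation type the method itself tests for, and NormalizeUtils is the class named in the source reference; the call site itself is hypothetical.

    // Map raw desired outputs onto values a sigmoid output can actually reach.
    double[] desired = { 1.0, 0.0, 1.0 };
    NormalizeUtils.NormalizeOneDesiredOutputInPlace(new ActivationSigmoid(), desired);
    // desired is now { 0.8, 0.2, 0.8 }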

Example 2: NormalizeDesiredInputInPlace

 /// <summary>
 ///   Normalizes the desired input in place
 /// </summary>
 /// <param name = "function">Activation function</param>
 /// <param name = "input">Input to normalize</param>
 /// <returns>Reference to normalized input</returns>
 public static double[] NormalizeDesiredInputInPlace(IActivationFunction function, double[] input)
 {
     if (function is ActivationTANH)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? -0.8f : 0.8f));
         }
     }
     else if (function is ActivationSigmoid)
     {
         for (int i = 0, n = input.Length; i < n; i++)
         {
             input[i] = (input[i] == 0 ? 0.0f : (input[i] < 0 ? 0.2f : 0.8f));
         }
     }
     else if (function is ActivationLinear)
     {
         /*do nothing*/
     }
     else
     {
         throw new ArgumentException("Unknown activation function");
     }
     return input;
 }
Developer ID: jorik041, Project: soundfingerprinting, Lines of code: 32, Source file: NormalizeUtils.cs

Example 3: Conv2DLayer

 /// <summary>
 ///     Construct a 2D convolution layer.
 /// </summary>
 /// <param name="theActivation">The activation function.</param>
 /// <param name="theNumFilters">The number of filters.</param>
 /// <param name="theFilterRows">The rows in each filter.</param>
 /// <param name="theFilterColumns">The columns in each filter.</param>
 public Conv2DLayer(IActivationFunction theActivation, int theNumFilters, int theFilterRows, int theFilterColumns)
 {
     Activation = theActivation;
     FilterRows = theFilterRows;
     FilterColumns = theFilterColumns;
     _numFilters = theNumFilters;
 }
Developer ID: legendvijay, Project: aifh, Lines of code: 14, Source file: Conv2DLayer.cs
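
A usage sketch for the constructor above. The ActivationReLU class name is an assumption (a ReLU-style implementation of IActivationFunction from the same AIFH library); substitute whichever activation type your build provides.

    // Hypothetical: 8 filters, each 3x3, with a ReLU activation.
    var conv = new Conv2DLayer(new ActivationReLU(), 8, 3, 3);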

Example 4: SetActivationFunction

		/// <summary>
		/// Set new activation function for all neurons of the layer.
		/// </summary>
		/// 
		/// <param name="function">Activation function to set.</param>
		/// 
		/// <remarks><para>The method sets a new activation function for each neuron by setting
		/// its <see cref="ActivationNeuron.ActivationFunction"/> property.</para></remarks>
		/// 
		public void SetActivationFunction( IActivationFunction function )
		{
			for ( int i = 0; i < neurons.Length; i++ )
			{
				( (ActivationNeuron) neurons[i] ).ActivationFunction = function;
			}
		}
Developer ID: holisticware-admin, Project: MonoVersal.AForgeNET, Lines of code: 16, Source file: ActivationLayer.cs
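
A short usage sketch with AForge.NET's stock activation classes (SigmoidFunction and BipolarSigmoidFunction); the layer sizes are arbitrary.

    // Build a layer of 10 neurons with 4 inputs each, then swap its activation function.
    ActivationLayer layer = new ActivationLayer(10, 4, new SigmoidFunction(2.0));
    layer.SetActivationFunction(new BipolarSigmoidFunction(2.0));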

Example 5: FlatLayer

 public FlatLayer(IActivationFunction activation, int count, double biasActivation)
 {
     this.Activation = activation;
     this._x10f4d88af727adbc = count;
     this._x25922738b86264c8 = biasActivation;
     this._x4d51c0aa16352a14 = null;
 }
Developer ID: neismit, Project: emds, Lines of code: 7, Source file: FlatLayer.cs
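
The field names in this example are obfuscated, which suggests the project was decompiled; from the constructor arguments they appear to hold the neuron count, the bias activation and a context target. A usage sketch assuming Encog's usual FlatLayer signature and ActivationTANH class:

    // Hypothetical: a hidden layer of 10 TANH neurons with a bias activation of 1.0.
    FlatLayer hidden = new FlatLayer(new ActivationTANH(), 10, 1.0);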

Example 6: FastCyclicNetwork

        /// <summary>
        /// Constructs a FastCyclicNetwork with the provided pre-built FastConnection array and 
        /// associated data.
        /// </summary>
        public FastCyclicNetwork(FastConnection[] connectionArray,
                                 IActivationFunction[] neuronActivationFnArray,
                                 double[][] neuronAuxArgsArray,
                                 int neuronCount,
                                 int inputNeuronCount,
                                 int outputNeuronCount,
                                 int timestepsPerActivation)
        {
            _connectionArray = connectionArray;
            _neuronActivationFnArray = neuronActivationFnArray;
            _neuronAuxArgsArray = neuronAuxArgsArray;

            // Create neuron pre- and post-activation signal arrays.
            _preActivationArray = new double[neuronCount];
            _postActivationArray = new double[neuronCount];

            // Wrap sub-ranges of the neuron signal arrays as input and output arrays for IBlackBox.
            // Offset is 1 to skip bias neuron (The value at index 1 is the first black box input).
            _inputSignalArrayWrapper = new SignalArray(_postActivationArray, 1, inputNeuronCount);

            // Offset to skip bias and input neurons. Output neurons follow input neurons in the arrays.
            _outputSignalArrayWrapper = new SignalArray(_postActivationArray, inputNeuronCount+1, outputNeuronCount);

            // Store counts for use during activation.
            _inputNeuronCount = inputNeuronCount;
            _inputAndBiasNeuronCount = inputNeuronCount+1;
            _outputNeuronCount = outputNeuronCount;
            _timestepsPerActivation = timestepsPerActivation;

            // Initialise the bias neuron's fixed output value.
            _postActivationArray[0] = 1.0;
        }
Developer ID: jbrant, Project: SharpBackpropNeat, Lines of code: 36, Source file: FastCyclicNetwork.cs

Example 7: Neuron

 public Neuron(IActivationFunction activationFunc, double charge)
 {
     Charge = charge;
     Error = 0;
     Activation = activationFunc;
     In = new Dictionary<Neuron, double>();
 }
Developer ID: KineticCookie, Project: FuzzyDev, Lines of code: 7, Source file: NeuralNetwork.cs

Example 8: NeatActivationFunctionLibrary

 /// <summary>
 /// Construct with a single IActivationFunction.
 /// </summary>
 /// <param name="activationFn"></param>
 public NeatActivationFunctionLibrary(IActivationFunction activationFn)
 {
     _activationFn = activationFn;
     _activationFnInfo = new ActivationFunctionInfo(0, 1.0, activationFn);
     _activationFnInfoList = new List<ActivationFunctionInfo>(1);
     _activationFnInfoList.Add(_activationFnInfo);
 }
Developer ID: BLueders, Project: SharpNeat_Playground, Lines of code: 11, Source file: NeatActivationFunctionLibrary.cs
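
A usage sketch; SteepenedSigmoid, its __DefaultInstance field and the GetFunction(id) lookup follow SharpNEAT 2.x conventions but are assumptions here, so verify them against the version in use.

    // Hypothetical: a library that always hands back the same sigmoid.
    IActivationFunctionLibrary lib =
        new NeatActivationFunctionLibrary(SteepenedSigmoid.__DefaultInstance);
    IActivationFunction fn = lib.GetFunction(0);   // id 0, probability 1.0, per the constructor above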

Example 9: NEATNeuronGene

 /// <summary>
 /// Construct a neuron gene.
 /// </summary>
 /// <param name="type">The neuron type.</param>
 /// <param name="theActivationFunction">The activation function.</param>
 /// <param name="id">The neuron id.</param>
 /// <param name="innovationId">The innovation id.</param>
 public NEATNeuronGene(NEATNeuronType type, IActivationFunction theActivationFunction, long id, long innovationId)
 {
     NeuronType = type;
     InnovationId = innovationId;
     Id = id;
     ActivationFunction = theActivationFunction;
 }
Developer ID: jongh0, Project: MTree, Lines of code: 14, Source file: NEATNeuronGene.cs
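
A usage sketch assuming Encog's NEATNeuronType enum and ActivationSigmoid class; the id and innovation id values are arbitrary.

    // Hypothetical: a hidden neuron with id 5, created by innovation 12.
    var gene = new NEATNeuronGene(NEATNeuronType.Hidden, new ActivationSigmoid(), 5, 12);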

Example 10: DecodeToConcurrentNetwork

		static public INetwork DecodeToConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
		{
		//----- Loop the neuronGenes. Create Neuron for each one.
			// Store a table of neurons keyed by their id.
			Hashtable neuronTable = new Hashtable(g.NeuronGeneList.Count);
			NeuronList neuronList = new NeuronList();

			foreach(NeuronGene neuronGene in g.NeuronGeneList)
			{
				Neuron newNeuron = new Neuron(activationFn, neuronGene.NeuronType, neuronGene.InnovationId);
				neuronTable.Add(newNeuron.Id, newNeuron);
				neuronList.Add(newNeuron);
			}

		//----- Loop the connection genes. Create a Connection for each one and bind them to the relevant Neurons.
			foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
			{
				Connection newConnection = new Connection(connectionGene.SourceNeuronId, connectionGene.TargetNeuronId, connectionGene.Weight);

				// Bind the connection to its source neuron.
				newConnection.SetSourceNeuron((Neuron)neuronTable[connectionGene.SourceNeuronId]);

				// Store the new connection against its target neuron.
				((Neuron)(neuronTable[connectionGene.TargetNeuronId])).ConnectionList.Add(newConnection);
			}

			return new ConcurrentNetwork(neuronList);
		}
Developer ID: zaheeroz, Project: qd-maze-simulator, Lines of code: 28, Source file: GenomeDecoder.cs
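
A usage sketch; SteepenedSigmoid is one of the activation function implementations that ships with SharpNEAT 1.x, but treat the exact type name as an assumption, and bestGenome stands for a NeatGenome.NeatGenome produced elsewhere by the evolutionary algorithm.

    // Hypothetical: decode an evolved genome into a runnable network.
    IActivationFunction fn = new SteepenedSigmoid();
    INetwork network = GenomeDecoder.DecodeToConcurrentNetwork(bestGenome, fn);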

Example 11: Write

        /// <param name="activationFn">Not strictly part of a genome. But it is useful to document which function
        /// the genome is supposed to run against when decoded into a network.</param>
        public static void Write(XmlNode parentNode, NeatGenome genome, IActivationFunction activationFn)
        {
            //----- Start writing. Create document root node.
            XmlElement xmlGenome = XmlUtilities.AddElement(parentNode, "genome");
            XmlUtilities.AddAttribute(xmlGenome, "id", genome.GenomeId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "species-id", genome.SpeciesId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "age", genome.GenomeAge.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "fitness", genome.Fitness.ToString("0.00"));
            XmlUtilities.AddAttribute(xmlGenome, "activation-fn-id", activationFn.FunctionId);

            //----- Write neurons.
            XmlElement xmlNeurons = XmlUtilities.AddElement(xmlGenome, "neurons");
            foreach(NeuronGene neuronGene in genome.NeuronGeneList)
                WriteNeuron(xmlNeurons, neuronGene);

            //----- Write modules.
            XmlElement xmlModules = XmlUtilities.AddElement(xmlGenome, "modules");
            foreach (ModuleGene moduleGene in genome.ModuleGeneList)
                WriteModule(xmlModules, moduleGene);

            //----- Write Connections.
            XmlElement xmlConnections = XmlUtilities.AddElement(xmlGenome, "connections");
            foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
                WriteConnectionGene(xmlConnections, connectionGene);

            //----- Write behavior
            if(genome.Behavior!=null)
            {
                if(genome.Behavior.behaviorList!=null)
                {
                    XmlElement xmlBehavior = XmlUtilities.AddElement(xmlGenome, "behavior");
                    WriteBehavior(xmlBehavior,genome.Behavior);
                }
            }
        }
Developer ID: OptimusLime, Project: IESoR, Lines of code: 37, Source file: XmlGenomeWriterStatic.cs
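
A usage sketch using System.Xml; the genome and activationFn variables are assumed to exist already, and the file name is arbitrary.

    // Hypothetical: serialize a genome under a fresh XML document.
    XmlDocument doc = new XmlDocument();
    XmlElement root = doc.CreateElement("genomes");
    doc.AppendChild(root);
    XmlGenomeWriterStatic.Write(root, genome, activationFn);
    doc.Save("best-genome.xml");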

Example 12: DecodeToFastConcurrentMultiplicativeNetwork

        public static FastConcurrentMultiplicativeNetwork DecodeToFastConcurrentMultiplicativeNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
        {
            int outputNeuronCount = g.OutputNeuronCount;
            int neuronGeneCount = g.NeuronGeneList.Count;

            // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
            // any reason to ever have more than one bias node - although there may be 0.
            int neuronGeneIdx=0;
            for(; neuronGeneIdx<neuronGeneCount; neuronGeneIdx++)
            {
                if(g.NeuronGeneList[neuronGeneIdx].NeuronType != NeuronType.Bias)
                    break;
            }
            int biasNodeCount = neuronGeneIdx;
            int inputNeuronCount = g.InputNeuronCount;

            // ConnectionGenes point to a neuron ID. We need to map this ID to a 0 based index for
            // efficiency. To do this we build a table of indexes (ints) keyed on neuron ID.
            // TODO: An alternative here would be to forgo the building of a table and do a binary
            // search directly on the NeuronGeneList - probably a good idea to use a heuristic based upon
            // neuroncount*connectioncount that decides on which technique to use. Small networks will
            // likely be faster to decode using the binary search.

            // Actually we can partly achieve the above optimization by using HybridDictionary instead of Hashtable.
            // Although creating a table is a bit expensive.
            HybridDictionary neuronIndexTable = new HybridDictionary(neuronGeneCount);
            for(int i=0; i<neuronGeneCount; i++)
                neuronIndexTable.Add(g.NeuronGeneList[i].InnovationId, i);

            // Count how many of the connections are actually enabled. TODO: make faster - store disable count?
            int connectionGeneCount = g.ConnectionGeneList.Count;
            int connectionCount=connectionGeneCount;
            //			for(int i=0; i<connectionGeneCount; i++)
            //			{
            //				if(g.ConnectionGeneList[i].Enabled)
            //					connectionCount++;
            //			}

            // Now we can build the connection array(s).
            FloatFastConnection[] connectionArray = new FloatFastConnection[connectionCount];
            int connectionIdx=0;
            for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionIdx];
                connectionArray[connectionIdx].sourceNeuronIdx = (int)neuronIndexTable[connectionGene.SourceNeuronId];
                connectionArray[connectionIdx].targetNeuronIdx = (int)neuronIndexTable[connectionGene.TargetNeuronId];
                connectionArray[connectionIdx].weight = (float)connectionGene.Weight;
                connectionIdx++;
            }

            // Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
            // TODO: custom sort routine to prevent boxing/unboxing required by Array.Sort(ValueType[])
            //Array.Sort(connectionArray, fastConnectionComparer);
            QuickSortFastConnections(0, fastConnectionArray.Length-1);

            return new FastConcurrentMultiplicativeNetwork(
                biasNodeCount, inputNeuronCount,
                outputNeuronCount, neuronGeneCount,
                connectionArray, activationFn);
        }
Developer ID: jtglaze, Project: IndependentWork2013, Lines of code: 60, Source file: GenomeDecoder.cs

Example 13: CalculateGradient

 /// <summary>
 /// Not used for this type of plugin.
 /// </summary>
 ///
 /// <param name="gradients">Not used.</param>
 /// <param name="layerOutput">Not used.</param>
 /// <param name="weights">Not used.</param>
 /// <param name="layerDelta">Not used.</param>
 /// <param name="af">Not used.</param>
 /// <param name="index">Not used.</param>
 /// <param name="fromLayerIndex">Not used.</param>
 /// <param name="fromLayerSize">Not used.</param>
 /// <param name="toLayerIndex">Not used.</param>
 /// <param name="toLayerSize">Not used.</param>
 public void CalculateGradient(double[] gradients,
                               double[] layerOutput, double[] weights,
                               double[] layerDelta, IActivationFunction af,
                               int index, int fromLayerIndex, int fromLayerSize,
                               int toLayerIndex, int toLayerSize)
 {
 }
Developer ID: encog, Project: encog-silverlight-core, Lines of code: 21, Source file: SystemLoggingPlugin.cs

Example 14: FloatFastConcurrentNetwork

		public FloatFastConcurrentNetwork(	int biasNeuronCount, 
										int inputNeuronCount,
                                        int outputNeuronCount,
                                        int outputsPerPolicy, // Schrum: Added
										int totalNeuronCount,
										FloatFastConnection[] connectionArray, 
										IActivationFunction[] activationFnArray)
		{
			this.biasNeuronCount = biasNeuronCount;
			this.inputNeuronCount = inputNeuronCount;
			this.totalInputNeuronCount = biasNeuronCount + inputNeuronCount;
            this.outputNeuronCount = outputNeuronCount;
            this.outputsPerPolicy = outputsPerPolicy; // Schrum: Added

			this.connectionArray = connectionArray;
			this.activationFnArray = activationFnArray;
			
			//----- Allocate the arrays that make up the neural network.
			// The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
			neuronSignalArray = new float[totalNeuronCount];
			_neuronSignalArray = new float[totalNeuronCount];

			for(int i=0; i<biasNeuronCount; i++)
				neuronSignalArray[i] = 1.0F;
		}
Developer ID: val1kus, Project: agent_multimodal, Lines of code: 25, Source file: FloatFastConcurrentNetwork.cs

Example 15: ActivationLayer

		/// <summary>
		/// Initializes a new instance of the <see cref="ActivationLayer"/> class.
		/// </summary>
		/// 
		/// <param name="neuronsCount">Layer's neurons count.</param>
		/// <param name="inputsCount">Layer's inputs count.</param>
		/// <param name="function">Activation function of neurons of the layer.</param>
		/// 
		/// <remarks>The new layer is randomized (see <see cref="ActivationNeuron.Randomize"/>
		/// method) after it is created.</remarks>
		/// 
		public ActivationLayer( int neuronsCount, int inputsCount, IActivationFunction function )
			: base( neuronsCount, inputsCount )
		{
			// create each neuron
			for ( int i = 0; i < neurons.Length; i++ )
				neurons[i] = new ActivationNeuron( inputsCount, function );
		}
Developer ID: holisticware-admin, Project: MonoVersal.AForgeNET, Lines of code: 18, Source file: ActivationLayer.cs
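
In AForge.NET these layers are usually created indirectly through ActivationNetwork rather than by hand. A sketch using the standard constructor (check the Layers property against the AForge version in use):

    // Hypothetical: 2 inputs -> hidden layer of 5 neurons -> 1 output neuron,
    // all using the bipolar sigmoid activation.
    var network = new ActivationNetwork(new BipolarSigmoidFunction(2.0), 2, 5, 1);
    ActivationLayer firstLayer = (ActivationLayer)network.Layers[0];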


Note: The IActivationFunction class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Refer to each project's license before distributing or reusing the code; do not reproduce this article without permission.