

C# BasicNetwork Class Code Examples

This article collects typical usage examples of the BasicNetwork class in C#. If you have been wondering how the C# BasicNetwork class is used in practice, the hand-picked class code examples below may help.


A total of 15 code examples of the BasicNetwork class are shown below; by default they are sorted by popularity. You can upvote the examples you like or find useful, and your feedback helps the system recommend better C# code examples.
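
Before the individual examples, a minimal sketch of how a BasicNetwork is typically built and trained with the Encog 3.x API may be useful as context. The 2-3-1 layer sizes, sigmoid activations, and XOR data below are illustrative assumptions, and exact namespaces can differ between Encog versions:

    using Encog.Engine.Network.Activation;
    using Encog.ML.Data.Basic;
    using Encog.Neural.Networks;
    using Encog.Neural.Networks.Layers;
    using Encog.Neural.Networks.Training.Propagation.Resilient;

    // Build a 2-3-1 feedforward network (layer sizes chosen only for illustration).
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));                     // input layer with bias
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
    network.Structure.FinalizeStructure();
    network.Reset(); // randomize the initial weights

    // Tiny XOR training set.
    double[][] input = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
    double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
    var trainingSet = new BasicMLDataSet(input, ideal);

    // Train with resilient propagation until the error is acceptably small.
    var train = new ResilientPropagation(network, trainingSet);
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);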

Example 1: NeuralGeneticAlgorithm

        /// <summary>
        /// Construct a neural genetic algorithm.
        /// </summary>
        ///
        /// <param name="network">The network to base this on.</param>
        /// <param name="randomizer">The randomizer used to create this initial population.</param>
        /// <param name="calculateScore">The score calculation object.</param>
        /// <param name="populationSize">The population size.</param>
        /// <param name="mutationPercent">The percent of offspring to mutate.</param>
        /// <param name="percentToMate">The percent of the population allowed to mate.</param>
        public NeuralGeneticAlgorithm(BasicNetwork network,
            IRandomizer randomizer, ICalculateScore calculateScore,
            int populationSize, double mutationPercent,
            double percentToMate)
            : base(TrainingImplementationType.Iterative)
        {
            Genetic = new NeuralGeneticAlgorithmHelper
                           {
                               CalculateScore = new GeneticScoreAdapter(calculateScore)
                           };
            IPopulation population = new BasicPopulation(populationSize);
            Genetic.MutationPercent = mutationPercent;
            Genetic.MatingPopulation = percentToMate*2;
            Genetic.PercentToMate = percentToMate;
            Genetic.Crossover = new Splice(network.Structure.CalculateSize()/3);
            Genetic.Mutate = new MutatePerturb(4.0d);
            Genetic.Population = population;
            for (int i = 0; i < population.PopulationSize; i++)
            {
                var chromosomeNetwork = (BasicNetwork) network.Clone();
                randomizer.Randomize(chromosomeNetwork);

                var genome = new NeuralGenome(chromosomeNetwork) {GA = Genetic};
                Genetic.PerformCalculateScore(genome);
                Genetic.Population.Add(genome);
            }
            population.Sort();
        }
Developer: fxmozart, Project: encog-dotnet-core, Lines: 39, Source: NeuralGeneticAlgorithm.cs
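
A hedged usage sketch for this constructor: the randomizer, score object, and numeric settings below are illustrative assumptions (TrainingSetScore and RangeRandomizer are standard Encog types, but verify them against your version), and the network and trainingSet variables are assumed to exist as in the introductory sketch.

    // Score each genome by its error on the training data.
    ICalculateScore score = new TrainingSetScore(trainingSet);

    var geneticTrain = new NeuralGeneticAlgorithm(
        network,
        new RangeRandomizer(-1, 1), // randomizes each cloned network in the initial population
        score,
        500,    // population size
        0.1,    // fraction of offspring to mutate
        0.25);  // fraction of the population allowed to mate

    for (int i = 0; i < 100; i++)
    {
        geneticTrain.Iteration();
    }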

Example 2: ManhattanPropagation

 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(BasicNetwork network,
     IMLDataSet training, double learnRate)
     : base(network, training)
 {
     _learningRate = learnRate;
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
 }
Developer: Romiko, Project: encog-dotnet-core, Lines: 14, Source: ManhattanPropagation.cs
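
A possible call to this constructor, reusing the network and trainingSet assumed in the introductory sketch; the learning rate of 0.0001 is only an illustrative starting value, since Manhattan propagation applies a small fixed-size weight update:

    var train = new ManhattanPropagation(network, trainingSet, 0.0001);
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);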

Example 3: NetworkToString

        /// <summary>
        /// Format the network as a human readable string that lists the 
        /// hidden layers.
        /// </summary>
        /// <param name="network">The network to format.</param>
        /// <returns>A human readable string.</returns>
        public static String NetworkToString(BasicNetwork network)
        {
            StringBuilder result = new StringBuilder();
            int num = 1;

            ILayer layer = network.GetLayer(BasicNetwork.TAG_INPUT);

            // display only hidden layers
            while (layer.Next.Count > 0)
            {
                layer = layer.Next[0].ToLayer;

                if (result.Length > 0)
                {
                    result.Append(",");
                }
                result.Append("H");
                result.Append(num++);
                result.Append("=");
                result.Append(layer.NeuronCount);
            }

            return result.ToString();

        }
Developer: encog, Project: encog-silverlight-core, Lines: 31, Source: PruneIncremental.cs
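
Judging by the source file, this static helper lives in the PruneIncremental class of the older, layer-based Encog API, so a call might look like the sketch below; the network variable is an assumption:

    // For a network with hidden layers of 10 and 5 neurons this prints: H1=10,H2=5
    Console.WriteLine(PruneIncremental.NetworkToString(network));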

Example 4: TrainHopfield

 /// <summary>
 /// Construct a Hopfield training class.
 /// </summary>
 /// <param name="trainingSet">The training set to use.</param>
 /// <param name="network">The network to train.</param>
 public TrainHopfield(INeuralDataSet trainingSet,
          BasicNetwork network)
 {
     this.network = network;
     this.Training = trainingSet;
     this.Error = 0;
 }
Developer: encog, Project: encog-silverlight-core, Lines: 12, Source: TrainHopfield.cs

Example 5: Randomize

 /// <inheritdoc />
 public override void Randomize(BasicNetwork network)
 {
     for (var i = 0; i < network.Layers.Count - 1; i++)
     {
         RandomizeLayer(network, i);
     }
 }
Developer: legendvijay, Project: aifh, Lines: 8, Source: XaiverRandomizeNetwork.cs
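
The RandomizeLayer helper is not shown above, but the idea behind Xavier (Glorot) initialization is to draw each weight uniformly from the range ±sqrt(6 / (fanIn + fanOut)). The following is a minimal, library-independent sketch of that rule (not the aifh implementation itself; it assumes using System):

    // Fill a fanIn-by-fanOut weight matrix with Xavier/Glorot-uniform values.
    static double[,] XavierWeights(int fanIn, int fanOut, Random rnd)
    {
        double limit = Math.Sqrt(6.0 / (fanIn + fanOut));
        var weights = new double[fanIn, fanOut];
        for (int i = 0; i < fanIn; i++)
        {
            for (int j = 0; j < fanOut; j++)
            {
                // Uniform in [-limit, +limit].
                weights[i, j] = (rnd.NextDouble() * 2.0 - 1.0) * limit;
            }
        }
        return weights;
    }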

Example 6: CalculateDepth

 /// <summary>
 /// Construct the depth calculation object.
 /// </summary>
 /// <param name="network">The network that we are calculating for.</param>
 public CalculateDepth(BasicNetwork network)
 {
     this.network = network;
     this.outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);
     if( this.outputLayer!=null )
         Calculate(0, this.outputLayer);
 }
Developer: encog, Project: encog-silverlight-core, Lines: 11, Source: CalculateDepth.cs

Example 7: RPROPJob

 /// <summary>
 /// Construct an RPROP job. For more information on RPROP see the
 /// ResilientPropagation class. 
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="loadToMemory">True if binary training data should be loaded to memory.</param>
 /// <param name="localRatio">The local ratio, used if this job is performed by an OpenCL Device.</param>
 /// <param name="globalRatio">The global ratio, used if this job is performed by an OpenCL Device.</param>
 /// <param name="segmentationRatio">The segmentation ratio, used if this job is performed by an OpenCL Device.</param>
 /// <param name="iterationsPer">How many iterations to process per cycle.</param>
 public RPROPJob(BasicNetwork network, INeuralDataSet training,
         bool loadToMemory, double localRatio, int globalRatio, double segmentationRatio, int iterationsPer) :
     this(network, training,
          loadToMemory, RPROPConst.DEFAULT_INITIAL_UPDATE,
          RPROPConst.DEFAULT_MAX_STEP, localRatio, globalRatio, segmentationRatio, iterationsPer)
 {
 }
Developer: OperatorOverload, Project: encog-cs, Lines: 18, Source: RPROPJob.cs

Example 8: CrossTraining

 /// <summary>
 /// Construct a cross trainer. 
 /// </summary>
 /// <param name="network">The network.</param>
 /// <param name="training">The training data.</param>
 public CrossTraining(BasicNetwork network,
          FoldedDataSet training)
 {
     this.network = network;
     Training = training;
     this.folded = training;
 }
Developer: OperatorOverload, Project: encog-cs, Lines: 12, Source: CrossTraining.cs

Example 9: Randomize

 /// <inheritdoc />
 public override void Randomize(BasicNetwork network)
 {
     for (var i = 0; i < network.Weights.Length; i++)
     {
         network.Weights[i] = Rnd.NextDouble(_low, _high);
     }
 }
Developer: legendvijay, Project: aifh, Lines: 8, Source: RangeRandomizeNetwork.cs

Example 10: SVDTraining

        /// <summary>
        /// Construct the SVD training object.
        /// </summary>
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        public SVDTraining(BasicNetwork network, INeuralDataSet training)
        {
            ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            if (outputLayer == null)
            {
                throw new TrainingError("SVD requires an output layer.");
            }

            if (outputLayer.NeuronCount != 1)
            {
                throw new TrainingError("SVD requires an output layer with a single neuron.");
            }

            if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
                throw new TrainingError("SVD is only tested to work on radial basis function networks.");

            rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

            this.Training = training;
            this.network = network;
            this.trainingLength = (int)this.Training.InputSize;

            BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
            BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
            this.pair = new BasicNeuralDataPair(input, ideal);
        }
Developer: encog, Project: encog-silverlight-core, Lines: 32, Source: SVDTraining.cs
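
SVD training only applies to radial basis function networks with a single output neuron, so a usage sketch first needs such a network. RadialBasisPattern is the Encog pattern class normally used to build one, but the exact calls below (and the 2-4-1 sizes) should be treated as assumptions for the older API this example comes from:

    // Build a small RBF network: 2 inputs, 4 RBF neurons, 1 output.
    var pattern = new RadialBasisPattern();
    pattern.InputNeurons = 2;
    pattern.AddHiddenLayer(4);
    pattern.OutputNeurons = 1;
    BasicNetwork rbfNetwork = pattern.Generate();

    // trainingSet is assumed to be an indexable INeuralDataSet with a single ideal output.
    var train = new SVDTraining(rbfNetwork, trainingSet);
    train.Iteration(); // one iteration solves the linear output weights via SVD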

Example 11: Equals

        /// <summary>
        /// Determine if the two neural networks are equal.
        /// </summary>
        ///
        /// <param name="network1">The first network.</param>
        /// <param name="network2">The second network.</param>
        /// <param name="precision">How many decimal places to check.</param>
        /// <returns>True if the two networks are equal.</returns>
        public static bool Equals(BasicNetwork network1,
                                  BasicNetwork network2, int precision)
        {
            double[] array1 = NetworkToArray(network1);
            double[] array2 = NetworkToArray(network2);

            if (array1.Length != array2.Length)
            {
                return false;
            }

            double test = Math.Pow(10.0d, precision);
            if (Double.IsInfinity(test) || (test > Int64.MaxValue))
            {
                throw new NeuralNetworkError("Precision of " + precision
                                             + " decimal places is not supported.");
            }

            for (int i = 0; i < array1.Length; i++)
            {
                var l1 = (long) (array1[i]*test);
                var l2 = (long) (array2[i]*test);
                if (l1 != l2)
                {
                    return false;
                }
            }

            return true;
        }
Developer: encog, Project: encog-silverlight-core, Lines: 38, Source: NetworkCODEC.cs
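
A short usage sketch: comparing two copies of the same network to four decimal places. The clones are assumptions chosen so that the comparison is trivially true:

    var networkA = (BasicNetwork) network.Clone();
    var networkB = (BasicNetwork) network.Clone();

    // True, because both clones carry identical weights.
    bool same = NetworkCODEC.Equals(networkA, networkB, 4);
    Console.WriteLine(same);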

Example 12: QuickPropagation

 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
 ///            a learning rate to start with.  If it fails to converge, 
 ///            then drop it.  Just like backprop, except QPROP can 
 ///            take higher learning rates.</param>
 public QuickPropagation(BasicNetwork network,
                         IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate = learnRate;
 }
Developer: neismit, Project: emds, Lines: 16, Source: QuickPropagation.cs
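
Following the doc comment's suggestion of starting the learning rate around 2.0, a hedged training loop might look like this, again reusing the assumed network and trainingSet:

    var train = new QuickPropagation(network, trainingSet, 2.0);
    int epoch = 0;
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch " + epoch++ + ": error = " + train.Error);
    } while (train.Error > 0.01 && epoch < 1000);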

Example 13: NetworkSize

        /// <summary>
        /// Determine the network size.
        /// </summary>
        /// <param name="network">The network to check.</param>
        /// <returns>The size of the network.</returns>
        public static int NetworkSize(BasicNetwork network)
        {

            // see if there is already an up to date flat network
            if (network.Structure.Flat != null
                && (network.Structure.FlatUpdate == FlatUpdateNeeded.None
                || network.Structure.FlatUpdate == FlatUpdateNeeded.Unflatten))
            {
                return network.Structure.Flat.Weights.Length;
            }

            int index = 0;

            // loop over all of the layers, take the output layer first
            foreach (ILayer layer in network.Structure.Layers)
            {

                // see if the previous layer, which is the next layer that the loop will hit,
                // is either a connection to a BasicLayer or a ContextLayer.
                ISynapse synapse = network.Structure
                        .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                ISynapse contextSynapse = network.Structure.FindPreviousSynapseByLayerType(
                        layer, typeof(ContextLayer));

                // get a list of of the previous synapses to this layer
                IList<ISynapse> list = network.Structure.GetPreviousSynapses(layer);

                // If there is not a BasicLayer or contextLayer as the next layer, then
                // just take the first synapse of any type.
                if (synapse == null && contextSynapse == null && list.Count > 0)
                {
                    synapse = list[0];
                }

                // is there any data to record for this synapse?
                if (synapse != null && synapse.WeightMatrix != null)
                {
                    // process each weight matrix
                    for (int x = 0; x < synapse.ToNeuronCount; x++)
                    {

                        index += synapse.FromNeuronCount;


                        if (synapse.ToLayer.HasBias)
                        {
                            index++;
                        }

                        if (contextSynapse != null)
                        {
                            index += contextSynapse.FromNeuronCount;
                        }
                    }
                }
            }

            return index;
        }
Developer: OperatorOverload, Project: encog-cs, Lines: 64, Source: NetworkCODEC.cs

Example 14: TrainInstar

 /// <summary>
 /// Construct the instar training object.
 /// </summary>
 /// <param name="network">The network to be trained.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learningRate">The learning rate.</param>
 public TrainInstar(BasicNetwork network, INeuralDataSet training,
         double learningRate)
 {
     this.network = network;
     this.training = training;
     this.learningRate = learningRate;
     this.parts = new FindCPN(network);
 }
Developer: encog, Project: encog-silverlight-core, Lines: 14, Source: TrainInstar.cs

Example 15: ScaledConjugateGradient

 /// <summary>
 /// Construct a training class.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public ScaledConjugateGradient(BasicNetwork network,
         INeuralDataSet training)
     : base(network, training)
 {
     TrainFlatNetworkSCG rpropFlat = new TrainFlatNetworkSCG(
             network.Structure.Flat,
             this.Training);
     this.FlatTraining = rpropFlat;
 }
Developer: OperatorOverload, Project: encog-cs, Lines: 14, Source: ScaledConjugateGradient.cs
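
SCG needs no learning rate, which keeps the call site minimal. A usage sketch, assuming a network and a training set compatible with this older INeuralDataSet-based API:

    var train = new ScaledConjugateGradient(network, trainingSet);
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);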


Note: The BasicNetwork class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are taken from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow the corresponding project's license. Do not reproduce without permission.