This article collects typical usage examples of the ActivationNetwork class in C#. If you have been asking yourself what exactly the C# ActivationNetwork class does, how to use it, or where to find examples of it, the curated class code examples below may help.
Fifteen code examples of the ActivationNetwork class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code samples.
Example 1: BackPropagationLearning
/// <summary>
/// Initializes a new instance of the <see cref="BackPropagationLearning"/> class
/// </summary>
///
/// <param name="network">Network to teach</param>
///
public BackPropagationLearning( ActivationNetwork network )
{
    this.network = network;

    // create error and deltas arrays
    neuronErrors = new double[network.LayersCount][];
    weightsUpdates = new double[network.LayersCount][][];
    thresholdsUpdates = new double[network.LayersCount][];

    // initialize errors and deltas arrays for each layer
    for ( int i = 0, n = network.LayersCount; i < n; i++ )
    {
        Layer layer = network[i];

        neuronErrors[i] = new double[layer.NeuronsCount];
        weightsUpdates[i] = new double[layer.NeuronsCount][];
        thresholdsUpdates[i] = new double[layer.NeuronsCount];

        // for each neuron
        for ( int j = 0; j < layer.NeuronsCount; j++ )
        {
            weightsUpdates[i][j] = new double[layer.InputsCount];
        }
    }
}
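For context, here is a minimal usage sketch of the constructor above, using the AForge.NET types that appear throughout these examples (ActivationNetwork, SigmoidFunction, BackPropagationLearning). The XOR-style data, the network shape, and the learning-rate value are illustrative choices, not part of the original example.

// train a small 2-2-1 network on XOR-style data with backpropagation (illustrative sketch)
double[][] input =
{
    new double[] { 0, 0 },
    new double[] { 0, 1 },
    new double[] { 1, 0 },
    new double[] { 1, 1 }
};
double[][] output =
{
    new double[] { 0 },
    new double[] { 1 },
    new double[] { 1 },
    new double[] { 0 }
};

// two inputs, two hidden neurons, one output neuron
ActivationNetwork network = new ActivationNetwork( new SigmoidFunction( 2 ), 2, 2, 1 );

// the teacher whose constructor is shown above
BackPropagationLearning teacher = new BackPropagationLearning( network );
teacher.LearningRate = 0.1;
teacher.Momentum = 0.0;

// run learning epochs until the summed squared error becomes small
double error;
do
{
    error = teacher.RunEpoch( input, output );
}
while ( error > 0.01 );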
Example 2: RunEpochTest1
public void RunEpochTest1()
{
    Accord.Math.Tools.SetupGenerator(0);

    double[][] input =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    double[][] output =
    {
        new double[] { -1 },
        new double[] {  1 },
        new double[] {  1 },
        new double[] { -1 }
    };

    Neuron.RandGenerator = new ThreadSafeRandom(0);

    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(2), 2, 2, 1);

    var teacher = new LevenbergMarquardtLearning(network,
        false, JacobianMethod.ByFiniteDifferences);

    double error = 1.0;
    while (error > 1e-5)
        error = teacher.RunEpoch(input, output);

    for (int i = 0; i < input.Length; i++)
        Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
}
Example 3: MulticlassTest1
public void MulticlassTest1()
{
    Accord.Math.Tools.SetupGenerator(0);
    Neuron.RandGenerator = new ThreadSafeRandom(0);

    int numberOfInputs = 3;
    int numberOfClasses = 4;
    int hiddenNeurons = 5;

    double[][] input =
    {
        new double[] { -1, -1, -1 }, // 0
        new double[] { -1,  1, -1 }, // 1
        new double[] {  1, -1, -1 }, // 1
        new double[] {  1,  1, -1 }, // 0
        new double[] { -1, -1,  1 }, // 2
        new double[] { -1,  1,  1 }, // 3
        new double[] {  1, -1,  1 }, // 3
        new double[] {  1,  1,  1 }  // 2
    };

    int[] labels =
    {
        0,
        1,
        1,
        0,
        2,
        3,
        3,
        2,
    };

    double[][] outputs = Accord.Statistics.Tools
        .Expand(labels, numberOfClasses, -1, 1);

    var function = new BipolarSigmoidFunction(2);
    var network = new ActivationNetwork(function,
        numberOfInputs, hiddenNeurons, numberOfClasses);

    new NguyenWidrow(network).Randomize();

    var teacher = new LevenbergMarquardtLearning(network);

    double error = Double.PositiveInfinity;
    for (int i = 0; i < 10; i++)
        error = teacher.RunEpoch(input, outputs);

    for (int i = 0; i < input.Length; i++)
    {
        int answer;
        double[] output = network.Compute(input[i]);
        double response = output.Max(out answer);

        int expected = labels[i];
        Assert.AreEqual(expected, answer);
    }
}
Example 4: GaussianWeights
/// <summary>
/// Constructs a new Gaussian Weight initialization.
/// </summary>
///
/// <param name="network">The activation network whose weights will be initialized.</param>
/// <param name="stdDev">The standard deviation to be used. Common values lie in the 0.001-
/// 0.1 range. Default is 0.1.</param>
///
public GaussianWeights(ActivationNetwork network, double stdDev = 0.1)
{
    this.network = network;
    this.random = new GaussianGenerator(0f, (float)stdDev, Accord.Math.Tools.Random.Next());
    this.UpdateThresholds = false;
}
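A brief usage sketch of the initializer above follows. The Randomize() call and the public UpdateThresholds property are assumed from Accord.NET's GaussianWeights API (mirroring how NguyenWidrow is used in Example 3); the network shape and standard deviation are illustrative.

// draw the initial weights of a 10-5-1 network from a zero-mean Gaussian with stdDev = 0.05
var network = new ActivationNetwork(new BipolarSigmoidFunction(2), 10, 5, 1);

var initializer = new GaussianWeights(network, 0.05);
initializer.UpdateThresholds = true; // also randomize thresholds (disabled by default, as the constructor above shows)
initializer.Randomize();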
Example 5: DeltaRuleLearning
/// <summary>
/// Initializes a new instance of the <see cref="DeltaRuleLearning"/> class.
/// </summary>
///
/// <param name="network">Network to teach.</param>
///
/// <exception cref="ArgumentException">Invalid neural network. It should have one layer only.</exception>
///
public DeltaRuleLearning( ActivationNetwork network )
{
    // check layers count
    if ( network.Layers.Length != 1 )
    {
        throw new ArgumentException( "Invalid neural network. It should have one layer only." );
    }

    this.network = network;
}
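Because the constructor above rejects networks with more than one layer, any usage sketch must use a single-layer network. The following is a minimal, illustrative sketch; the AND-style data, learning rate, and epoch count are not from the original example.

// delta rule learning on a single-layer network (two inputs, one output neuron)
double[][] input =
{
    new double[] { 0, 0 },
    new double[] { 0, 1 },
    new double[] { 1, 0 },
    new double[] { 1, 1 }
};
double[][] output =
{
    new double[] { 0 },
    new double[] { 0 },
    new double[] { 0 },
    new double[] { 1 }
};

ActivationNetwork network = new ActivationNetwork( new SigmoidFunction( 2 ), 2, 1 );

DeltaRuleLearning teacher = new DeltaRuleLearning( network );
teacher.LearningRate = 0.5;

for ( int epoch = 0; epoch < 1000; epoch++ )
    teacher.RunEpoch( input, output );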
Example 6: NguyenWidrow
/// <summary>
/// Constructs a new Nguyen-Widrow Weight initialization.
/// </summary>
///
/// <param name="network">The activation network whose weights will be initialized.</param>
///
public NguyenWidrow(ActivationNetwork network)
{
    this.network = network;

    int hiddenNodes = network.Layers[0].Neurons.Length;
    int inputNodes = network.Layers[0].InputsCount;

    randRange = new Range(-0.5f, 0.5f);
    beta = 0.7 * Math.Pow(hiddenNodes, 1.0 / inputNodes);
}
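Usage is the same one-liner already shown in Example 3; a minimal sketch with an illustrative network shape:

// initialize a freshly created network with the Nguyen-Widrow rule before training
var network = new ActivationNetwork(new BipolarSigmoidFunction(2), 4, 10, 2);
new NguyenWidrow(network).Randomize();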
Example 7: AnnAgent
public AnnAgent(bool learn, int boardSize, byte player = 1)
{
    learning = learn;
    playerNumber = player;

    int boardFields = boardSize * boardSize;

    if (File.Exists("ann" + boardSize + ".bin"))
        network = (ActivationNetwork)Serialization.LoadNetwork("ann" + boardSize + ".bin");
    else
        network = new ActivationNetwork(new BipolarSigmoidFunction(), boardFields, 5, boardFields * 2);

    backProp = new BackPropagationLearning(network);
    teacher = new MinimaxAgent(2, player);
}
Example 8: EvolutionaryFitness
/// <summary>
/// Initializes a new instance of the <see cref="EvolutionaryFitness"/> class.
/// </summary>
///
/// <param name="network">Neural network for which fitness will be calculated.</param>
/// <param name="input">Input data samples for neural network.</param>
/// <param name="output">Output data samples for neural network (desired output).</param>
///
/// <exception cref="ArgumentException">Length of inputs and outputs arrays must be equal and greater than 0.</exception>
/// <exception cref="ArgumentException">Length of each input vector must be equal to neural network's inputs count.</exception>
///
public EvolutionaryFitness( ActivationNetwork network, double[][] input, double[][] output )
{
    if ( ( input.Length == 0 ) || ( input.Length != output.Length ) )
    {
        throw new ArgumentException( "Length of inputs and outputs arrays must be equal and greater than 0." );
    }

    if ( network.InputsCount != input[0].Length )
    {
        throw new ArgumentException( "Length of each input vector must be equal to neural network's inputs count." );
    }

    this.network = network;
    this.input = input;
    this.output = output;
}
Example 9: RunEpochTest1
public void RunEpochTest1()
{
    Accord.Math.Tools.SetupGenerator(0);

    double[][] input =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    double[][] output =
    {
        new double[] { -1 },
        new double[] {  1 },
        new double[] {  1 },
        new double[] { -1 }
    };

    Neuron.RandGenerator = new ThreadSafeRandom(0);

    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(2), 2, 2, 1);

    var teacher = new ParallelResilientBackpropagationLearning(network);

    double error = 1.0;
    while (error > 1e-5)
        error = teacher.RunEpoch(input, output);

    for (int i = 0; i < input.Length; i++)
    {
        double actual = network.Compute(input[i])[0];
        double expected = output[i][0];

        Assert.AreEqual(expected, actual, 0.01);
        Assert.IsFalse(Double.IsNaN(actual));
    }
}
Example 10: TrainNewModel
public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput)
{
    int inputSize = iInput[0].Length, samplesNum = iOutput.Length;
    if (samplesNum != iInput.Length)
        throw new ArgumentException();

    for (int i = 0; i < samplesNum; ++i)
        if (iInput[i].Length != inputSize || iOutput[i].Length != 1) // every input row must have inputSize elements and every output row exactly one
            throw new ArgumentException();

    int[] neuronsCount = (int[])ModelParametersDict[NeuronsInLayersKey];
    string activationFunction = (string)ModelParametersDict[ActivationFunctionKey];
    long maxIterNum = (long)ModelParametersDict[MaxIterationsNumberKey];
    double stopError = (double)ModelParametersDict[StopErrorKey];

    ActivationNetwork netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount);
    DataNormalizer normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray());
    IForecastingModel aModel = new ANNforecastingModel(netToTrain, normalizer);
    ISupervisedLearning teacher = new ResilientBackpropagationLearning(netToTrain);

    double[][] trainInputSet, trainOutputSet;
    TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier);
    trainInputSet = normalizer.Normalize(trainInputSet);
    trainOutputSet = normalizer.Normalize(trainOutputSet);

    long epochsCount = 0;
    double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError;
    do
    {
        prevError = nextError;
        teacher.RunEpoch(trainInputSet, trainOutputSet);
        nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput);
    }
    while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError);

    return aModel;
}
Example 11: EvolutionaryLearning
/// <summary>
/// Initializes a new instance of the <see cref="EvolutionaryLearning"/> class.
/// </summary>
///
/// <param name="activationNetwork">Activation network to be trained.</param>
/// <param name="populationSize">Size of genetic population.</param>
/// <param name="chromosomeGenerator">Random numbers generator used for initialization of genetic
/// population representing neural network's weights and thresholds (see <see cref="DoubleArrayChromosome.chromosomeGenerator"/>).</param>
/// <param name="mutationMultiplierGenerator">Random numbers generator used to generate random
/// factors for multiplication of network's weights and thresholds during genetic mutation
/// (see <see cref="DoubleArrayChromosome.mutationMultiplierGenerator"/>).</param>
/// <param name="mutationAdditionGenerator">Random numbers generator used to generate random
/// values added to neural network's weights and thresholds during genetic mutation
/// (see <see cref="DoubleArrayChromosome.mutationAdditionGenerator"/>).</param>
/// <param name="selectionMethod">Method of selecting the best chromosomes in genetic population.</param>
/// <param name="crossOverRate">Crossover rate in genetic population (see
/// <see cref="Population.CrossoverRate"/>).</param>
/// <param name="mutationRate">Mutation rate in genetic population (see
/// <see cref="Population.MutationRate"/>).</param>
/// <param name="randomSelectionRate">Rate of injection of random chromosomes during selection
/// in genetic population (see <see cref="Population.RandomSelectionPortion"/>).</param>
///
public EvolutionaryLearning( ActivationNetwork activationNetwork, int populationSize,
    IRandomNumberGenerator chromosomeGenerator,
    IRandomNumberGenerator mutationMultiplierGenerator,
    IRandomNumberGenerator mutationAdditionGenerator,
    ISelectionMethod selectionMethod,
    double crossOverRate, double mutationRate, double randomSelectionRate )
{
    // Check of assumptions during debugging only
    Debug.Assert( activationNetwork != null );
    Debug.Assert( populationSize > 0 );
    Debug.Assert( chromosomeGenerator != null );
    Debug.Assert( mutationMultiplierGenerator != null );
    Debug.Assert( mutationAdditionGenerator != null );
    Debug.Assert( selectionMethod != null );
    Debug.Assert( crossOverRate >= 0.0 && crossOverRate <= 1.0 );
    Debug.Assert( mutationRate >= 0.0 && mutationRate <= 1.0 );
    Debug.Assert( randomSelectionRate >= 0.0 && randomSelectionRate <= 1.0 );

    // network's parameters
    this.network = activationNetwork;
    this.numberOfNetworksWeights = CalculateNetworkSize( activationNetwork );

    // population parameters
    this.populationSize = populationSize;
    this.chromosomeGenerator = chromosomeGenerator;
    this.mutationMultiplierGenerator = mutationMultiplierGenerator;
    this.mutationAdditionGenerator = mutationAdditionGenerator;
    this.selectionMethod = selectionMethod;
    this.crossOverRate = crossOverRate;
    this.mutationRate = mutationRate;
    this.randomSelectionRate = randomSelectionRate;
}
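The constructor above exposes every genetic-algorithm parameter; AForge.NET also provides a simpler overload that takes only the network and the population size, which the following illustrative sketch assumes (the XOR-style data and the stopping criteria are likewise illustrative, not from the original example).

// evolve the weights of a small network with a population of 100 chromosomes
double[][] input =
{
    new double[] { -1, -1 },
    new double[] { -1,  1 },
    new double[] {  1, -1 },
    new double[] {  1,  1 }
};
double[][] output =
{
    new double[] { -1 },
    new double[] {  1 },
    new double[] {  1 },
    new double[] { -1 }
};

ActivationNetwork network = new ActivationNetwork( new BipolarSigmoidFunction( 2 ), 2, 2, 1 );
EvolutionaryLearning teacher = new EvolutionaryLearning( network, 100 );

double error = double.MaxValue;
for ( int i = 0; i < 500 && error > 0.1; i++ )
    error = teacher.RunEpoch( input, output );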
Example 12: RunEpochTest4
public void RunEpochTest4()
{
    Accord.Math.Tools.SetupGenerator(0);

    double[][] input =
    {
        new double[] { 0, 0 },
    };

    double[][] output =
    {
        new double[] { 0 },
    };

    Neuron.RandGenerator = new ThreadSafeRandom(0);

    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(2), 2, 1);

    var teacher = new LevenbergMarquardtLearning(network,
        true, JacobianMethod.ByBackpropagation);

    double error = 1.0;
    for (int i = 0; i < 1000; i++)
        error = teacher.RunEpoch(input, output);

    for (int i = 0; i < input.Length; i++)
        Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
}
Example 13: ConstructorTest
public void ConstructorTest()
{
    // Four training samples of the xor function

    // two inputs (x and y)
    double[][] input =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    // one output (z = x ^ y)
    double[][] output =
    {
        new double[] { -1 },
        new double[] {  1 },
        new double[] {  1 },
        new double[] { -1 }
    };

    // create multi-layer neural network
    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(2), // use a bipolar sigmoid activation function
        2, // two inputs
        3, // three hidden neurons
        1  // one output neuron
        );

    // create teacher
    LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(
        network,                         // the neural network
        false,                           // whether or not to use Bayesian regularization
        JacobianMethod.ByBackpropagation // Jacobian calculation method
        );

    // set learning rate
    teacher.LearningRate = 0.1f;

    // start the supervised learning
    for (int i = 0; i < 1000; i++)
    {
        double error = teacher.RunEpoch(input, output);
    }

    // If we reached here, the constructor test has passed.
}
Example 14: SearchSolution
// Worker thread
void SearchSolution( )
{
    // initialize input and output values
    double[][] input = null;
    double[][] output = null;

    if ( sigmoidType == 0 )
    {
        // unipolar data
        input = new double[4][] {
            new double[] {0, 0},
            new double[] {0, 1},
            new double[] {1, 0},
            new double[] {1, 1}
        };
        output = new double[4][] {
            new double[] {0},
            new double[] {1},
            new double[] {1},
            new double[] {0}
        };
    }
    else
    {
        // bipolar data
        input = new double[4][] {
            new double[] {-1, -1},
            new double[] {-1,  1},
            new double[] { 1, -1},
            new double[] { 1,  1}
        };
        output = new double[4][] {
            new double[] {-1},
            new double[] { 1},
            new double[] { 1},
            new double[] {-1}
        };
    }

    // create neural network
    ActivationNetwork network = new ActivationNetwork(
        ( sigmoidType == 0 ) ?
            (IActivationFunction) new SigmoidFunction( sigmoidAlphaValue ) :
            (IActivationFunction) new BipolarSigmoidFunction( sigmoidAlphaValue ),
        2, 2, 1 );

    // create teacher
    var teacher = new ParallelResilientBackpropagationLearning( network );

    // reset the teacher's update step to the initial value
    teacher.Reset( initialStep );

    // iterations
    int iteration = 0;

    // statistic files
    StreamWriter errorsFile = null;

    try
    {
        // check if we need to save statistics to files
        if ( saveStatisticsToFiles )
        {
            // open files
            errorsFile = File.CreateText( "errors.csv" );
        }

        // errors list
        ArrayList errorsList = new ArrayList( );

        // loop
        while ( !needToStop )
        {
            // run epoch of learning procedure
            double error = teacher.RunEpoch( input, output );
            errorsList.Add( error );

            // save current error
            if ( errorsFile != null )
            {
                errorsFile.WriteLine( error );
            }

            // show current iteration & error
            SetText( currentIterationBox, iteration.ToString( ) );
            SetText( currentErrorBox, error.ToString( ) );
            iteration++;

            // check if we need to stop
            if ( error <= learningErrorLimit )
                break;
        }

        // show error's dynamics
        double[,] errors = new double[errorsList.Count, 2];

        for ( int i = 0, n = errorsList.Count; i < n; i++ )
//......... the rest of this example is omitted here .........
Example 15: RunEpochTest3
public void RunEpochTest3()
{
    double[,] dataset = yinyang;

    double[][] input = dataset.GetColumns(0, 1).ToArray();
    double[][] output = dataset.GetColumn(2).ToArray();

    Neuron.RandGenerator = new ThreadSafeRandom(0);

    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(2), 2, 5, 1);

    var teacher = new LevenbergMarquardtLearning(network,
        true, JacobianMethod.ByBackpropagation);

    Assert.IsTrue(teacher.UseRegularization);

    double error = 1.0;
    for (int i = 0; i < 500; i++)
        error = teacher.RunEpoch(input, output);

    double[][] actual = new double[output.Length][];
    for (int i = 0; i < input.Length; i++)
        actual[i] = network.Compute(input[i]);

    for (int i = 0; i < input.Length; i++)
        Assert.AreEqual(Math.Sign(output[i][0]), Math.Sign(actual[i][0]));
}