本文整理汇总了C#中Network.GetLayerNeuronCount方法的典型用法代码示例。如果您正苦于以下问题:C# Network.GetLayerNeuronCount方法的具体用法?C# Network.GetLayerNeuronCount怎么用?C# Network.GetLayerNeuronCount使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Network
的用法示例。
在下文中一共展示了Network.GetLayerNeuronCount方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: ReorderOutput
/// <summary>
/// Reassigns the ideal binary output codes to tracks so that the total L2 distance
/// between each track's averaged network response and its assigned code is minimal,
/// then rewrites the dataset's input/ideal pairs accordingly.
/// </summary>
/// <param name="network">Trained (or partially trained) network used to compute responses.</param>
/// <param name="dataset">Dataset whose Input/Ideal entries are overwritten in place.</param>
/// <param name="trackIdFingerprints">Per-track lists of fingerprint snippets.</param>
/// <param name="binaryCodes">Candidate binary output codes, one per track.</param>
/// <exception cref="NetTrainerException">Thrown when a track has fewer snippets than <c>trainingSongSnippets</c>.</exception>
protected void ReorderOutput(Network network, BasicNeuralDataSet dataset, Dictionary<int, List<BasicMLData>> trackIdFingerprints, double[][] binaryCodes)
{
int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
int trackCount = trackIdFingerprints.Count;

// Average the (scaled) network response of each track across all of its snippets.
double[][] averagedOutputs = new double[trackCount][];
int trackIndex = 0;
foreach (KeyValuePair<int, List<BasicMLData>> entry in trackIdFingerprints)
{
List<BasicMLData> snippets = entry.Value;
if (snippets.Count < trainingSongSnippets)
{
throw new NetTrainerException("Not enough snippets for a song");
}

averagedOutputs[trackIndex] = new double[outputNeurons];
foreach (BasicMLData snippet in snippets)
{
IMLData actualOutput = network.Compute(snippet);
for (int neuron = 0; neuron < outputNeurons; neuron++)
{
// Scale each component before accumulating, matching the original averaging scheme.
actualOutput[neuron] /= outputNeurons;
averagedOutputs[trackIndex][neuron] += actualOutput[neuron];
}
}

trackIndex++;
}

// Snapshot the track ids; KeyCollection.CopyTo preserves the same enumeration
// order used above, so indices line up with averagedOutputs.
int[] unassignedTracks = new int[trackCount];
trackIdFingerprints.Keys.CopyTo(unassignedTracks, 0);

// Find the binary-code/track pairing with minimal L2 norm across all codes.
List<Tuple<int, int>> assignments = BinaryOutputUtil.FindMinL2Norm(binaryCodes, averagedOutputs);
int sampleIndex = 0;
foreach (Tuple<int, int> assignment in assignments)
{
// Item2 indexes the track, Item1 the binary code it was matched with.
List<BasicMLData> fingerprints = trackIdFingerprints[unassignedTracks[assignment.Item2]];
foreach (BasicMLData fingerprint in fingerprints)
{
int inputLength = fingerprint.Count;
for (int i = 0; i < inputLength; i++)
{
dataset.Data[sampleIndex].Input[i] = fingerprint[i];
}

double[] idealCode = binaryCodes[assignment.Item1];
for (int i = 0; i < idealCode.Length; i++)
{
dataset.Data[sampleIndex].Ideal[i] = idealCode[i];
}

sampleIndex++;
}
}
}
示例2: Train
/// <summary>
/// Runs the full training procedure: fills standard inputs/outputs, performs Idyn cycles of
/// dynamic output reordering (each followed by Edyn training epochs), then Efixed epochs of
/// fixed training, reporting progress through <paramref name="callback"/> at every step.
/// </summary>
/// <param name="network">The network to train; its first and last layers supply the activation functions used.</param>
/// <param name="callback">Progress callback invoked with status, correct-output ratio, error, and iteration count.</param>
public void Train(Network network, TrainingCallback callback)
{
IActivationFunction activationFunctionInput = network.GetActivation(0);
// Size of the last layer = number of output neurons (also the binary-code width).
int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
double error = 0;
callback.Invoke(TrainingStatus.FillingStandardInputs, 0, 0, 0); /*First operation is filling standard input/outputs*/
Dictionary<int, List<BasicMLData>> trackIdFingerprints = GetNormalizedTrackFingerprints(activationFunctionInput, trainingSongSnippets, outputNeurons);
// Remember this thread so it can be aborted/managed externally — presumably by a Stop/Abort
// method elsewhere in the class (see the ThreadAbortException handler below).
workingThread = Thread.CurrentThread;
IActivationFunction activationFunctionOutput = network.GetActivation(network.LayerCount - 1);
double[][] normalizedBinaryCodes = GetNormalizedBinaryCodes(activationFunctionOutput, outputNeurons);
Tuple<double[][], double[][]> tuple = FillStandardInputsOutputs(trackIdFingerprints, normalizedBinaryCodes); /*Fill standard input output*/
double[][] inputs = tuple.Item1;
double[][] outputs = tuple.Item2;
// A null from FillStandardInputsOutputs is reported as an exception status rather than thrown.
if (inputs == null || outputs == null)
{
callback.Invoke(TrainingStatus.Exception, 0, 0, 0);
return;
}
int currentIterration = 0;
double correctOutputs = 0.0;
BasicNeuralDataSet dataset = new BasicNeuralDataSet(inputs, outputs);
// Resilient propagation (RPROP) is the training algorithm for all epochs below.
ITrain learner = new ResilientPropagation(network, dataset);
try
{
// Dynamic output reordering cycle
/*Idyn = 50*/
for (int i = 0; i < Idyn; i++)
{
// Cooperative pause: block on the semaphore until resumed by another thread.
if (paused)
{
pauseSem.WaitOne();
}
correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
callback.Invoke(TrainingStatus.OutputReordering, correctOutputs, error, currentIterration);
// Re-pair binary codes with tracks based on the network's current responses.
ReorderOutput(network, dataset, trackIdFingerprints, normalizedBinaryCodes);
/*Edyn = 10*/
for (int j = 0; j < Edyn; j++)
{
if (paused)
{
pauseSem.WaitOne();
}
// Performance is measured BEFORE the epoch, so the reported ratio lags one iteration.
correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
callback.Invoke(TrainingStatus.RunningDynamicEpoch, correctOutputs, error, currentIterration);
learner.Iteration();
error = learner.Error;
currentIterration++;
}
}
// Fixed training phase: outputs are no longer reordered between epochs.
for (int i = 0; i < Efixed; i++)
{
if (paused)
{
pauseSem.WaitOne();
}
correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
callback.Invoke(TrainingStatus.FixedTraining, correctOutputs, error, currentIterration);
learner.Iteration();
error = learner.Error;
currentIterration++;
}
network.ComputeMedianResponses(inputs, trainingSongSnippets);
callback.Invoke(TrainingStatus.Finished, correctOutputs, error, currentIterration);
}
catch (ThreadAbortException)
{
// External abort (via workingThread.Abort) ends training; report last known metrics.
callback.Invoke(TrainingStatus.Aborted, correctOutputs, error, currentIterration);
paused = false;
}
}