This article collects typical usage examples of the C# method Encog.Neural.Networks.BasicNetwork.GetWeight. If you have been wondering what BasicNetwork.GetWeight does, how to call it, or what it looks like in real code, the curated examples below should help. You can also read further about the containing class, Encog.Neural.Networks.BasicNetwork.
The following presents 11 code examples of BasicNetwork.GetWeight, sorted by popularity by default. You can upvote the examples you find useful; your ratings help the site recommend better C# samples.
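Before diving into the extracted examples, a minimal, self-contained sketch may help show what GetWeight(fromLayer, fromNeuron, toNeuron) returns. It is not taken from any of the projects below; the 2-3-1 layer sizes and the sigmoid activations are illustrative only, and the loop bounds mirror the GetLayerTotalNeuronCount/GetLayerNeuronCount pattern used throughout the examples.
using System;
using Encog.Engine.Network.Activation;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;

internal static class GetWeightDemo
{
    private static void Main()
    {
        // Build a tiny 2-3-1 feedforward network and print every weight via GetWeight.
        var network = new BasicNetwork();
        network.AddLayer(new BasicLayer(null, true, 2));                     // input layer + bias
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer + bias
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
        network.Structure.FinalizeStructure();
        network.Reset(); // fill the weight matrix with random values

        for (int layer = 0; layer < network.LayerCount - 1; layer++)
        {
            int fromCount = network.GetLayerTotalNeuronCount(layer); // includes the bias neuron, if any
            int toCount = network.GetLayerNeuronCount(layer + 1);
            for (int from = 0; from < fromCount; from++)
            {
                for (int to = 0; to < toCount; to++)
                {
                    Console.WriteLine("weight[layer {0}] {1} -> {2} = {3}",
                        layer, from, to, network.GetWeight(layer, from, to));
                }
            }
        }
    }
}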
Example 1: ConnectLayersFromBasic
/// <summary>
/// Connect layers from a BasicNetwork. Used internally only.
/// </summary>
/// <param name="network">The BasicNetwork.</param>
/// <param name="fromLayerIdx">The from layer index.</param>
/// <param name="source">The from layer.</param>
/// <param name="target">The target.</param>
private void ConnectLayersFromBasic(BasicNetwork network,
int fromLayerIdx, IFreeformLayer source, IFreeformLayer target)
{
for (int targetNeuronIdx = 0; targetNeuronIdx < target.Count; targetNeuronIdx++)
{
for (int sourceNeuronIdx = 0; sourceNeuronIdx < source.Count; sourceNeuronIdx++)
{
IFreeformNeuron sourceNeuron = source.Neurons[sourceNeuronIdx];
IFreeformNeuron targetNeuron = target.Neurons[targetNeuronIdx];
// neurons with no input (i.e. bias neurons)
if (targetNeuron.InputSummation == null)
{
continue;
}
IFreeformConnection connection = _connectionFactory
.Factor(sourceNeuron, targetNeuron);
sourceNeuron.AddOutput(connection);
targetNeuron.AddInput(connection);
double weight = network.GetWeight(fromLayerIdx,
sourceNeuronIdx, targetNeuronIdx);
connection.Weight = weight;
}
}
}
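ConnectLayersFromBasic is an internal step of converting a BasicNetwork into a freeform network, so you normally never call it yourself. If memory serves, the public entry point in Encog is the FreeformNetwork constructor that takes a BasicNetwork; a rough sketch, reusing the small network built in the first sketch above (treat the constructor overload as an assumption, not a verified signature):
// Convert the finalized BasicNetwork into a freeform network. The conversion
// walks every adjacent layer pair and copies each weight via
// BasicNetwork.GetWeight, which is exactly what ConnectLayersFromBasic does
// for one layer pair.
var freeform = new FreeformNetwork(network); // assumed constructor: FreeformNetwork(BasicNetwork)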
Example 2: AnalyzeNetwork
public AnalyzeNetwork(BasicNetwork network)
{
    // NOTE: the loop bounds below are inferred from the visible decompiled code;
    // the original listing omitted the code that populates 'list' and 'list2'
    // (see the marker inside the inner loop).
    int num = 0;   // connections whose magnitude is below the connection limit
    int num2 = 0;  // total connections examined
    IList<double> list = new List<double>();
    IList<double> list2 = new List<double>();
    IList<double> values = new List<double>();

    for (int num3 = 0; num3 < network.LayerCount - 1; num3++)
    {
        int layerTotalNeuronCount = network.GetLayerTotalNeuronCount(num3);
        int layerNeuronCount = network.GetLayerNeuronCount(num3 + 1);
        for (int num10 = 0; num10 < layerTotalNeuronCount; num10++)
        {
            for (int num11 = 0; num11 < layerNeuronCount; num11++)
            {
                double num12 = network.GetWeight(num3, num10, num11);
                if (network.Structure.ConnectionLimited
                    && Math.Abs(num12) < network.Structure.ConnectionLimit)
                {
                    num++;
                }
                values.Add(num12);
                num2++;
                //......... part of the code omitted here (population of list and list2) .........
            }
        }
    }

    this._x0dcd8230e4ec0670 = num;
    this._x465229d781237721 = num2;
    this._x2f33d779e5a20b28 = new NumericRange(list2);
    this._x232c44e69c86297f = new NumericRange(list);
    this._xd16d54155d6ebc35 = new NumericRange(values);
    this._x8158512e31b17fc4 = EngineArray.ListToDouble(list2);
    this._x7cd672b98e9d2817 = EngineArray.ListToDouble(values);
    this._x5933bfade0487265 = EngineArray.ListToDouble(list);
}
Example 3: Randomize
/// <summary>
/// Randomize one level of a neural network.
/// </summary>
///
/// <param name="network">The network to randomize</param>
/// <param name="fromLayer">The from level to randomize.</param>
public override void Randomize(BasicNetwork network, int fromLayer)
{
int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
int toCount = network.GetLayerNeuronCount(fromLayer + 1);
for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
{
double n = 0.0;
for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
{
double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
n += w * w;
}
n = Math.Sqrt(n);
for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
{
double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
w = _beta * w / n;
network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
}
}
}
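A per-layer randomizer like this is usually applied to every weight layer of the network in turn. A minimal driver sketch follows; the convenience overload is hypothetical (not a verified Encog signature), and the LayerCount - 1 bound mirrors the layer loop used in Example 2.
// Hypothetical convenience overload on the same randomizer class: applies the
// per-layer Randomize shown above to every weight layer of the network, so
// that each neuron's incoming weight vector ends up with magnitude _beta.
public void Randomize(BasicNetwork network)
{
    for (int fromLayer = 0; fromLayer < network.LayerCount - 1; fromLayer++)
    {
        this.Randomize(network, fromLayer);
    }
}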
Example 4: Randomize
/// <summary>
/// Randomize one level of a neural network.
/// </summary>
///
/// <param name="network">The network to randomize</param>
/// <param name="fromLayer">The from level to randomize.</param>
public virtual void Randomize(BasicNetwork network, int fromLayer)
{
int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
int toCount = network.GetLayerNeuronCount(fromLayer + 1);
for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
{
for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
{
double v = network.GetWeight(fromLayer, fromNeuron, toNeuron);
v = Randomize(v);
network.SetWeight(fromLayer, fromNeuron, toNeuron, v);
}
}
}
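Example 4 delegates the actual new value to a single-argument Randomize(double) that subclasses provide. Below is a hedged sketch of one possible override; the class name, the base type, and the uniform range are assumptions for illustration, not Encog's real type names.
// Hypothetical subclass: ignores the current weight and draws a fresh uniform
// value in [min, max]. The assumed base class exposes the virtual
// Randomize(BasicNetwork, int) shown above plus a virtual double Randomize(double).
public class UniformRangeRandomizer : BaseWeightRandomizer
{
    private readonly Random _rnd = new Random();
    private readonly double _min;
    private readonly double _max;

    public UniformRangeRandomizer(double min, double max)
    {
        _min = min;
        _max = max;
    }

    public override double Randomize(double currentWeight)
    {
        return _min + (_rnd.NextDouble() * (_max - _min));
    }
}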
Example 5: Randomize
public virtual void Randomize(BasicNetwork network, int fromLayer)
{
    int layerTotalNeuronCount = network.GetLayerTotalNeuronCount(fromLayer);
    int layerNeuronCount = network.GetLayerNeuronCount(fromLayer + 1);
    for (int fromNeuron = 0; fromNeuron < layerTotalNeuronCount; fromNeuron++)
    {
        for (int toNeuron = 0; toNeuron < layerNeuronCount; toNeuron++)
        {
            double weight = network.GetWeight(fromLayer, fromNeuron, toNeuron);
            weight = this.Randomize(weight);
            network.SetWeight(fromLayer, fromNeuron, toNeuron, weight);
        }
    }
}
Example 6: Randomize
public override void Randomize(BasicNetwork network, int fromLayer)
{
    int layerTotalNeuronCount = network.GetLayerTotalNeuronCount(fromLayer);
    int layerNeuronCount = network.GetLayerNeuronCount(fromLayer + 1);
    for (int toNeuron = 0; toNeuron < layerNeuronCount; toNeuron++)
    {
        // Magnitude of the incoming weight vector for this neuron.
        double n = 0.0;
        for (int fromNeuron = 0; fromNeuron < layerTotalNeuronCount; fromNeuron++)
        {
            double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
            n += w * w;
        }
        n = Math.Sqrt(n);
        // Rescale each incoming weight; _xd7d571ecee49d1e4 plays the role of _beta in Example 3.
        for (int fromNeuron = 0; fromNeuron < layerTotalNeuronCount; fromNeuron++)
        {
            double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
            w = (this._xd7d571ecee49d1e4 * w) / n;
            network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
        }
    }
}
Example 7: Learn
public List<double[]> Learn(double[][] data, double[][] ideal)
{
double[][] origData = (double[][])data.Clone();
int n = data.Length;
int m = data[0].Length;
double[][] output = new double[n][];
double[][] sgmNeighbours = new double[n][];
for (var i = 0; i < n; i++)
{
double[] sgmN = new double[SegmentationData.SEGMENT_NEIGHBOURS];
Array.Copy(data[i], m - SegmentationData.SEGMENT_NEIGHBOURS, sgmN, 0, SegmentationData.SEGMENT_NEIGHBOURS);
sgmNeighbours[i] = sgmN;
data[i] = data[i].Take(m - SegmentationData.SEGMENT_NEIGHBOURS).ToArray();
output[i] = new double[m - SegmentationData.SEGMENT_NEIGHBOURS];
data[i].CopyTo(output[i], 0);
}
IMLDataSet trainingSet = new BasicMLDataSet(data, output);
int inputLayerSize = layersConfiguration[0] - SegmentationData.SEGMENT_NEIGHBOURS;
int trainingLayerSize = layersConfiguration[1];
BasicNetwork oneLayerAutoencoder = new BasicNetwork();
oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
oneLayerAutoencoder.Structure.FinalizeStructure();
oneLayerAutoencoder.Reset();
IMLTrain train = new ResilientPropagation(oneLayerAutoencoder, trainingSet);
//IMLTrain train = new Backpropagation(oneLayerAutoencoder, trainingSet, LEARNING_RATE, MOMENTUM);
int epoch = 1;
List<double[]> errors = new List<double[]>();
double[] trainError = new double[AUTOENCODER_MAX_ITER];
do
{
train.Iteration();
ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
trainError[epoch - 1] = train.Error;
epoch++;
//errors.Add(train.Error);
} while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
errors.Add(trainError);
train.FinishTraining();
BasicNetwork encoder = new BasicNetwork();
encoder.AddLayer(new BasicLayer(null, BIAS, oneLayerAutoencoder.GetLayerNeuronCount(0)));
encoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, oneLayerAutoencoder.GetLayerNeuronCount(1)));
encoder.Structure.FinalizeStructure();
encoder.Reset();
//assign the weights to the encoder
for (int i = 0; i < encoder.LayerCount - 1; i++)
for (int f = 0; f < encoder.GetLayerNeuronCount(i); f++)
for (int t = 0; t < encoder.GetLayerNeuronCount(i + 1); t++)
encoder.SetWeight(i, f, t, oneLayerAutoencoder.GetWeight(i, f, t));
//Compare2Networks(oneLayerAutoencoder, encoder);
for(int l=1; l<layersConfiguration.Count -2; l++)
{
inputLayerSize = layersConfiguration[l];
trainingLayerSize = layersConfiguration[l+1];
oneLayerAutoencoder = new BasicNetwork();
oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
oneLayerAutoencoder.Structure.FinalizeStructure();
oneLayerAutoencoder.Reset();
//compute the output of the encoder built so far
double[][] input = new double[n][];
double[][] newOutput = new double[n][];
for(int ni = 0; ni <n; ni++)
{
IMLData res = encoder.Compute(new BasicMLData(data[ni]));
double[] resD = new double[res.Count];
for(int i=0; i<res.Count; i++)
resD[i] = res[i];
input[ni] = resD;
newOutput[ni] = new double[res.Count];
input[ni].CopyTo(newOutput[ni], 0);
}
BasicMLDataSet newTrainingSet = new BasicMLDataSet(input, newOutput);
train = new ResilientPropagation(oneLayerAutoencoder, newTrainingSet);
//train = new Backpropagation(oneLayerAutoencoder, newTrainingSet, LEARNING_RATE, MOMENTUM);
epoch = 1;
trainError = new double[AUTOENCODER_MAX_ITER];
do
{
train.Iteration();
ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
trainError[epoch - 1] = train.Error;
epoch++;
} while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
//......... part of the code omitted here .........
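The triple loop after the "assign the weights to the encoder" comment is a pattern worth keeping as a helper. A minimal sketch follows, assuming the destination network's layers match the source's over the copied range; the helper name is only illustrative.
// Copy the weights of the first 'layerCount' weight layers from src to dst
// using GetWeight/SetWeight. Both networks must already be finalized and the
// copied layers must have identical neuron counts; bias weights are skipped
// because the from-loop runs over GetLayerNeuronCount rather than
// GetLayerTotalNeuronCount, matching the loop in the example above.
private static void CopyWeights(BasicNetwork src, BasicNetwork dst, int layerCount)
{
    for (int layer = 0; layer < layerCount; layer++)
        for (int from = 0; from < dst.GetLayerNeuronCount(layer); from++)
            for (int to = 0; to < dst.GetLayerNeuronCount(layer + 1); to++)
                dst.SetWeight(layer, from, to, src.GetWeight(layer, from, to));
}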
Example 8: Compare2Networks
private void Compare2Networks(BasicNetwork n1, BasicNetwork n2)
{
string oneLay = string.Empty;
for (int i = 0; i < n1.LayerCount - 1; i++)
{
oneLay += ("Layer: " + i + ": \n");
for (int f = 0; f < n1.GetLayerNeuronCount(i); f++)
{
oneLay += ("Neuron: " + f + "\n");
for (int t = 0; t < n1.GetLayerNeuronCount(i + 1); t++)
{
oneLay += (n1.GetWeight(i, f, t) + ", ");
}
oneLay += "\n";
}
oneLay += "\n";
}
oneLay += "---------------------------------------\n\n";
for (int i = 0; i < n2.LayerCount - 1; i++)
{
oneLay += ("Layer: " + i + ": \n");
for (int f = 0; f < n2.GetLayerNeuronCount(i); f++)
{
oneLay += ("Neuron: " + f + "\n");
for (int t = 0; t < n2.GetLayerNeuronCount(i + 1); t++)
{
oneLay += (n2.GetWeight(i, f, t) + ", ");
}
oneLay += "\n";
}
oneLay += "\n";
}
MessageBox.Show(oneLay);
}
Example 9: ReLabelLinks
/// <summary>
/// The new labels must already be present in the neural network neuralNet.
/// </summary>
/// <param name="nodes">The diagram nodes, one list per network layer.</param>
/// <param name="neuralNet">The network whose weights are written onto the links.</param>
private void ReLabelLinks(List<ShapeNode>[] nodes, BasicNetwork neuralNet)
{
string tmpWeigth;
for (int i = neuralNet.LayerCount - 1; i > 0; i--)
{
for (int x = 0; x < neuralNet.Flat.LayerCounts[i]; x++)
{
for (int y = 0; y < neuralNet.Flat.LayerCounts[i - 1]; y++)
{
try
{
tmpWeigth = neuralNet.GetWeight(neuralNet.LayerCount - i - 1, x, y).ToString("F4");
nodes[i][x].OutgoingLinks[y].Text = tmpWeigth;
}
catch
{
tmpWeigth = "null";
}
}
}
}
}
Example 10: DrawNeuralNetPair
private void DrawNeuralNetPair(Diagram diagram, out List<ShapeNode>[] nodes, BasicNetwork neuralNet)
{
if (diagram.Items.Count > 0)
diagram.ClearAll();
ProcessPair pp = cbProcessPair.SelectedItem as ProcessPair;
TemplXML.FormData form = ConvertDataArrayToXml(templ, trainingData[pp.Pair].InputArray);
double dx = 400;
double rastNode = 30;
double startYF = 50;
double startX = dx + 50; // + (neuralNet.LayerCount - 1) * 300;
double startY = 50;
double diam = 30;
double startYMax = startY;
nodes = new List<ShapeNode>[neuralNet.LayerCount];
string label = "null";
graphPairLinkInput = new List<DiagramLink>(neuralNet.Flat.LayerCounts[neuralNet.LayerCount - 1]);
for (int i = neuralNet.LayerCount - 1; i >= 0; i--)
{
List<ShapeNode> curN = new List<ShapeNode>();
for (int j = 0; j < neuralNet.Flat.LayerCounts[i]; j++)
{
ShapeNode tmp = DiagramHelper.CreateNode(diagram, startX, startY, diam, diam, j.ToString());
tmp.MouseLeftButtonDown += nodeSelected_MouseLeftButtonDown;
curN.Add(tmp);
if (i == neuralNet.LayerCount - 1 && j < neuralNet.InputCount)
{
ShapeNode q = DiagramHelper.CreateNode(Shapes.Rectangle, diagram, startX - dx - 200, startY - 10, 200, 50, form.Values[j].Field.Title);
if (form.Values[j] is TemplXML.FormDataValueNumber)
{
var tmpF = form.Values[j] as TemplXML.FormDataValueNumber;
label = string.Format("{0}", tmpF.Value);
}
else
{
var tmpF = form.Values[j] as TemplXML.FormDataValueSelect;
label = tmpF.Value.Title;
}
var tmpLink = DiagramHelper.CreateLink(diagram, q, tmp, label);
tmpLink.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
graphPairLinkInput.Add(tmpLink);
}
startY += diam + rastNode;
}
nodes[i] = curN;
startX += dx;
if (startYMax < startY) startYMax = startY;
if (i != 0)
startY = startYMax / 2 - neuralNet.Flat.LayerCounts[i - 1] * (rastNode + diam) / 2;
}
string tmpWeigth;
int countN;
for (int i = neuralNet.LayerCount - 1; i > 0; i--)
{
countN = neuralNet.Flat.LayerCounts[i - 1];
if (i - 1 == neuralNet.LayerCount - 2 && neuralNet.GetLayerBiasActivation(i - 1) > 0)
countN -= 1;
for (int x = 0; x < neuralNet.Flat.LayerCounts[i]; x++)
{
for (int y = 0; y < countN; y++)
{
tmpWeigth = neuralNet.GetWeight(neuralNet.LayerCount - i - 1, x, y).ToString("F4");
var link = DiagramHelper.CreateLink(diagram, nodes[i][x], nodes[i - 1][y], tmpWeigth);
link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
}
}
}
}
Example 11: DrawNeuralNet
private void DrawNeuralNet(Diagram diagram, out List<ShapeNode>[] nodes, BasicNetwork neuralNet)
{
if (diagram.Items.Count > 0)
diagram.ClearAll();
//load the form template
InitEvent netInit = cbTrainsLog.SelectedItem as InitEvent;
//TemplXML.FormTemplate templ;
string pathXML = netInit.Path.Replace(".np4", ".xml");
if (!File.Exists(pathXML))
MessageBox.Show("Form not found");
templ = TemplXML.FormTemplate.FromXml(XElement.Load(pathXML));
double dx = 400;
double rastNode = 30;
double startYF = 50;
double startX = dx + 50; // + (neuralNet.LayerCount - 1) * 300;
double startY = 50;
double diam = 30;
double startYMax = startY;
nodes = new List<ShapeNode>[neuralNet.LayerCount];
for (int i = neuralNet.LayerCount - 1 ; i >= 0; i--)
{
List<ShapeNode> curN = new List<ShapeNode>();
for (int j = 0; j < neuralNet.Flat.LayerCounts[i]; j++)
{
ShapeNode tmp = DiagramHelper.CreateNode(diagram, startX, startY, diam, diam, j.ToString());
tmp.MouseLeftButtonDown += nodeSelected_MouseLeftButtonDown;
curN.Add(tmp);
if (i == neuralNet.LayerCount - 1 && j < neuralNet.InputCount)
{
ShapeNode q = DiagramHelper.CreateNode(Shapes.Rectangle, diagram, startX - dx, startY, 200, 50, templ.Fields[j].Title);
DiagramLink link = DiagramHelper.CreateLink(diagram, q, tmp);
link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
}
startY += diam + rastNode;
}
nodes[i] = curN;
startX += dx;
if (startYMax < startY) startYMax = startY;
if(i != 0)
startY = startYMax / 2 - neuralNet.Flat.LayerCounts[i - 1] * (rastNode + diam) / 2;
}
string tmpWeigth;
int countN;
for (int i = neuralNet.LayerCount - 1; i > 0; i--)
{
countN = neuralNet.Flat.LayerCounts[i - 1];
if (i - 1 == neuralNet.LayerCount - 2 && neuralNet.GetLayerBiasActivation(i - 1) > 0)
countN -= 1;
for (int x = 0; x < neuralNet.Flat.LayerCounts[i]; x++)
{
for (int y = 0; y < countN; y++)
{
tmpWeigth = neuralNet.GetWeight(neuralNet.LayerCount - i - 1, x, y).ToString("F4");
var link = DiagramHelper.CreateLink(diagram, nodes[i][x], nodes[i - 1][y], tmpWeigth);
link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
}
}
}
}