This article collects typical usage examples of the C# method INetwork.ClearSignals. If you are unsure what INetwork.ClearSignals does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, INetwork.
The following presents 10 code examples of the INetwork.ClearSignals method, sorted by popularity by default.
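Every example below drives the CPPN through the same four-call cycle on INetwork: ClearSignals resets any activation left over from the previous query, SetInputSignals loads the substrate coordinates, an activation call (MultipleSteps, RecursiveActivation, or RelaxNetwork, depending on the example) propagates the signals, and GetOutputSignal reads the result. The sketch below distills that cycle into one helper; the class and method names are illustrative and the SharpNeatLib.NeuralNetwork namespace is an assumption, so treat it as a reading aid rather than library code.
using SharpNeatLib.NeuralNetwork; // assumed namespace for INetwork
public static class CppnQuery
{
// One CPPN evaluation for a single (source, target) coordinate pair.
// Output index 0 is read as the connection weight in the examples below; some also read index 1 for node biases.
public static float Query(INetwork network, float[] coordinates, int outputIndex)
{
// The examples size the step count so signals can propagate through the whole CPPN.
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
network.ClearSignals(); // wipe signals from the previous query
network.SetInputSignals(coordinates); // e.g. [srcX, srcY, tgtX, tgtY] or [srcX, srcY, tgtX, tgtY, zStack]
network.MultipleSteps(iterations); // activate; some examples use RecursiveActivation or RelaxNetwork instead
return network.GetOutputSignal(outputIndex);
}
}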
Example 1: generateHomogeneousGenome
private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
{
IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
float[] coordinates = new float[4];
float output;
uint connectionCounter = 0;
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
uint totalOutputCount = OutputCount;
uint totalInputCount = InputCount;
uint totalHiddenCount = HiddenCount;
uint sourceCount, targetCout;
double weightRange = HyperNEATParameters.weightRange;
double threshold = HyperNEATParameters.threshold;
NeuronGeneList neurons;
// SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));
// set up the input nodes
for (uint a = 0; a < totalInputCount; a++)
{
neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
}
// set up the output nodes
for (uint a = 0; a < totalOutputCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
}
// set up the hidden nodes
for (uint a = 0; a < totalHiddenCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
}
bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];
uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
NeuronGroup connectedNG;
foreach (NeuronGroup ng in neuronGroups)
{
foreach (uint connectedTo in ng.ConnectedTo)
{
connectedNG = getNeuronGroup(connectedTo);
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
targetCout = 0;
foreach (PointF target in connectedNG.NeuronPositions)
{
switch (ng.GroupType)
{
case 0: sourceID = ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
}
switch (connectedNG.GroupType)
{
case 0: targetID = connectedNG.GlobalID + targetCout; break;
case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
}
//calculate bias of target node
if (!biasCalculated[targetID])
{
coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
biasCalculated[targetID] = true;
}
coordinates[0] = source.X;
coordinates[1] = source.Y;
coordinates[2] = target.X;
coordinates[3] = target.Y;
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
//network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
}
//else
//{
//......... part of the code is omitted here .........
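Example 1 above also queries the CPPN a second time per target node to obtain its bias: the source coordinates are pinned to the origin, output index 1 is read instead of 0, and a biasCalculated flag ensures each node is queried at most once. A minimal sketch of that sub-step follows; the helper name is hypothetical and the namespaces are assumptions, not quotes from the library.
using System.Drawing;
using SharpNeatLib.NeatGenome; // assumed namespace for NeuronGeneList
using SharpNeatLib.NeuralNetwork; // assumed namespace for INetwork and ModularNetwork
public static class BiasQuery
{
// Hypothetical helper mirroring the bias block in Example 1.
public static void SetBias(INetwork network, NeuronGeneList neurons, bool[] biasCalculated, uint targetID, PointF target, double weightRange)
{
if (biasCalculated[targetID]) return; // each node's bias is computed only once
float[] coordinates = { 0.0f, 0.0f, target.X, target.Y }; // source fixed at the origin, target varies
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation(); // activation style used by Example 1
neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange); // output 1 is treated as the bias
biasCalculated[targetID] = true;
}
}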
Example 2: generateGenome
public override NeatGenome.NeatGenome generateGenome(INetwork network)
{
#if OUTPUT
System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
float[] coordinates = new float[4];
float output;
uint connectionCounter = 0;
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
coordinates[0] = -1 + inputDelta / 2.0f;
coordinates[1] = -1;
coordinates[2] = -1 + hiddenDelta / 2.0f;
coordinates[3] = 0;
for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
{
coordinates[2] = -1 + hiddenDelta / 2.0f;
for (uint target = 0; target < hiddenCount; target++, coordinates[2] += hiddenDelta)
{
//Since there are an equal number of input and hidden nodes, we check these every time
network.ClearSignals();
network.SetInputSignals(coordinates);
network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
#if OUTPUT
foreach (double d in inputs)
sw.Write(d + " ");
sw.Write(output);
sw.WriteLine();
#endif
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
}
//Since every other hidden node has a corresponding output node, we check every other time
if (target % 2 == 0)
{
network.ClearSignals();
coordinates[1] = 0;
coordinates[3] = 1;
network.SetInputSignals(coordinates);
network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
#if OUTPUT
foreach (double d in inputs)
sw.Write(d + " ");
sw.Write(output);
sw.WriteLine();
#endif
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
}
coordinates[1] = -1;
coordinates[3] = 0;
}
}
}
#if OUTPUT
sw.Flush();
#endif
return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
}
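The weight expression repeated in every example maps a CPPN output whose magnitude exceeds the threshold onto the range (-weightRange, weightRange), and silently drops sub-threshold outputs. A standalone version of that mapping, with a hypothetical helper name and illustrative numbers in the trailing comment:
using System;
public static class WeightMapping
{
// Returns true and the rescaled weight only when |output| clears the threshold, as in the examples above.
public static bool TryGetWeight(float output, double threshold, double weightRange, out float weight)
{
if (Math.Abs(output) > threshold)
{
// Rescale the magnitude above the threshold into (0, weightRange], keeping the sign of the raw output.
weight = (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
return true;
}
weight = 0.0f;
return false;
}
}
// For example, with threshold = 0.2 and weightRange = 3.0, an output of 0.6 yields ((0.6 - 0.2) / 0.8) * 3.0 = 1.5,
// while an output of 0.1 produces no connection at all.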
Example 3: generateMultiGenomeStack
//......... part of the code is omitted here .........
{
connectedNG = getNeuronGroup(connectedTo);
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
targetCout = 0;
foreach (PointF target in connectedNG.NeuronPositions)
{
switch (ng.GroupType)
{
case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
case 3: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + ng.GlobalID + sourceCount; break; //Receive
case 4: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount; break; //Transmit
}
switch (connectedNG.GroupType)
{
case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
case 3: targetID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + connectedNG.GlobalID + targetCout; break;
case 4: targetID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + connectedNG.GlobalID + targetCout; break;
}
//target node bias
if (!biasCalculated[targetID])
{
coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
biasCalculated[targetID] = true;
}
coordinates[0] = source.X;
coordinates[1] = source.Y;
coordinates[2] = target.X;
coordinates[3] = target.Y;
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
//network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
double leo = 0.0;
if (adaptiveNetwork)
{
A = network.GetOutputSignal(2);
B = network.GetOutputSignal(3);
C = network.GetOutputSignal(4);
D = network.GetOutputSignal(5);
learningRate = network.GetOutputSignal(6);
}
if (modulatoryNet)
{
modConnection = network.GetOutputSignal(7);
}
Example 4: generateHiveBrainGenomeStack
// NOTE: Multi-Plane Substrates ARE supported by this method!
private NeatGenome.NeatGenome generateHiveBrainGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, bool ct)
{
//bool relativeCoordinate = false;
bool oneWay = false;
bool homogeneous = false;
Dictionary<String, float> weights = new Dictionary<String, float>();
float timeConstantMin = 0.1f;
float timeConstantMax = 2.0f;
uint numberOfAgents = (uint)stackCoordinates.Count;
IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount))); // TODO: Perhaps get an exact count of connections in the constructor and use that value here?
float[] coordinates = new float[5]; //JUSTIN: Used to be 6 coordinates, zstack was duplicated for relativeCoordinate hyjinx. fixed it. // Inputs to the CPPN: [srcX, srcY, tgX, tgY, zstack]
float output;
uint connectionCounter = 0;
float agentDelta = 2.0f / (numberOfAgents - 1);
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
uint totalOutputCount = OutputCount * numberOfAgents;
uint totalInputCount = InputCount * numberOfAgents;
uint totalHiddenCount = HiddenCount * numberOfAgents;
uint sourceCount, targetCout;
double weightRange = HyperNEATParameters.weightRange;
double threshold = HyperNEATParameters.threshold;
NeuronGeneList neurons;
// SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));
// set up the input nodes
for (uint a = 0; a < totalInputCount; a++)
{
neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
}
// set up the output nodes
for (uint a = 0; a < totalOutputCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
}
// set up the hidden nodes
for (uint a = 0; a < totalHiddenCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
}
uint agent = 0;
float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;
// CPPN Outputs: [ Weights ] [ Biases ]
// When using multi-plane substrates, there will be multiple Weight and Bias outputs.
// There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
// There is a Bias output for every plane
// Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
int numPlanes = planes.Count;
int numPlaneConnections = planesConnected.Count;
int computedIndex;
foreach (float stackCoordinate in stackCoordinates)
{
coordinates[4] = stackCoordinate;
//coordinates[4] = homogeneous ? 0 : stackCoordinate;//-1 ? -1 : 0;//0;//stackCoordinate;
//coordinates[5] = stackCoordinate;
uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
NeuronGroup connectedNG;
foreach (NeuronGroup ng in neuronGroups)
{
foreach (uint connectedTo in ng.ConnectedTo)
{
/*if (!relativeCoordinate)
coordinates[5] = stackCoordinate;
else //USE RELATIVE
coordinates[5] = 0;//*/
connectedNG = getNeuronGroup(connectedTo);
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
//-----------------Get the bias of the source node
/* switch (ng.GroupType)
{
case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
}
coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RecursiveActivation();//network.MultipleSteps(iterations);
neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
if (ct)
{
neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
//......... part of the code is omitted here .........
Example 5: generateMultiGenomeModulus
public NeatGenome.NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
{
#if OUTPUT
System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
float[] coordinates = new float[4];
float output;
uint connectionCounter = 0;
uint inputsPerAgent = inputCount / numberOfAgents;
uint hiddenPerAgent = hiddenCount / numberOfAgents;
uint outputsPerAgent = outputCount / numberOfAgents;
ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount*hiddenCount)+(hiddenCount*outputCount)));
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
coordinates[0] = -1 + inputDelta / 2.0f; //x1
coordinates[1] = -1; //y1
coordinates[2] = -1 + hiddenDelta / 2.0f; //x2
coordinates[3] = 0; //y2
for (uint agent = 0; agent < numberOfAgents; agent++)
{
coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
{
coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
{
//Since there are an equal number of input and hidden nodes, we check these every time
network.ClearSignals();
network.SetInputSignals(coordinates);
((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
output = network.GetOutputSignal(0);
#if OUTPUT
foreach (double d in inputs)
sw.Write(d + " ");
sw.Write(output);
sw.WriteLine();
#endif
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, (agent*inputsPerAgent) + source, (agent*hiddenPerAgent) + target + inputCount + outputCount, weight));
}
//Since every other hidden node has a corresponding output node, we check every other time
if (target % 2 == 0)
{
network.ClearSignals();
coordinates[1] = 0;
coordinates[3] = 1;
network.SetInputSignals(coordinates);
((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
output = network.GetOutputSignal(0);
#if OUTPUT
foreach (double d in inputs)
sw.Write(d + " ");
sw.Write(output);
sw.WriteLine();
#endif
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, (agent*hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
}
coordinates[1] = -1;
coordinates[3] = 0;
}
}
}
}
#if OUTPUT
sw.Flush();
#endif
//Console.WriteLine(count);
//Console.ReadLine();
return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
}
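generateMultiGenomeModulus packs every agent's copy of the substrate into one genome, so node IDs are laid out block by block: all inputs first, then all outputs, then all hidden nodes, with each agent owning a contiguous slice of every block. The sketch below restates that indexing as small helpers; the class and method names are illustrative, not part of the example.
public static class ModulusIndexing
{
// Node IDs as used by Example 5: inputs, then outputs, then hidden nodes, each block split into per-agent slices.
public static uint InputId(uint agent, uint i, uint inputsPerAgent)
{ return agent * inputsPerAgent + i; }
public static uint OutputId(uint agent, uint o, uint outputsPerAgent, uint inputCount)
{ return inputCount + agent * outputsPerAgent + o; }
public static uint HiddenId(uint agent, uint h, uint hiddenPerAgent, uint inputCount, uint outputCount)
{ return inputCount + outputCount + agent * hiddenPerAgent + h; }
}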
Example 6: generateMultiGenomeStack
// MPS support on the Hive methods only
#region Generate heterogeneous genomes with z-stack
// MPS NOT supported by this method
private NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
{
if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
uint numberOfAgents = (uint)stackCoordinates.Count;
IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
float[] coordinates = new float[5];
float output;
uint connectionCounter = 0;
float agentDelta = 2.0f / (numberOfAgents - 1);
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
uint totalOutputCount = OutputCount * numberOfAgents;
uint totalInputCount = InputCount * numberOfAgents;
uint totalHiddenCount = HiddenCount * numberOfAgents;
uint sourceCount, targetCout;
double weightRange = HyperNEATParameters.weightRange;
double threshold = HyperNEATParameters.threshold;
NeuronGeneList neurons;
// SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));
// set up the input nodes
for (uint a = 0; a < totalInputCount; a++)
{
neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
}
// set up the output nodes
for (uint a = 0; a < totalOutputCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
}
// set up the hidden nodes
for (uint a = 0; a < totalHiddenCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
}
uint agent = 0;
float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;
foreach (float stackCoordinate in stackCoordinates)
{
coordinates[4] = stackCoordinate;
uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
NeuronGroup connectedNG;
foreach (NeuronGroup ng in neuronGroups)
{
foreach (uint connectedTo in ng.ConnectedTo)
{
connectedNG = getNeuronGroup(connectedTo);
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
//-----------------Get the bias of the source node
switch (ng.GroupType)
{
case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
}
coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RecursiveActivation();//network.MultipleSteps(iterations);
neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
//----------------------------
targetCout = 0;
foreach (PointF target in connectedNG.NeuronPositions)
{
switch (ng.GroupType)
{
case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
}
switch (connectedNG.GroupType)
{
case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
}
coordinates[0] = source.X;
coordinates[1] = source.Y;
coordinates[2] = target.X;
//......... part of the code is omitted here .........
Example 7: generateHomogeneousGenome
// NOTE: Multi-Plane Substrates MAY be supported by this method!
private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
{
IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
float[] coordinates = new float[4]; //JUSTIN: CHANGE THIS BACK TO [4]!!!
float output;
uint connectionCounter = 0;
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
uint totalOutputCount = OutputCount;
uint totalInputCount = InputCount;
uint totalHiddenCount = HiddenCount;
uint sourceCount, targetCout;
double weightRange = HyperNEATParameters.weightRange;
double threshold = HyperNEATParameters.threshold;
NeuronGeneList neurons;
// SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));
// set up the input nodes
for (uint a = 0; a < totalInputCount; a++)
{
neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
}
// set up the output nodes
for (uint a = 0; a < totalOutputCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
}
// set up the hidden nodes
for (uint a = 0; a < totalHiddenCount; a++)
{
neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
}
// CPPN Outputs: [ Weights ] [ Biases ]
// When using multi-plane substrates, there will be multiple Weight and Bias outputs.
// There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
// There is a Bias output for every plane
// Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
int numPlanes = planes.Count;
int numPlaneConnections = planesConnected.Count;
int computedIndex;
uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
NeuronGroup connectedNG;
foreach (NeuronGroup ng in neuronGroups)
{
foreach (uint connectedTo in ng.ConnectedTo)
{
connectedNG = getNeuronGroup(connectedTo);
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
//-----------------Get the bias of the source node
/*switch (ng.GroupType)
{
case 0: sourceID = ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
}
coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RecursiveActivation();//network.MultipleSteps(iterations);
neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
//*///----------------------------
targetCout = 0;
foreach (PointF target in connectedNG.NeuronPositions)
{
switch (ng.GroupType)
{
case 0: sourceID = ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
}
switch (connectedNG.GroupType)
{
case 0: targetID = connectedNG.GlobalID + targetCout; break;
case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
}
//-----------------Get the bias of the target node
coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
//coordinates[4] = 0.0f; coordinates[5] = 0.0f; //JUSTIN: REMOVE THIS!!!
//String s = arrayToString(coordinates);
//if (weights.ContainsKey(s))
// neurons[(int)targetID].Bias = weights[s];
//......... part of the code is omitted here .........
Example 8: generateGenome
public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
{
float[] coordinates = new float[4];
float output;
uint connectionCounter = 0;
int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
ConnectionGeneList connections = new ConnectionGeneList();
if (hiddenCount > 0)
{
coordinates[0] = -1 + inputDelta / 2.0f;
coordinates[1] = -1;
coordinates[2] = -1 + hiddenDelta / 2.0f;
coordinates[3] = 0;
for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
{
coordinates[2] = -1 + hiddenDelta / 2.0f;
for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[2] += hiddenDelta)
{
network.ClearSignals();
network.SetInputSignals(coordinates);
network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, input, hidden + inputCount + outputCount, weight));
}
}
}
coordinates[0] = -1 + hiddenDelta / 2.0f;
coordinates[1] = 0;
coordinates[2] = -1 + outputDelta / 2.0f;
coordinates[3] = 1;
for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[0] += hiddenDelta)
{
coordinates[2] = -1 + outputDelta / 2.0f;
for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
{
network.ClearSignals();
network.SetInputSignals(coordinates);
network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, hidden + inputCount + outputCount, outputs + inputCount, weight));
}
}
}
}
else
{
coordinates[0] = -1 + inputDelta / 2.0f;
coordinates[1] = -1;
coordinates[2] = -1 + outputDelta / 2.0f;
coordinates[3] = 1;
for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
{
coordinates[2] = -1 + outputDelta / 2.0f;
for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
{
network.ClearSignals();
network.SetInputSignals(coordinates);
network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold)
{
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, input, outputs + inputCount, weight));
}
}
}
}
return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
}
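Example 8 places each layer on a fixed y line (inputs at y = -1, hidden at y = 0, outputs at y = 1) and sweeps the x coordinate across [-1, 1], starting at the centre of the first bin and stepping by the layer's delta. The delta fields themselves are defined outside the excerpt; the sketch below assumes the usual delta = 2 / nodeCount convention, so it is an assumption about the class rather than a quote from it.
public static class SubstrateLayout
{
// Assumed convention: each layer splits [-1, 1] into nodeCount equal bins.
public static float Delta(uint nodeCount)
{ return 2.0f / nodeCount; }
// The i-th node sits at the centre of its bin, matching the "-1 + delta / 2" start and "+= delta" sweep in Example 8.
public static float Position(uint i, uint nodeCount)
{ return -1.0f + Delta(nodeCount) / 2.0f + i * Delta(nodeCount); }
}
// A layer of 4 nodes yields x = -0.75, -0.25, 0.25, 0.75.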
Example 9: generateGenome
public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
{
int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
double epsilon = 0.0;
uint firstBias = 0;
uint lastBias = biasCount;
uint firstInput = biasCount;
uint lastInput = biasCount + inputCount;
uint firstOutput = biasCount + inputCount;
uint lastOutput = biasCount + inputCount + outputCount;
uint firstHidden = biasCount + inputCount + outputCount;
uint lastHidden = biasCount + inputCount + outputCount + hiddenCount;
float[] coordinates = new float[4];
float output;
uint connectionCounter = 0;
ConnectionGeneList connections = new ConnectionGeneList();
// give bias inputs to all hidden and output nodes.
// the source of the link is located at (0,0), the target is each node, and the weight of the link is the second output of the CPPN.
coordinates[0] = 0;
coordinates[1] = 0;
for (uint bias = firstBias; bias < lastBias; bias++) {
// link the bias to all hidden nodes.
coordinates[2] = -1 + hiddenDelta / 2.0f;
coordinates[3] = 0;
for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
coordinates[2] += hiddenDelta;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RelaxNetwork(maxIterations, epsilon);
output = network.GetOutputSignal(1);
if (Math.Abs(output) > threshold) {
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, bias, hidden, weight));
}
}
// link the bias to all output nodes.
coordinates[2] = -1 + outputDelta / 2.0f;
coordinates[3] = 1;
for (uint outp = firstOutput; outp < lastOutput; outp++) {
coordinates[2] += outputDelta;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RelaxNetwork(maxIterations, epsilon);
output = network.GetOutputSignal(1);
if (Math.Abs(output) > threshold) {
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, bias, outp, weight));
}
}
}
if (hiddenCount > 0) {
// link all input nodes to all hidden nodes.
coordinates[0] = -1 + inputDelta / 2.0f;
coordinates[1] = -1;
coordinates[2] = -1 + hiddenDelta / 2.0f;
coordinates[3] = 0;
for (uint input = firstInput; input < lastInput; input++) {
coordinates[0] += inputDelta;
coordinates[2] = -1 + hiddenDelta / 2.0f;
for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
coordinates[2] += hiddenDelta;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RelaxNetwork(maxIterations, epsilon);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold) {
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, input, hidden, weight));
}
}
}
// link all hidden nodes to all output nodes.
coordinates[0] = -1 + hiddenDelta / 2.0f;
coordinates[1] = 0;
coordinates[2] = -1 + outputDelta / 2.0f;
coordinates[3] = 1;
for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
coordinates[0] += hiddenDelta;
coordinates[2] = -1 + outputDelta / 2.0f;
for (uint outp = firstOutput; outp < lastOutput; outp++) {
coordinates[2] += outputDelta;
network.ClearSignals();
network.SetInputSignals(coordinates);
network.RelaxNetwork(maxIterations, epsilon);
output = network.GetOutputSignal(0);
if (Math.Abs(output) > threshold) {
float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
connections.Add(new ConnectionGene(connectionCounter++, hidden, outp, weight));
}
}
//......... part of the code is omitted here .........
Example 10: generateGenomeStackSituationalPolicy
//......... part of the code is omitted here .........
sourceCount = 0;
foreach (PointF source in ng.NeuronPositions)
{
//----------------------------
targetCout = 0;
foreach (PointF target in connectedNG.NeuronPositions)
{
switch (ng.GroupType)
{
case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break; //Input
case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break; //Output
case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
}
switch (connectedNG.GroupType)
{
case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
}
//--- bias
//-----------------Get the bias of the target node
if (!biasCalculated[targetID])
{
coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
biasCalculated[targetID] = true;
}
//--bias
coordinates[0] = source.X;
coordinates[1] = source.Y;
coordinates[2] = target.X;
coordinates[3] = target.Y;
// Schrum: Debug
//Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));
network.ClearSignals();
network.SetInputSignals(coordinates);
((ModularNetwork)network).RecursiveActivation();
//network.MultipleSteps(iterations);
output = network.GetOutputSignal(0);
double leo = 0.0;
// Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
if (adaptiveNetwork)
{
A = network.GetOutputSignal(2);
B = network.GetOutputSignal(3);
C = network.GetOutputSignal(4);
D = network.GetOutputSignal(5);