本文整理汇总了C++中Network::Simulate方法的典型用法代码示例。如果您正苦于以下问题:C++ Network::Simulate方法的具体用法?C++ Network::Simulate怎么用?C++ Network::Simulate使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Network
的用法示例。
在下文中一共展示了Network::Simulate方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: NetworkTestBCPNNRecurrent
void NetworkTests::NetworkTestBCPNNRecurrent()
{
// network construction
Network* network = new Network();
PopulationColumns* layer1 = new PopulationColumns(network,256,1,PopulationColumns::Graded);
PopulationColumns* layer2 = new PopulationColumns(network,1,5, PopulationColumns::Graded);
FullConnectivity* full = new FullConnectivity();
FullConnectivity* full2 = new FullConnectivity();
FullConnectivity* full3 = new FullConnectivity();
FullConnectivity* full4 = new FullConnectivity();
network->AddPopulation(layer1);
network->AddPopulation(layer2);
layer1->AddPost(layer2,full); // Feedforward, with BCPNN
layer2->AddPre(layer1,full2); // Feedforward
layer2->AddPost(layer2,full3); // Recurrent, with inhibitory BCPNN
layer2->AddPre(layer2,full4); // Recurrent
SoftMax* softmax = new SoftMax(1.0, SoftMax::Standard);
layer2->AddPopulationModifier(softmax);
// Add Projection changes
float lambda0 = 0.0001;
float alpha = 0.01;
ProjectionModifierBcpnnOnline* bStandard = new ProjectionModifierBcpnnOnline(alpha,lambda0);
ProjectionModifierBcpnnOnline* bInhib = new ProjectionModifierBcpnnOnline(alpha,lambda0);
full2->AddProjectionsEvent(bStandard);
full4->AddProjectionsEvent(bInhib);
// Construct initial network
network->Initialize();
vector<int> partsOfDataToUseAsInput = layer1->GetUnitIdLocals();//GetMPIDistribution(network->MPIGetNodeId());
// Specify input data
vector<vector<float> > iData2;// = storageH5.LoadDataFloatHDF5("C:\\Projects\\Databases\\Bars\\bars1616.h5","dataset1",partsOfDataToUseAsInput,0,5);
// Simulate
int iterations = 20;
for(int i=0;i<iterations;i++)
{
cout<<i<<"\n";
for(int j=0;j<iData2.size();j++)
{
layer1->SetValuesLocal(iData2[j]);
// next time step
network->Simulate();
}
// save weights
}
}
示例2: NetworkTestSanger
// Trains a Sanger (generalized Hebbian / PCA-extracting) projection between
// a 2-unit input layer and a 2-unit middle layer on uniformly random 2-D
// points in [0,1).
// @param mpiRank  MPI rank forwarded to the network.
// @param mpiSize  MPI communicator size forwarded to the network.
void NetworkTests::NetworkTestSanger(int mpiRank, int mpiSize)
{
    int nrInputHypercolumns = 1;
    int nrInputRateUnits = 2;  // 1 input (number)
    int nrMiddleHypercolumns = 1;
    int nrMiddleRateUnits = 2; // 2 code vectors

    // Set up network
    Network* network = new Network();
    network->SetMPIParameters(mpiRank, mpiSize);

    PopulationColumns* layer1InclTime = new PopulationColumns(network, nrInputHypercolumns, nrInputRateUnits, PopulationColumns::Graded);
    FullConnectivity* fullMidSanger = new FullConnectivity();
    PopulationColumns* layerSanger = new PopulationColumns(network, nrMiddleHypercolumns, nrMiddleRateUnits, PopulationColumns::Graded);
    layerSanger->AddPre(layer1InclTime, fullMidSanger);

    // PCA/Sanger extraction
    ProjectionModifierSanger* sangerLearn = new ProjectionModifierSanger();
    fullMidSanger->AddProjectionsEvent(sangerLearn);

    network->AddPopulation(layer1InclTime);
    network->AddPopulation(layerSanger);
    network->Initialize();

    ///////////////////////////////////
    //////// Data generation
    vector<vector<float> > dataIn;
    int nrTrainItems = 100000;
    // BUG FIX: the original divided by (float)(RAND_MAX+1). RAND_MAX+1 is
    // evaluated in int first, which is signed-integer overflow (undefined
    // behavior) on platforms where RAND_MAX == INT_MAX. Do the "+1" in
    // floating point instead; the result is still a uniform value in [0,1).
    for (int i = 0; i < nrTrainItems; i++)
    {
        float t = (float)rand() / ((float)RAND_MAX + 1.0f);
        float y = (float)rand() / ((float)RAND_MAX + 1.0f);
        vector<float> f(2);
        f[0] = t;
        f[1] = y;
        dataIn.push_back(f);
    }

    ////////////////////////////////////
    ////// Simulate
    layer1InclTime->SwitchOnOff(false); // fixed
    for (int i = 0; i < nrTrainItems; i++)
    {
        layer1InclTime->SetValuesAll(dataIn[i]);
        network->Simulate();
    }
}
示例3: NetworkTestPearson
void NetworkTests::NetworkTestPearson(int mpiRank, int mpiSize)
{
Network* network = new Network();
network->SetMPIParameters(mpiRank,mpiSize);
int nrHypercolumns = 1;
int nrRateUnits = 2;
PopulationColumns* layer1 = new PopulationColumns(network,nrHypercolumns,nrRateUnits,PopulationColumns::Graded);
FullConnectivity* full = new FullConnectivity();
network->AddPopulation(layer1);
layer1->AddPre(layer1,full);
ProjectionModifierPearson* ePearson = new ProjectionModifierPearson();
full->AddProjectionsEvent(ePearson);
network->Initialize();
vector<vector<float> > trainData;
vector<float> a1; a1.push_back(1);a1.push_back(-1);
vector<float> a2; a2.push_back(0);a2.push_back(-1);
vector<float> a3; a3.push_back(1);a3.push_back(-1);
vector<float> a4; a4.push_back(0);a4.push_back(0);
vector<float> a5; a5.push_back(2);a5.push_back(2);
vector<float> a6; a6.push_back(3);a6.push_back(-2);
trainData.push_back(a1);
trainData.push_back(a2);
trainData.push_back(a3);
trainData.push_back(a4);
trainData.push_back(a5);
trainData.push_back(a6);
unsigned int iterations = 1000;
layer1->SwitchOnOff(false); // fixed during training phase
for(unsigned int j=0;j<iterations;j++)
{
for(unsigned int i=0;i<trainData.size();i++)
{
layer1->SetValuesAll(trainData[i]);
network->Simulate();
}
}
}
示例4: NetworkTestMDSVQ
// Builds a two-layer network combining mutual-information (MI) estimation,
// a multidimensional-scaling (MDS) population modifier and vector
// quantization (VQ) on layer 1, plus an inhibitory BCPNN on layer 2, then
// simulates it. The HDF5 data load is commented out, so iData2 stays empty
// and the inner data loop does not execute.
// NOTE(review): this listing is truncated ("这里部分代码省略") — the
// simulation loops below are shown without their closing braces. Code is
// left byte-for-byte unchanged; only comments were added.
void NetworkTests::NetworkTestMDSVQ(int mpiRank, int mpiSize)
{
// network construction
Network* network = new Network();
network->SetMPIParameters(mpiRank,mpiSize);
// Layer sizes: input 24 hypercolumns x 25 rate units, output 5 x 5.
int nrInputHypercolumns = 24;
int nrInputRateUnits = 25;
int nrOutputHypercolumns = 5;
int nrOutputRateUnits = 5;
PopulationColumns* layer1 = new PopulationColumns(network,nrInputHypercolumns,nrInputRateUnits,PopulationColumns::Graded);
PopulationColumns* layer2 = new PopulationColumns(network,nrOutputHypercolumns,nrOutputRateUnits,PopulationColumns::Graded);
// Connectivity objects: full/full2 feedforward, full3..full6 minicolumn-level
// recurrents, full7/full8 hypercolumn-level recurrents (used for MI + MDS).
FullConnectivity* full = new FullConnectivity();
FullConnectivity* full2 = new FullConnectivity();
FullConnectivity* full3 = new FullConnectivity();
FullConnectivity* full4 = new FullConnectivity();
FullConnectivity* full5 = new FullConnectivity();
FullConnectivity* full6 = new FullConnectivity();
FullConnectivity* full7 = new FullConnectivity(true,"hypercolumn");
FullConnectivity* full8 = new FullConnectivity(true,"hypercolumn");
network->AddPopulation(layer1);
network->AddPopulation(layer2);
layer1->AddPost(layer2,full); // Feedforward, modified by VQ calculations
layer2->AddPre(layer1,full2); // Feedforward
layer1->AddPost(layer1,full7); // Recurrent, with MI hypercolumn calculations + MDS
layer1->AddPre(layer1,full8); // Recurrent
layer1->AddPost(layer1,full3); // Recurrent, with MI minicolumn calculations
layer1->AddPre(layer1,full4); // Recurrent
layer2->AddPost(layer2,full5); // Recurrent, with inhib bcpnn
layer2->AddPre(layer2,full6); // Recurrent
// MDS target dimensionality and MI dimensionality (one per input hypercolumn).
int mdsDimension = 3;
int miDimension = nrInputHypercolumns;
// MI: hypercolumn-level estimator feeds the rate-unit-level one.
ProjectionModifierMIHypercolumn* miHypercolumns = new ProjectionModifierMIHypercolumn();
ProjectionModifierMIRateUnit* miRateUnits = new ProjectionModifierMIRateUnit(miHypercolumns);
full8->AddProjectionsEvent(miHypercolumns);
full4->AddProjectionsEvent(miRateUnits);
//miRateUnits->AddParentProjectionModifier(miHypercolumns); // allows mi hypercolumns to have access to the belonging mi minicolumns (set as default?)
// MDS
LayerMDS* MDS = new LayerMDS(miDimension,mdsDimension, network);
ProjectionModifierMDS* mdsHypercolumns = new ProjectionModifierMDS();
layer1->AddPopulationModifier(MDS);
mdsHypercolumns->AddParentPopulationModifier(MDS); // allows MDS to have access to the hypercolumn event Projections (will be set as default)
full8->AddProjectionsEvent(mdsHypercolumns);
// VQ: cluster layer-1 output into nrGroups groups.
int nrGroups = 10;
LayerVQ* VQ = new LayerVQ(nrGroups, LayerVQ::VQStandard);
layer1->AddPopulationModifier(VQ);
VQ->AddChildPopulationModifier(MDS); // Allow VQ to have access to MDS output (m_Xi)
//full2->AddProjectionsEvent(VQconn);
// Inhibitory bcpnn: negative beta/weight impact on layer-2 recurrent.
float lambda0 = 0.0001;
float alpha = 0.01;
ProjectionModifierBcpnnOnline* bInhib = new ProjectionModifierBcpnnOnline(alpha,lambda0);
bInhib->SetImpactBeta(-1);
bInhib->SetImpactWeights(-1);
full6->AddProjectionsEvent(bInhib);
// Construct initial network
network->Initialize();
vector<int> partsOfDataToUseAsInput = layer1->GetUnitIdLocals();//GetMPIDistribution(mpiRank);
// allData enumerates every input unit index (used by the disabled HDF5 load).
vector<int> allData;
for(int i=0;i<nrInputHypercolumns*nrInputRateUnits;i++)
allData.push_back(i);
// Specify input data
Storage storageH5;
storageH5.SetMPIParameters(mpiRank,mpiSize);
vector<vector<float> > iData2;// = storageH5.LoadDataFloatHDF5("C:\\Projects\\Network\\Databases\\HWBCPNN\\mtact.h5","dataset1",allData,0,25);//partsOfDataToUseAsInput,0,25);
// Simulate; rank 1 prints progress. Note the inner loop starts at j=3 and
// presents each item for 3 time steps.
int iterations = 10;
for(int i=0;i<iterations;i++)
{
if(mpiRank == 1) cout<<i<<"\n";
for(int j=3;j<iData2.size();j++)
{
if(mpiRank == 1) cout<<".";
for(int k=0;k<3;k++)
{
layer1->SetValuesLocal(iData2[j]);
// next time step
network->Simulate();
//.........这里部分代码省略.........
示例5: NetworkTestTrieschAndFoldiak
//.........这里部分代码省略.........
// NOTE(review): this is the TAIL of NetworkTestTrieschAndFoldiak — the
// listing is truncated at the start ("部分代码省略"), so network/layer1/
// layer2/full/isTriesch/dataSources/sizeX/sizeY/nrItems are declared in the
// omitted part. Code is left byte-for-byte unchanged; only comments added.
// Foldiak learning-rate/threshold parameters; alpha differs between the
// feedforward and lateral modifiers.
float eta1 = 0.5, eta2= 0.02, eta3 = 0.02, alpha = 0.0005, beta = 10;//alpha = 1.0/8.0, beta = 10;
bool lateral = false;
ProjectionModifierFoldiak* eFoldiak = new ProjectionModifierFoldiak(eta1, eta2, eta3, alpha, beta, lateral);
lateral = true;
alpha = 0.75;
ProjectionModifierFoldiak* eFoldiakLateral = new ProjectionModifierFoldiak(eta1, eta2, eta3, alpha, beta, lateral);
//ProjectionModifierBCM* eBCM = new ProjectionModifierBCM(0.1,0.05,20);
// Foldiak variant: plain lateral connectivity; Triesch variant: lateral
// connectivity excluding local hypercolumns.
if(!isTriesch)
{
full2 = new FullConnectivity();
layer2->AddPre(layer2,full2);
full->AddProjectionsEvent(eFoldiak);
full2->AddProjectionsEvent(eFoldiakLateral);
}
else
{
full3NoLocal = new FullConnectivityNoLocalHypercolumns();
//full3NoLocal->AddProjectionsEvent(eBCM);
full3NoLocal->AddProjectionsEvent(eFoldiakLateral);
layer2->AddPre(layer2,full3NoLocal);
}
// implements N here
SoftMax* softmax = new SoftMax(SoftMax::WTAThresholded,0.5);//(10.0, SoftMax::ProbWTA);
WTA* wta = new WTA();
//layer2->AddPopulationModifier(wta);
layer2->AddPopulationModifier(softmax);
network->Initialize();
//////////////////////////////
// Meters: per-rank projection CSV plus a layer-2 activity CSV.
char* name1 = new char[50];
char* name2 = new char[50];
sprintf(name1,"Projection_triesch_n%d.csv",mpiRank);
Meter* connMeter = new Meter(name1, Storage::CSV);
connMeter->AttachProjection(layer2->GetIncomingProjections()[0],0);
network->AddMeter(connMeter);
sprintf(name2,"Layer2Activity_triesch.csv");
Meter* layerMeter = new Meter(name2, Storage::CSV);
layerMeter->AttachPopulation(layer2);
network->AddMeter(layerMeter);
// end Meters
//////////////////////////////
// Bars stimuli; each stimulus is held for iterSameStimuli steps (fewer for
// the Foldiak variant), then the network state is reset between stimuli.
vector<vector<float> > trainData = dataSources.GetBars(sizeX,sizeY, nrItems);
int iterations = 1;
int iterSameStimuli = 100;
if(!isTriesch)
iterSameStimuli = 10;
layer1->SwitchOnOff(false); // fixed during training phase
for(int j=0;j<iterations;j++)
{
for(int i=0;i<trainData.size();i++)
{
/*if(!isTriesch)
{
// in order to settle recurrent activity
eFoldiak->SwitchOnOff(false);
eFoldiakLateral->SwitchOnOff(false);
}*/
for(int k=0;k<iterSameStimuli;k++)
{
/* if(!isTriesch && k==iterSameStimuli-1)
{
eFoldiak->SwitchOnOff(true);
eFoldiakLateral->SwitchOnOff(true);
}
*/
for(int m=0;m<1;m++)
{
layer1->SetValuesAll(trainData[i]);
//for(int n=0;n<3;n++)
network->Simulate();
}
}
// allow units to reset
network->Reset();
/*if(i%50 == 0)
{
network->RecordAll();
if(mpiRank == 0)
cout<<"Storing.";
}*/
}
}
network->RecordAll();
}
示例6: NetworkTestIF
// Integrate-and-fire (IF) test: two small IF layers, an inhibitory recurrent
// BCPNN on layer 1, a "training" phase that clamps four binary patterns, and
// a free-running phase in which patterns 0 and 1 are periodically re-imposed.
// Unit 0 of layer 1 is recorded to unit1.csv.
// @param mpiRank  MPI rank (currently only forwarded to the network).
// @param mpiSize  MPI communicator size forwarded to the network.
void NetworkTests::NetworkTestIF(int mpiRank,int mpiSize)
{
    Network* network = new Network();
    network->SetMPIParameters(mpiRank, mpiSize);

    int nrHypercolumns = 2;
    int nrRateUnits = 2;
    PopulationColumns* layer1 = new PopulationColumns(network, nrHypercolumns, nrRateUnits, PopulationColumns::IF);
    PopulationColumns* layer2 = new PopulationColumns(network, nrHypercolumns, nrRateUnits, PopulationColumns::IF);

    FullConnectivity* full = new FullConnectivity();
    FullConnectivity* full2 = new FullConnectivity();
    FullConnectivity* full3 = new FullConnectivity();
    FullConnectivity* full4 = new FullConnectivity();

    network->AddPopulation(layer1);
    network->AddPopulation(layer2);
    //layer1->AddPost(layer2,full); // Feedforward layer 1 -> layer 2
    layer2->AddPre(layer1, full2);
    //layer1->AddPost(layer1,full3); // Recurrent layer 1 -> layer 1
    layer1->AddPre(layer1, full4);

    // Inhibitory BCPNN on the recurrent layer-1 projection (negative impact
    // on both bias and weights).
    float lambda0 = 0.0001f;
    float alpha = 0.01f;
    ProjectionModifierBcpnnOnline* bRecL1 = new ProjectionModifierBcpnnOnline(alpha, lambda0);
    bRecL1->SetImpactBeta(-1);
    bRecL1->SetImpactWeights(-1);
    full4->AddProjectionsEvent(bRecL1);

    // Construct initial network
    network->Initialize();

    // Recordings
    Meter unitMeter("unit1.csv", Storage::CSV);
    unitMeter.AttachUnit(layer1->GetRateUnits()[0]);
    network->AddMeter(&unitMeter);

    int timesteps = int(1e3);
    int nrPatterns = 4;
    vector<vector<float> > patterns(nrPatterns);
    vector<float> p1(8), p2(8), p3(8), p4(8);
    p1[0] = 1.0; p1[1] = 1.0; p1[2] = 0.0; p1[3] = 0.0; p1[4] = 1.0; p1[5] = 1.0; p1[6] = 0.0; p1[7] = 0.0; // 1100 1100
    p2[0] = 0.0; p2[1] = 1.0; p2[2] = 1.0; p2[3] = 0.0; p2[4] = 0.0; p2[5] = 1.0; p2[6] = 1.0; p2[7] = 0.0; // 0110 0110
    p3[0] = 0.0; p3[1] = 0.0; p3[2] = 1.0; p3[3] = 1.0; p3[4] = 0.0; p3[5] = 0.0; p3[6] = 1.0; p3[7] = 1.0; // 0011 0011
    p4[0] = 1.0; p4[1] = 0.0; p4[2] = 0.0; p4[3] = 1.0; p4[4] = 1.0; p4[5] = 0.0; p4[6] = 0.0; p4[7] = 1.0; // 1001 1001
    // BUG FIX: the original stored p3/p4 at indices 3 and 4. Index 4 is out
    // of bounds for a vector of size nrPatterns == 4 (undefined behavior),
    // and patterns[2] was left empty. Store the four patterns at 0..3.
    patterns[0] = p1;
    patterns[1] = p2;
    patterns[2] = p3;
    patterns[3] = p4;

    // initial training phase (separated atm)
    layer1->SwitchOnOff(false);
    int iterations = 100;
    int stepsEach = 5;
    for (int i = 0; i < iterations; i++)
    {
        for (int j = 0; j < stepsEach; j++)
        {
            layer1->SetValuesAll(patterns[i % nrPatterns]);
            // NOTE(review): no network->Simulate() call in this loop, so the
            // "training" only re-sets input values; confirm whether a
            // Simulate() step per iteration was intended here.
        }
    }

    // free running: re-impose pattern 0 every 10 steps, pattern 1 every
    // other multiple of 5.
    layer1->SwitchOnOff(true);
    for (int i = 0; i < timesteps; i++)
    {
        if (i % 10 == 0)
        {
            layer1->SetValuesAll(patterns[0]);
        }
        else if (i % 5 == 0)
        {
            layer1->SetValuesAll(patterns[1]);
        }
        network->Simulate();
    }
}
示例7: NetworkTestMNISTClassification
//.........这里部分代码省略.........
// NOTE(review): this is a mid-function FRAGMENT of
// NetworkTestMNISTClassification — truncated at both ends, so the enclosing
// if/loops and the declarations of modSize, plastStopIter, iterations,
// compLearn, bFeedforward, bInhib, clC, softmax, the meters and the layers
// live in omitted code. Code left byte-for-byte unchanged; comments added.
{
modSize = 100;//10;
plastStopIter = int(iterations*0.8f);
}
else
{
modSize = 100;
plastStopIter = iterations*1;//*0.7;
}
// Every modSize iterations (while competitive learning is on): anneal —
// halve alpha and the inhibitory beta impact, shrink the CL constant by 0.7.
if(j%modSize == 0 && j!=0 && compLearn->IsOn())
{
alpha = alpha/2;
impactBeta = impactBeta/2;
bFeedforward->SetAlpha(alpha);
bInhib->SetImpactBeta(impactBeta);
clC = clC*0.7f;
compLearn->SetC(clC);
//bInhib->SetImpactBeta(0.0);
//softmax->SetType(false);
}
// At plastStopIter: freeze plasticity and switch softmax to hard WTA.
if(j==plastStopIter)
{
bFeedforward->SwitchOnOff(false);//SetAlpha(0.0);
bInhib->SwitchOnOff(false);//SetImpactBeta(0.0);
softmax->SetType(SoftMax::WTA);
}
}
// next time step
network->Simulate();
cout.flush();
}
// Record projection state every pass; rank 0 also records layer activity.
connMeter.RecordAll(0);
if(mpiRank == 0)
{
cout<<"\n";
cout.flush();
layerMeter.RecordAll(0);
layer3Meter.RecordAll(0);
}
}
// save data
if(mpiRank == 0)
vqMeter.RecordAll(0);
// 2. Testing phase: clear state, fix the input layer, enable the output
// layer, and switch all plasticity off.
//network->Simulate();
network->Reset(); // Clears values
//network->Simulate();
//network->Simulate();
layer1->SwitchOnOff(false);
layer3->SwitchOnOff(true);
bFeedforward->SwitchOnOff(false);//->SetAlpha(0.0);
bClassification->SwitchOnOff(false);
kClassification->SwitchOnOff(false);
bInhib->SwitchOnOff(false);//SetImpactBeta(0.0);
示例8: NetworkTestMNISTRecurrent
// Recurrent-BCPNN memory test on MNIST-like binary data: a single input
// layer with a recurrent BCPNN-modified projection and a softmax modifier,
// trained by clamping binarized images.
// NOTE(review): this listing is truncated at the end ("部分代码省略").
// Code left byte-for-byte unchanged; only comments were added.
void NetworkTests::NetworkTestMNISTRecurrent(int mpiRank, int mpiSize)
{
// Set up network
int nrColors = 2;
char* filename;
int nrInputHypercolumns = 28*5;//28;
int nrInputRateUnits = nrColors;
// Middle/output sizes are declared but layer2/layer3 are never added to the
// network below.
int nrMiddleHypercolumns = 5;
int nrMiddleRateUnits = 5;
int nrOutputHypercolumns = 1;
int nrOutputRateUnits = 10;
Network* network = new Network();
network->SetMPIParameters(mpiRank,mpiSize);
PopulationColumns* layer1 = new PopulationColumns(network,nrInputHypercolumns,nrInputRateUnits,PopulationColumns::Graded); // input
PopulationColumns* layer2 = new PopulationColumns(network,nrMiddleHypercolumns,nrMiddleRateUnits,PopulationColumns::Graded); // middle
PopulationColumns* layer3 = new PopulationColumns(network,nrOutputHypercolumns,nrOutputRateUnits,PopulationColumns::Graded); // output
FullConnectivity* full = new FullConnectivity();
FullConnectivity* full2 = new FullConnectivity();
FullConnectivity* full3 = new FullConnectivity();
FullConnectivity* full4 = new FullConnectivity();
FullConnectivity* full7 = new FullConnectivity(true,"hypercolumn");
FullConnectivity* full8 = new FullConnectivity(true,"hypercolumn");
FullConnectivity* full9 = new FullConnectivity();
FullConnectivity* full10 = new FullConnectivity();
network->AddPopulation(layer1);
// layer1->AddPost(layer1,full3); // Recurrent, with MI minicolumn calculations
layer1->AddPre(layer1,full4); // Recurrent
int mdsDimension = 5;
int miDimension = nrInputHypercolumns;
// Recurrent bcpnn for memory test
float lambda0 = 0.0001;
float alpha = 0.01;
ProjectionModifierBcpnnOnline* bRecTest = new ProjectionModifierBcpnnOnline(alpha,lambda0);
full4->AddProjectionsEvent(bRecTest);
SoftMax* softmax = new SoftMax(1.0,SoftMax::Standard);
layer1->AddPopulationModifier(softmax);
// Construct initial network
network->Initialize();
// Specify input data: pick the MNIST HDF5 path by architecture. Note
// filename stays uninitialized if nrColors is neither 2 nor <= 0.
Storage storageH5;
storageH5.SetMPIParameters(mpiRank,mpiSize);
if(m_architecture == BGL)
{
if(nrColors == 2)
filename = "/gpfs/scratch/s/simonbe/Databases/MNIST/MNIST_2colors.h5";
else if(nrColors <= 0)
filename = "/gpfs/scratch/s/simonbe/Databases/MNIST/MNIST.h5";
}
else if(m_architecture == PC)
{
if(nrColors == 2)
filename = "C:\\CurrentProjects\\Network\\Databases\\MNIST\\MNIST_2colors.h5";
else if(nrColors <= 0)
filename = "C:\\CurrentProjects\\Network\\Databases\\MNIST\\MNIST.h5";
}
vector<int> partsOfDataToUseAsInput = layer1->GetMPIDistributionHypercolumns(mpiRank);
vector<int> partsOfDataToUseAsOutput = vector<int>();//layer1->GetMPIDistributionHypercolumns(mpiRank);
// Training phase
int nrTrainImages = 60000;
int nrTestImages = 1000;
// NOTE(review): trainingData is never filled in the visible code, yet
// trainingData[i] is indexed up to nrTrainImages below — out-of-bounds on an
// empty vector unless the omitted tail (or edited-out load) populates it.
vector<vector<float> > trainingData;
int iterations = 10;
int stepsStimuliOn = 1;
// Training phase
layer1->SwitchOnOff(false); // fixed during training phase
for(int j=0;j<iterations;j++)
for(int i=0;i<nrTrainImages;i++)
{
if(mpiRank == 0)
{
cout<<i;
cout.flush();
}
vector<float> binData = toBinary(trainingData[i],trainingData[i].size(), nrInputRateUnits);//vector<float> binData = toBinary(currentTrainingData[0],currentTrainingData[0].size(),nrInputRateUnits);//trainingData[i],trainingData[i].size(), nrInputRateUnits);
layer1->SetValuesAll(binData);
for(int t=0;t<stepsStimuliOn;t++)
{
// next time step
network->Simulate();
}
//.........这里部分代码省略.........
示例9: NetworkTestInclTimingBCPNNRecurrent
// Large-scale timing/scaling test: one big graded layer with sparse random
// recurrent connectivity and online BCPNN, trained on orthogonal random
// patterns and then (optionally) tested, with timing probes attached.
// NOTE(review): this listing is truncated at the end ("部分代码省略") —
// the testing loop is shown without its closing braces. Code left
// byte-for-byte unchanged; only comments were added.
void NetworkTests::NetworkTestInclTimingBCPNNRecurrent()
{
int nrHypercolumns = 65536;//65536;//128*16*10;//128*16*10;//128*16*10;//128*16;
int nrRateUnits = 100;//50;//100;//100;//40;//10
//float activity = 0.05;
int nrItems = 5;
bool storeData = false;
// Sparse recurrent connection probability.
float probConnectivity = 0.0003;//0.00005;//0.000025;//0.00003;//0.000025;//0.001;//0.00044;//0.00011;//0.00024;//0.00006;//0.0004;//0.00001;
bool doTesting = true; // false for some scaling tests
// network construction
Network* network = new Network();
network->AddTiming(network);
PopulationColumns* layer1 = new PopulationColumns(network,nrHypercolumns,nrRateUnits,PopulationColumns::Graded);
network->AddPopulation(layer1);
// full is created (zero weights) but the recurrent projection below uses
// randConn; full is only used for a timing probe further down.
FullConnectivity* full = new FullConnectivity();//false,"minicolumns");
full->SetRandomWeights(0,0);
RandomConnectivity* randConn = new RandomConnectivity(probConnectivity);//0.1);
randConn->SetRandomWeights(0,0);
//network->AddTiming(randConn);
layer1->AddPre(layer1,randConn); // recurrent
// Add Projection changes
float lambda0 = 10e-6;
float alpha = 0.05;
ProjectionModifierBcpnnOnline* bStandard = new ProjectionModifierBcpnnOnline(alpha,lambda0);
//full->AddProjectionsEvent(bStandard);
randConn->AddProjectionsEvent(bStandard);
WTA* wta = new WTA();
layer1->AddPopulationModifier(wta);
// Construct initial network
network->Initialize();
//vector<int> partsOfDataToUseAsInput = layer1->GetMPIDistribution(network->MPIGetNodeId());
// Specify input data
// - change to only create local part
DataSources source;
// not correct right now as SetValuesAll working locally and this is global
vector<vector<float> > data = source.GetRandomHCsOrthogonal(nrHypercolumns/100,nrRateUnits,nrItems);
// Meters (only attached when storeData is true).
Meter* l1meter = new Meter("layer1.csv", Storage::CSV);
if(storeData)
{
l1meter->AttachPopulation(layer1);
network->AddMeter(l1meter);
}
Meter* c1meter = new Meter("Projections1.csv",Storage::CSV);
if(storeData)
{
c1meter->AttachProjection(layer1->GetIncomingProjections()[0],0);
network->AddMeter(c1meter);
}
// Timings
network->AddTiming(bStandard);
network->AddTiming(layer1);
network->AddTiming(full);
// need to access after it has been built
network->AddTiming(layer1->GetIncomingProjections()[0]);
// Training
// set fixed pattern
layer1->SwitchOnOff(false);
int trainIterations = 1;
int testIterations = 5;
for(int i=0;i<trainIterations;i++)
{
//cout<<i<<"\n";
for(int j=0;j<data.size();j++)
{
layer1->SetValuesAll(data[j]);
// next time step: each pattern is held for 10 simulation steps.
network->Simulate(10);
}
}
// Testing: free-running with plasticity frozen.
if(doTesting == true)
{
layer1->SwitchOnOff(true);
bStandard->SwitchOnOff(false);
for(int i=0;i<testIterations;i++)
{
for(int j=0;j<data.size();j++)
{
// clear all events before switching so no disturbance, can remove if moving average activity etc.
//.........这里部分代码省略.........
示例10: NetworkTestSwitching
// Switching
// Attractor-switching test: a single recurrent layer with BCPNN (plus
// adaptation and WTA population modifiers), trained on random hypercolumn
// patterns, then probed with a mixture of two stored patterns while
// plasticity is switched off.
// NOTE(review): this listing is truncated at the end ("部分代码省略").
// Code left byte-for-byte unchanged; only comments were added.
void NetworkTests::NetworkTestSwitching(int mpiRank, int mpiSize)
{
int nrHypercolumns = 5;
int nrRateUnits = 10;
int nrItems = 2;
DataSources sources;
// Fixed seed for reproducible pattern generation.
srand(2);
vector<vector<float> > data = sources.GetRandomHCs(nrHypercolumns,nrRateUnits,nrItems);//sources.GetRandom(size,0.1,nrItems);
// setup recurrent network
Network* network = new Network();
network->SetMPIParameters(mpiRank,mpiSize);
PopulationColumns* layer1 = new PopulationColumns(network,nrHypercolumns,nrRateUnits,PopulationColumns::Graded);
FullConnectivity* full = new FullConnectivity();//FullConnectivity(false,"");
layer1->AddPre(layer1,full);
network->AddPopulation(layer1);
// Several alternative plasticity rules; only BCPNN is active below.
ProjectionModifierBcpnnOnline* eBcpnn = new ProjectionModifierBcpnnOnline();
ProjectionModifierTriesch* eTriesch = new ProjectionModifierTriesch();
ProjectionModifierHebbSimple* eHebb = new ProjectionModifierHebbSimple();
ProjectionModifierBCM* eBCM = new ProjectionModifierBCM();
full->AddProjectionsEvent(eBcpnn); // incl adding transfer fcn
//full->AddProjectionsEvent(eTriesch); // incl adding transfer fcn
//full->AddProjectionsEvent(eHebb);
//full->AddProjectionsEvent(eBCM);
PopulationModifierAdaptation2* eAdaptation = new PopulationModifierAdaptation2();
//eAdaptation->SetParameters(0,0); // adaptation off initially
eAdaptation->SetParameters(0); // adaptation off initially
layer1->AddPopulationModifier(eAdaptation);
WTA* wta = new WTA();
layer1->AddPopulationModifier(wta);//wta);//softmax);
network->Initialize();
eAdaptation->Initm_Aj(1); // initialize m_Aj vector
// set up meters: projection CSV (per rank), layer activity, and the
// adaptation modifier's output.
char* name1 = new char[30];
char* name2 = new char[30];
char* name3 = new char[30];
sprintf(name1,"Projections_n%d.csv",mpiRank);
sprintf(name2,"Layer1ActivityWTA.csv");
sprintf(name3,"Layer1Activity.csv");
Meter* connMeter = new Meter(name1, Storage::CSV);
connMeter->AttachProjection(layer1->GetIncomingProjections()[0],0);
network->AddMeter(connMeter);
Meter* layerMeter = new Meter(name3, Storage::CSV);
layerMeter->AttachPopulation(layer1);
network->AddMeter(layerMeter);
Meter* eventLayerMeter=new Meter(name2, Storage::CSV);
eventLayerMeter->AttachPopulationModifier(eAdaptation);
network->AddMeter(eventLayerMeter);
int nrIters = 10;
int stimuliOn = 10;
layer1->SwitchOnOff(false); // fixed input
// store patterns: each pattern clamped for stimuliOn steps, nrIters passes.
for(unsigned int i=0;i<nrIters;i++)
{
for(unsigned int j=0;j<data.size();j++)
{
for(unsigned int k=0;k<stimuliOn; k++)
{
layer1->SetValuesAll(data[j]);
network->Simulate();
}
}
}
// random stimulation
// NOTE(review): rand()/RAND_MAX is INTEGER division (both operands are int),
// so it is 0 for every rand() value except RAND_MAX itself — randVec is
// effectively all zeros. Likely intended: 0.5f*((float)rand()/RAND_MAX).
vector<float> randVec(data[0].size());
for(unsigned int i=0;i<randVec.size();i++)
randVec[i] = 0.5f*float(rand()/RAND_MAX);
// mixture of the two stored patterns (element-wise sum).
vector<float> mixVec(data[0].size());
for(unsigned int i=0;i<mixVec.size();i++)
mixVec[i] = 1*(data[0][i] + data[1][i]);
layer1->SetValuesAll(mixVec);//randVec);
// Test without adaptation turned on
layer1->SwitchOnOff(true);
//eHebb->SetEtaHebb(0.0);
eBCM->SwitchOnOff(false);
eBcpnn->SwitchOnOff(false);
//.........这里部分代码省略.........