This article compiles typical usage examples of the C# MathNet.Numerics.LinearAlgebra.Double.DenseMatrix.Row method. If you are wondering what DenseMatrix.Row does in C# and how to use it, the curated code examples below may help. You can also read further about the containing class, MathNet.Numerics.LinearAlgebra.Double.DenseMatrix.
The following presents 15 code examples of the DenseMatrix.Row method, sorted by popularity by default.
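Before the examples, a minimal sketch of the basic pattern may help (the matrix values and names below are illustrative and not taken from any example): Row(i) returns a copy of row i as a Vector&lt;double&gt;, Row(i, result) copies the row into an existing vector, and changes are written back with SetRow.

using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;

public static void RowBasics()
{
    // A small 2x3 matrix (values are illustrative).
    var m = DenseMatrix.OfArray(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });
    // Row(i) returns a copy of row i as a Vector<double>.
    Vector<double> r = m.Row(0);
    // Changing the copy does not change the matrix; write it back with SetRow.
    r[0] = 10;
    m.SetRow(0, r);
    // Row(i, result) copies row i into an existing vector instead of allocating a new one.
    var buffer = new DenseVector(m.ColumnCount);
    m.Row(1, buffer);
    // ToArray() hands a row to APIs that expect a double[], as many of the examples below do.
    double[] rowValues = m.Row(1).ToArray();
}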
Example 1: NormalizeData
public DenseMatrix NormalizeData(DenseMatrix data)
{
var normalizedData = new DenseMatrix(data.RowCount, data.ColumnCount);
for (int i = 0; i < data.RowCount; i++)
{
normalizedData.SetRow(i, normalizeArrayInput[i].Process(data.Row(i).ToArray()));
}
return normalizedData;
}
Example 2: Error
private static double Error(DenseVector weights, DenseMatrix xM, DenseVector yV)
{
Debug.Assert(xM.RowCount == yV.Count);
Debug.Assert(xM.ColumnCount == weights.Count);
double wrong = 0.0;
for (int i = 0; i < xM.RowCount; i++)
{
var p = xM.Row(i) * weights;
if ((p < 0 ? -1 : 1) != yV[i])
wrong += 1;
}
return wrong / xM.RowCount;
}
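A minimal, illustrative call of the Error helper above; the weights, features, and ±1 labels are made-up values, and the snippet assumes it runs inside the same class:

var X = DenseMatrix.OfArray(new double[,] { { 1, 0.5 }, { 1, -0.2 }, { 1, 0.9 } });
var y = DenseVector.OfArray(new[] { 1.0, -1.0, 1.0 });
var w = DenseVector.OfArray(new[] { 0.1, 1.0 });
// Prints the fraction of rows where sign(x·w) disagrees with the label in y.
Console.WriteLine(Error(w, X, y));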
Example 3: Smooth
public void Smooth(ref double[,] inputValues)
{
// TODO: The matrix approach works but performs many element accesses; a hand-rolled
// implementation might reduce them, though it is not clear by how much.
// Copy the input values into a matrix so each row and column can be smoothed in place.
var inputMatrix = DenseMatrix.OfArray(inputValues);
for (int i = 0; i < inputMatrix.RowCount; i++)
{
inputMatrix.SetRow(i, Smooth(inputMatrix.Row(i).ToArray()));
}
for (int i = 0; i < inputMatrix.ColumnCount; i++)
{
inputMatrix.SetColumn(i, Smooth(inputMatrix.Column(i).ToArray()));
}
inputValues = inputMatrix.ToArray();
}
Example 4: FinishAndProcess
public void FinishAndProcess()
{
var priceData = new DenseMatrix(symbols.Length, numTicks);
for (int j = 0; j < symbols.Length; j++)
{
SortedList<DateTime, Tick> d = mktData[j].data.Data;
for (int k = 0; k < d.Count; k++)
{
priceData[j, k] = d.Values[k].BidClose;
}
}
for (int i = 0; i < symbols.Length; i++)
{
results[i] = function((DenseVector) priceData.Row(i));
}
}
Example 5: GetTopK2
public static Dictionary<int, double> GetTopK2(DenseMatrix userSimilarities, int uidTarget, int K)
{
Dictionary<int, double> topK = new Dictionary<int, double>(K);
Vector<double> uidSimilarities = userSimilarities.Row(uidTarget);
double minSimilarity = double.MinValue;
int minUid = int.MinValue;
foreach (Tuple<int, double> entry in uidSimilarities.EnumerateIndexed(Zeros.AllowSkip))
{
int uid = entry.Item1;
double similarity = entry.Item2;
if (uid == uidTarget) { continue; } // A user is not a neighbor of himself
if (topK.Count < K) // Fill the top K list until it is full
{
topK[uid] = similarity;
if (topK.Count == K)
{
// Find the least similar neighbor when it is full
minUid = topK.Aggregate((l, r) => l.Value < r.Value ? l : r).Key;
minSimilarity = topK[minUid];
}
}
else if (similarity > minSimilarity)
{
// Replace the least similar neighbor
topK.Remove(minUid); // The first time it does nothing as the minUid is not set
topK[uid] = similarity;
// Find the least similar neighbor
minUid = topK.Aggregate((l, r) => l.Value < r.Value ? l : r).Key;
minSimilarity = topK[minUid];
}
}
return topK;
}
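For illustration only, the same top-K selection can also be written by sorting the target user's similarity row. This sketch is not from the source; it trades the streaming update above for an O(n log n) sort and assumes System.Linq plus the Tuple-returning EnumerateIndexed overload used in the example:

public static Dictionary<int, double> GetTopK2Sorted(DenseMatrix userSimilarities, int uidTarget, int K)
{
    // Take the target user's similarity row, drop the user himself,
    // and keep the K entries with the largest similarity values.
    return userSimilarities.Row(uidTarget)
        .EnumerateIndexed(Zeros.AllowSkip)
        .Where(entry => entry.Item1 != uidTarget)
        .OrderByDescending(entry => entry.Item2)
        .Take(K)
        .ToDictionary(entry => entry.Item1, entry => entry.Item2);
}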
Example 6: Process
public static void Process(FXSession session, string symbol1, string symbol2, string timeframe, int length)
{
HistoricPriceEngine h1 = new HistoricPriceEngine(session);
h1.GetLongHistoricPrices(symbol1, timeframe, length);
while (!h1.Complete)
{
Thread.Sleep(100);
}
HistoricPriceEngine h2 = new HistoricPriceEngine(session);
h2.GetLongHistoricPrices(symbol2, timeframe, length);
while (!h2.Complete)
{
Thread.Sleep(100);
}
//-----------------------
var dateTimeList = new SortedList<DateTime, int>();
Quantum q1 = h1.Data;
Quantum q2 = h2.Data;
var priceData = new DenseMatrix(2, q1.Data.Count);
for (int j = 0; j < ((q1.Data.Count <= q2.Data.Count)?q1.Data.Count:q2.Data.Count); j++ )
{
dateTimeList.Add(q1.Data.Values[j].Time, 1);
priceData[0, j] = q1.Data.Values[j].BidClose;
priceData[1, j] = q2.Data.Values[j].BidClose;
}
Vector<double> price1 = priceData.Row(0);
Vector<double> price2 = priceData.Row(1);
//Statistics.ApplyFunction((DenseVector)price1, Math.Log);
//Statistics.ApplyFunction((DenseVector)price2, Math.Log);
DenseVector norm1 = price1.ToArray().NormalizeZScore();
DenseVector norm2 = price2.ToArray().NormalizeZScore();
var newsym = new string[] {symbol1, symbol2, "spread"};
var m = new DenseMatrix(6, norm1.Count);
m.SetRow(0, norm1);
m.SetRow(1, norm2);
m.SetRow(2, (norm1 - norm2).ToArray().NormalizeZScore());
string filename = symbol1.Replace('/', '_') + "-" + symbol2.Replace('/', '_') + ".html";
Visualize.GenerateMultiPaneGraph(newsym, dateTimeList.Keys.ToArray(), m, QSConstants.DEFAULT_DATA_FILEPATH + filename,
new ChartOption[]{new ChartOption(), new ChartOption(){Layover = true, YPosition = 0}, new ChartOption(){YPosition = 1} }, null, filename + ".json");
FileUpload.UploadFileToFTP(QSConstants.DEFAULT_DATA_FILEPATH + filename, filename);
FileUpload.UploadFileToFTP(QSConstants.DEFAULT_DATA_FILEPATH + filename + ".json", filename + ".json");
double Spread = m[2, m.ColumnCount - 1];
if (Spread > 2.0 && m[2, m.ColumnCount - 2] <= 2.0)
Emailer.SendEmail(symbol1 + "-" + symbol2 + " Spread Above 2.0", "Test");
if (Spread < -2.0 && m[2, m.ColumnCount - 2] >= -2.0)
Emailer.SendEmail(symbol1 + "-" + symbol2 + " Spread Below -2.0", "Test");
}
Example 7: Train
public void Train(DenseMatrix X, DenseVector d, DenseVector Kd)
{
int R = X.RowCount;
int N = X.ColumnCount;
int U = 0; //the number of neurons in the structure
var c = new DenseMatrix(R, 1);
var sigma = new DenseMatrix(R, 1);
var Q = new DenseMatrix((R + 1), (R + 1));
var O = new DenseMatrix(1, (R + 1));
var pT_n = new DenseMatrix((R + 1), 1);
double maxPhi = 0;
int maxIndex;
var Psi = new DenseMatrix(N, 1);
Console.WriteLine("Running...");
//for each observation n in X
for (int i = 0; i < N; i++)
{
Console.WriteLine(100*(i/(double) N) + "%");
var x = new DenseVector(R);
X.Column(i, x);
//if there are neurons in structure,
//update structure recursively.
if (U == 0)
{
c = (DenseMatrix) x.ToColumnMatrix();
sigma = new DenseMatrix(R, 1, SigmaZero);
U = 1;
Psi = CalculatePsi(X, c, sigma);
UpdateStructure(X, Psi, d, ref Q, ref O);
pT_n =
(DenseMatrix)
(CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) Psi.Row(i).ToRowMatrix()))
.Transpose();
}
else
{
StructureRecurse(X, Psi, d, i, ref Q, ref O, ref pT_n);
}
bool KeepSpinning = true;
while (KeepSpinning)
{
//Calculate the error and if-part criteria
double ee = pT_n.Multiply(O)[0, 0];
double approximationError = Math.Abs(d[i] - ee);
DenseVector Phi;
double SumPhi;
CalculatePhi(x, c, sigma, out Phi, out SumPhi);
maxPhi = Phi.Maximum();
maxIndex = Phi.MaximumIndex();
if (approximationError > delta)
{
if (maxPhi < threshold)
{
var tempSigma = new DenseVector(R);
sigma.Column(maxIndex, tempSigma);
double minSigma = tempSigma.Minimum();
int minIndex = tempSigma.MinimumIndex();
sigma[minIndex, maxIndex] = k_sigma*minSigma;
Psi = CalculatePsi(X, c, sigma);
UpdateStructure(X, Psi, d, ref Q, ref O);
var psi = new DenseVector(Psi.ColumnCount);
Psi.Row(i, psi);
pT_n =
(DenseMatrix)
CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) psi.ToRowMatrix())
.Transpose();
}
else
{
//add a new neuron and update structure
double distance = 0;
var cTemp = new DenseVector(R);
var sigmaTemp = new DenseVector(R);
//foreach input variable
for (int j = 0; j < R; j++)
{
distance = Math.Abs(x[j] - c[j, 0]);
int distanceIndex = 0;
//foreach neuron past 1
for (int k = 1; k < U; k++)
{
//......... part of the code omitted here .........
Example 8: StructureRecurse
public void StructureRecurse(DenseMatrix X, DenseMatrix Psi, DenseVector d, int n, ref DenseMatrix Q,
ref DenseMatrix O, ref DenseMatrix pT_n)
{
//O = O(t-1), O_next = O(t)
//O should be a column vector (in matrix form)
var x = new DenseVector(X.RowCount);
var psi = new DenseVector(Psi.ColumnCount);
X.Column(n, x);
Psi.Row(n, psi);
DenseMatrix p_n = CalculateGreatPsi((DenseMatrix) x.ToColumnMatrix(), (DenseMatrix) psi.ToRowMatrix());
pT_n = (DenseMatrix) p_n.Transpose();
double ee = Math.Abs(d[n] - (pT_n.Multiply(O))[0, 0]);
double temp = 1 + (pT_n.Multiply(Q)).Multiply(p_n)[0, 0];
double ae = Math.Abs(ee/temp);
if (ee >= ae)
{
var L = (DenseMatrix) Q.Multiply(p_n).Multiply(1/temp);
Q = (DenseMatrix) ((DenseMatrix.Identity(Q.RowCount).Subtract(L.Multiply(pT_n))).Multiply(Q));
O = (DenseMatrix) O.Add(L*ee);
}
else
{
Q = (DenseMatrix) DenseMatrix.Identity(Q.RowCount).Multiply(Q);
}
}
Example 9: Predict
public void Predict(DenseMatrix newPredictData, double[] newPredictPrices)
{
double error = 0;
int c = 0;
var newNormalizedPredictData = new DenseMatrix(newPredictData.RowCount, newPredictData.ColumnCount,
double.NaN);
for (int i = 0; i < newPredictData.RowCount; i++)
{
newNormalizedPredictData.SetRow(i, normalizeArrayInput[i].Process(newPredictData.Row(i).ToArray()));
}
double[] normalizedPrices = normalizeArrayOutput.Process(newPredictPrices);
var d = new DenseMatrix(2, normalizedPrices.Length + 1, double.NaN);
int count = 0;
for (int i = 0; i < normalizedPrices.Length; i++)
{
// calculate based on actual data
IMLData input = new BasicMLData(inputs);
for (int j = 0; j < input.Count; j++)
{
input.Data[j] = newNormalizedPredictData[j, i];
}
IMLData output = network.Compute(input);
double prediction = output.Data[0];
error +=
Math.Pow(
(normalizeArrayOutput.Stats.DeNormalize(prediction) - newPredictPrices[i])/newPredictPrices[i],
2);
c++;
d[0, count] = newPredictPrices[i];
d[1, count] = normalizeArrayOutput.Stats.DeNormalize(prediction);
count++;
}
/////////////////////////////////////////////////////////////////
IMLData input1 = new BasicMLData(inputs);
for (int j = 0; j < input1.Count; j++)
{
input1.Data[j] = newNormalizedPredictData[j, newNormalizedPredictData.ColumnCount - 1];
}
IMLData output1 = network.Compute(input1);
d[1, count] = normalizeArrayOutput.Stats.DeNormalize(output1.Data[0]);
/////////////////////////////////////////////////////////////////
error /= c;
error = Math.Pow(error, .5);
Console.WriteLine(error);
string[] symbols = {"actual", "predicted"};
Visualize.GeneratePredictionGraph(symbols, d, new DateTime(), new TimeSpan(24, 0, 0),
"C:\\Sangar\\resultfinal.html");
outputCorre =
StatisticsExtension.Correlation(d.Row(0).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn(),
d.Row(1).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn());
Console.WriteLine("ST2 Correlation: " + outputCorre);
outputRMSE = error;
Console.WriteLine("Predicted return for D+1:" +
(d[1, d.ColumnCount - 1] - d[1, d.ColumnCount - 2])/d[1, d.ColumnCount - 2]*100 +
" percent");
}
Example 10: Krige
public Krige(DenseMatrix xx, DenseMatrix yy, Powvargram vg)
{
// Initializer
x = xx;
vgram = vg;
n = xx.RowCount;
d = xx.ColumnCount;
dstar = new DenseMatrix(n + 1, 1);
vstar = new DenseMatrix(n + 1, 1);
v = new DenseMatrix(n + 1, n + 1);
y = new DenseMatrix(n + 1, n);
// Building matrix V,Y
for (int i = 0; i < n; i++)
{
y[i, 0] = yy[i, 0];
for (int j = i; j < n; j++)
{
double vvv = vgram.GetV((x.Row(i) - x.Row(j)).Norm(2));
v[i, j] = vvv;
v[j, i] = vvv;
}
v[i, n] = 1.0;
v[n, i] = 1.0;
}
v[n, n] = 0.0;
y[n, 0] = 0.0;
vi = v.Inverse();
yvi = vi.Transpose() * y;
}
Example 11: RunPLAvsSVM
static Tuple<double, double> RunPLAvsSVM(int experiments, int points)
{
const int TEST_POINTS = 10000;
Random rnd = new Random();
long svmWins = 0, svCount = 0;
for (int i = 1; i <= experiments; i++)
{
//pick a random line y = a * x + b
double x1 = rnd.NextDouble(), y1 = rnd.NextDouble(), x2 = rnd.NextDouble(), y2 = rnd.NextDouble();
var Wf = new DenseVector(3);
Wf[0] = 1;
Wf[1] = (y1 - y2) / (x1 * y2 - y1 * x2);
Wf[2] = (x2 - x1) / (x1 * y2 - y1 * x2);
Func<MathNet.Numerics.LinearAlgebra.Generic.Vector<double>, int> f = x => Wf.DotProduct(x) >= 0 ? 1 : -1;
//generate training set of N random points
var X = new DenseMatrix(points, 3);
do
for (int j = 0; j < points; j++)
{
X[j, 0] = 1;
X[j, 1] = rnd.NextDouble() * 2 - 1;
X[j, 2] = rnd.NextDouble() * 2 - 1;
}
while (Enumerable.Range(0, X.RowCount).All(j => f(X.Row(0)) == f(X.Row(j))));
var W = new DenseVector(3);
Func<MathNet.Numerics.LinearAlgebra.Generic.Vector<double>, int> h = x => W.DotProduct(x) >= 0 ? 1 : -1;
//run Perceptron
int k = 1;
while (Enumerable.Range(0, points).Any(j => h(X.Row(j)) != f(X.Row(j))))
{
//find all misclassified points
int[] M = Enumerable.Range(0, points).Where(j => h(X.Row(j)) != f(X.Row(j))).ToArray();
int m = M[rnd.Next(0, M.Length)];
int sign = f(X.Row(m));
W[0] += sign;
W[1] += sign * X[m, 1];
W[2] += sign * X[m, 2];
k++;
}
//calculate P[f(Xtest) != h(Xtest)]
DenseVector Xtest = new DenseVector(3);
Xtest[0] = 1;
int matches = 0;
for (int j = 0; j < TEST_POINTS; j++)
{
Xtest[1] = rnd.NextDouble() * 2 - 1;
Xtest[2] = rnd.NextDouble() * 2 - 1;
if (f(Xtest) == h(Xtest)) matches++;
}
double Ppla = (matches + 0.0) / TEST_POINTS;
//Run SVM
var prob = new svm_problem() {
x = Enumerable.Range(0, points).Select(j =>
new svm_node[] {
new svm_node() { index = 0, value = X[j, 1] },
new svm_node() { index = 1, value = X[j, 2] } }).ToArray(),
y = Enumerable.Range(0, points).Select(j => (double)f(X.Row(j))).ToArray(),
l = points };
var model = svm.svm_train(prob, new svm_parameter()
{
svm_type = (int)SvmType.C_SVC,
kernel_type = (int)KernelType.LINEAR,
C = 1000000,
eps = 0.001,
shrinking = 0
});
//calculate P[f(Xtest) != h_svm(Xtest)]
svm_node[] Xsvm = new svm_node[] {
new svm_node() { index = 0, value = 1.0 },
new svm_node() { index = 1, value = 1.0 } };
matches = 0;
for (int j = 0; j < TEST_POINTS; j++)
{
Xtest[1] = rnd.NextDouble() * 2 - 1;
Xsvm[0].value = Xtest[1];
Xtest[2] = rnd.NextDouble() * 2 - 1;
Xsvm[1].value = Xtest[2];
if (f(Xtest) == (svm.svm_predict(model, Xsvm) > 0 ? 1 : -1)) matches++;
}
double Psvm = (matches + 0.0) / TEST_POINTS;
svCount += model.l;
if (Psvm >= Ppla) svmWins++;
}
return Tuple.Create((svmWins + 0.0) / experiments, (svCount + 0.0) / experiments);
}
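For reference, a hedged usage sketch of RunPLAvsSVM; the experiment count and training-set size below are illustrative, not taken from the source:

// Run 1000 experiments, each with 10 random training points.
var result = RunPLAvsSVM(1000, 10);
Console.WriteLine("SVM outperformed PLA in " + result.Item1 * 100 + "% of runs");
Console.WriteLine("Average number of support vectors: " + result.Item2);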
Example 12: Predict
public void Predict(double[] newPredictData)
{
double error = 0;
int c = 0;
double[] newNormalizedData = normalizeArray.Process(newPredictData);
var d = new DenseMatrix(2, newNormalizedData.Length - WindowSize + 1, double.NaN);
int count = 0;
for (int i = WindowSize; i < newNormalizedData.Length; i++)
{
// calculate based on actual data
IMLData input = new BasicMLData(WindowSize);
for (int j = 0; j < input.Count; j++)
{
input.Data[j] = newNormalizedData[(i - WindowSize) + j];
}
IMLData output = network.Compute(input);
double prediction = output.Data[0];
error += Math.Pow((normalizeArray.Stats.DeNormalize(prediction) - newPredictData[i])/newPredictData[i],
2);
c++;
d[0, count] = newPredictData[i];
d[1, count] = normalizeArray.Stats.DeNormalize(prediction);
count++;
}
///////////////////////////////////////////////////////////////////////////////
var lastData = new double[WindowSize];
int count1 = 0;
for (int i = newNormalizedData.Length - WindowSize; i < newNormalizedData.Length; i++)
{
lastData[count1++] = newNormalizedData[i];
}
IMLData input1 = new BasicMLData(WindowSize);
for (int j = 0; j < input1.Count; j++)
{
input1.Data[j] = lastData[j];
}
IMLData output1 = network.Compute(input1);
d[1, count] = normalizeArray.Stats.DeNormalize(output1.Data[0]);
/////////////////////////////////////////////////////////////////////////////////
error /= c;
error = Math.Pow(error, .5);
Console.WriteLine(error);
OutputData = d.Row(1).ToArray();
string[] symbols = {"actual", "predicted"};
Visualize.GeneratePredictionGraph(symbols, d, new DateTime(), new TimeSpan(24, 0, 0),
QSConstants.DEFAULT_DATA_FILEPATH + write + ".html");
Console.WriteLine("ST1 Correlation: " +
StatisticsExtension.Correlation(
d.Row(0).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn(),
d.Row(1).ToArray().Take(d.ColumnCount - 1).ToArray().RawRateOfReturn()));
}
Example 13: FinishAndProcess
public void FinishAndProcess()
{
try
{
var priceData = new DenseMatrix(symbols.Length, numTicks);
for (int j = 0; j < symbols.Length; j++)
{
SortedList<DateTime, Tick> d = mktData[j].data.Data;
for (int k = 0; k < d.Count; k++)
{
//if (!symbols[j].Substring(0, 3).Equals("USD")) priceData[j, k] = 1/d.Values[k].BidClose;
priceData[j, k] = d.Values[k].BidOpen;
}
}
Vector<double> price1 = priceData.Row(0);
Vector<double> price2 = priceData.Row(1);
//Statistics.ApplyFunction((DenseVector)price1, Math.Log);
//Statistics.ApplyFunction((DenseVector)price2, Math.Log);
DenseVector norm1 = price1.ToArray().NormalizeZScore();
DenseVector norm2 = price2.ToArray().NormalizeZScore();
var newsym = new string[symbols.Length + 4];
for (int i = 0; i < symbols.Length; i++) newsym[i] = symbols[i];
newsym[2] = "spread";
newsym[3] = "EMA5";
newsym[4] = "EMA15";
newsym[5] = "EMA30";
var m = new DenseMatrix(6, norm1.Count);
m.SetRow(0, norm1);
m.SetRow(1, norm2);
m.SetRow(2, (norm1 - norm2).ToArray().NormalizeZScore());
m.SetRow(3, EMA.CalcEMA(m.Row(2).ToArray(), 5));
m.SetRow(4, EMA.CalcEMA(m.Row(2).ToArray(), 15));
m.SetRow(5, EMA.CalcEMA(m.Row(2).ToArray(), 30));
string filename = symbols[0].Replace('/', '_') + "-" + symbols[1].Replace('/', '_') + ".html";
((DenseVector) m.Row(0)).GenerateSimpleGraph("C:\\Sangar\\result.html");
Visualize.GenerateMultiSymbolGraph(newsym, m, DateTime.Now.AddSeconds(-60*5*300), new TimeSpan(0, 5, 0),
"C:\\Sangar\\" + filename);
FileUpload.UploadFileToFTP("C:\\Sangar\\" + filename, filename);
Spread = m[2, m.ColumnCount - 1];
if (Spread > 2.0 && m[2, m.ColumnCount - 2] <= 2.0)
Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Above 2.0", "Test");
if (Spread < -2.0 && m[2, m.ColumnCount - 2] >= -2.0)
Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Below -2.0", "Test");
//if (m[2, m.ColumnCount - 1] < 0.5 && m[2, m.ColumnCount - 2] >= 0.5)
// Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Below 0.5", "Test");
//if (m[2, m.ColumnCount - 1] > -0.5 && m[2, m.ColumnCount - 2] <= -0.5)
// Emailer.SendEmail(symbols[0] + "-" + symbols[1] + " Spread Above -0.5", "Test");
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
}
Example 14: FinishAndProcess
public void FinishAndProcess()
{
try
{
var priceData = new DenseMatrix(symbols.Length, numTicks);
for (int j = 0; j < symbols.Length; j++)
{
SortedList<DateTime, Tick> d = mktData[j].data.Data;
for (int k = 0; k < d.Count; k++)
{
priceData[j, k] = d.Values[k].BidClose;
}
}
for (int i = 0; i < symbols.Length; i++)
{
for (int j = 0; j < symbols.Length; j++)
{
double[] pDatai = priceData.Row(i).ToArray();
double[] pDataj = priceData.Row(j).ToArray();
switch (cType)
{
case CovarianceType.LogReturn:
{
pDatai = priceData.Row(i).ToArray().LogRateOfReturn();
pDataj = priceData.Row(j).ToArray().LogRateOfReturn();
break;
}
case CovarianceType.RawReturn:
{
pDatai = priceData.Row(i).ToArray().RawRateOfReturn();
pDataj = priceData.Row(j).ToArray().RawRateOfReturn();
break;
}
}
correlation[i, j] = StatisticsExtension.Correlation(pDatai, pDataj);
covariance[i, j] = StatisticsExtension.Covariance((DenseVector) priceData.Row(i),
(DenseVector) priceData.Row(j));
}
}
Visualize.GenerateHeatMatrix(symbols, correlation, "C:\\Users\\Ethan\\Work\\QuantSysdata.html");
Console.WriteLine("Finished Generating Correlation Matrix.");
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
}
Example 15: MakeClassification
//......... part of the code omitted here .........
if (weights == null)
{
weights = Enumerable.Repeat(1.0 / nClasses, nClasses).ToList();
weights[weights.Count - 1] = 1.0 - weights.Take(weights.Count - 1).Sum();
}
var nSamplesPerCluster = new List<int>();
for (int k = 0; k < nClusters; k++)
{
nSamplesPerCluster.Add(
(int)(nSamples * weights[k % nClasses] / nClustersPerClass));
}
for (int i = 0; i < nSamples - nSamplesPerCluster.Sum(); i++)
{
nSamplesPerCluster[i % nClusters] += 1;
}
// Initialize X and y
Matrix x = new DenseMatrix(nSamples, nFeatures);
int[] y = new int[nSamples];
// Build the polytope
Matrix c = new DenseMatrix(1 << nInformative, nInformative);
for (int i = 0; i < 1 << nInformative; i++)
{
var row = new DenseVector(nInformative);
for (int bitN = 0; bitN < nInformative; bitN++)
{
row[bitN] = (i & (1 << bitN)) != 0 ? classSep : -classSep; // test whether bit bitN of i is set
}
c.SetRow(i, row);
}
if (!hypercube)
{
for (int k = 0; k < nClusters; k++)
{
c.SetRow(k, c.Row(k) * generator.NextDouble());
}
for (int f = 0; f < nInformative; f++)
{
c.SetColumn(f, c.Column(f) * generator.NextDouble());
}
}
// todo:
// generator.shuffle(C)
// Loop over all clusters
int pos = 0;
int posEnd = 0;
for (int k = 0; k < nClusters; k++)
{
// Number of samples in cluster k
int nSamplesK = nSamplesPerCluster[k];
// Define the range of samples
pos = posEnd;
posEnd = pos + nSamplesK;
// Assign labels