

C# Vector.Copy Method Code Examples

This article collects typical usage examples of the C# Vector.Copy method from numl.Math.LinearAlgebra. If you are looking for how to call C# Vector.Copy, what it does, or working examples of it, the curated code samples below may help. You can also explore further usage examples of numl.Math.LinearAlgebra.Vector, the class this method belongs to.


Seven code examples of the Vector.Copy method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
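Before the individual examples, a minimal sketch of the core idea (illustrative values; this assumes Copy returns an independent, element-wise copy, which is how all the examples below use it): modifying the copy leaves the original vector unchanged.

    Vector original = new Vector(new double[] { 1, 2, 3 });
    Vector clone = original.Copy();   // independent copy of the data
    clone[0] = 99;                    // only the clone changes
    // original is still [1, 2, 3]; clone is now [99, 2, 3]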

Example 1: Logistic_Regression_Test_CostFunction_2_WithoutRegularization

        public void Logistic_Regression_Test_CostFunction_2_WithoutRegularization()
        {
            Matrix X = new[,] {
             { 8, 1, 6 },
             { 3, 5, 7 },
             { 4, 9, 2 }};

            Vector y = new Vector(new double[] { 1, 1, 0 });
            Vector theta = new Vector(new double[] { 0, 1, 0 });

            ICostFunction logisticCostFunction = new LogisticCostFunction()
            {
                X = X,
                Y = y,
                Lambda = 0,
            };

            double cost = logisticCostFunction.ComputeCost(theta.Copy());

            theta = logisticCostFunction.ComputeGradient(theta.Copy());

            Assert.Equal(3.1067d, System.Math.Round(cost, 4));

            Assert.Equal(0.6093d, System.Math.Round(theta[0], 4));
            Assert.Equal(2.8988d, System.Math.Round(theta[1], 4));
            Assert.Equal(0.1131d, System.Math.Round(theta[2], 4));
        }
Developer: sethjuarez, Project: numl, Lines of code: 27, Source file: LogisticRegressionTests.cs

Example 2: Run

        /// <summary>
        ///     Performs gradient descent to optimise theta parameters.
        /// </summary>
        /// <param name="theta">Initial Theta (Zeros)</param>
        /// <param name="x">Training set</param>
        /// <param name="y">Training labels</param>
        /// <param name="maxIterations">Maximum number of iterations to run gradient descent</param>
        /// <param name="learningRateAlpha">The learning rate (Alpha)</param>
        /// <param name="costFunction">Cost function to use for gradient descent</param>
        /// <param name="lambda">The regularization constant to apply</param>
        /// <param name="regularizer">The regularization function to apply</param>
        /// <returns></returns>
        public static Tuple<double, Vector> Run(
            Vector theta, 
            Matrix x, 
            Vector y, 
            int maxIterations, 
            double learningRateAlpha, 
            ICostFunction costFunction, 
            double lambda, 
            IRegularizer regularizer)
        {
            var bestTheta = theta.Copy();
            var bestCost = double.PositiveInfinity;

            double currentCost = 0;
            var currentGradient = theta.Copy();

            for (var i = 0; i <= maxIterations; i++)
            {
                currentCost = costFunction.ComputeCost(bestTheta, x, y, lambda, regularizer);
                currentGradient = costFunction.ComputeGradient(bestTheta, x, y, lambda, regularizer);

                if (currentCost < bestCost)
                {
                    bestTheta = bestTheta - learningRateAlpha * currentGradient;
                    bestCost = currentCost;
                }
                else
                {
                    learningRateAlpha = learningRateAlpha * 0.99;
                }
            }

            return new Tuple<double, Vector>(bestCost, bestTheta);
        }
Developer: ChewyMoon, Project: Cupcake, Lines of code: 46, Source file: GradientDescent.cs
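A hypothetical usage sketch of the Run method above. The class name GradientDescent, the training data, and the hyper-parameters are assumptions for illustration; LogisticCostFunction and L2Regularizer appear in the other examples on this page, though whether they implement the exact ICostFunction/IRegularizer interfaces used here depends on the library version.

        // Illustrative data only: three training rows, binary labels, theta initialised to zeros.
        Matrix x = new[,] {
            { 8, 1, 6 },
            { 3, 5, 7 },
            { 4, 9, 2 }};
        Vector y = new Vector(new double[] { 1, 0, 1 });
        Vector theta = new Vector(new double[] { 0, 0, 0 });

        Tuple<double, Vector> result = GradientDescent.Run(
            theta, x, y,
            maxIterations: 400,
            learningRateAlpha: 0.1,
            costFunction: new LogisticCostFunction(),
            lambda: 1.0,
            regularizer: new L2Regularizer());

        double bestCost = result.Item1;    // lowest cost reached
        Vector bestTheta = result.Item2;   // theta that produced it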

Example 3: Logistic_Regression_Test_CostFunction_1

        public void Logistic_Regression_Test_CostFunction_1()
        {
            Matrix X = new[,]
            {{ 1, 1, 1 },
             { 1, 1, 1 },
             { 1, 1, 1 },
             { 8, 1, 6 },
             { 3, 5, 7 },
             { 4, 9, 2 }};

            Vector y = new Vector(new double[] { 1, 0, 1, 0, 1, 0 });
            Vector theta = new Vector(new double[] { 0, 1, 0 });

            ICostFunction logisticCostFunction = new LogisticCostFunction()
            {
                X = X,
                Y = y,
                Lambda = 3,
                Regularizer = new L2Regularizer()
            };

            double cost = logisticCostFunction.ComputeCost(theta.Copy());

            theta = logisticCostFunction.ComputeGradient(theta.Copy());

            Assert.Equal(2.2933d, System.Math.Round(cost, 4));

            Assert.Equal(1.6702d, System.Math.Round(theta[0], 4));
            Assert.Equal(2.1483d, System.Math.Round(theta[1], 4));
            Assert.Equal(1.0887d, System.Math.Round(theta[2], 4));
        }
Developer: sethjuarez, Project: numl, Lines of code: 31, Source file: LogisticRegressionTests.cs

Example 4: Calc

 /// <summary>
 /// Applies f to each (index, value) pair of a copy of v and returns the resulting vector.
 /// </summary>
 public static Vector Calc(Vector v, Func<int, double, double> f)
 {
     var result = v.Copy();
     for (int i = 0; i < v.Length; i++)
         result[i] = f(i, result[i]);
     return result;
 }
Developer: budbjames, Project: numl, Lines of code: 7, Source file: VectorStatics.cs
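A quick usage sketch of Calc (hypothetical values; this assumes the containing static class is in scope). Because Calc works on a copy, the input vector is left untouched.

     Vector v = new Vector(new double[] { 1, 2, 3, 4 });
     Vector squared = Calc(v, (i, value) => value * value);   // squared == [1, 4, 9, 16], v is unchanged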

Example 5: IncreaseDimensions

 /// <summary>
 /// Adds a specified number of polynomial features to the training / test Vector.
 /// </summary>
 /// <param name="x">Training / Testing record</param>
 /// <param name="polynomialFeatures">Number of polynomial features to add</param>
 /// <returns></returns>
 public static Vector IncreaseDimensions(Vector x, int polynomialFeatures)
 {
     Vector xtemp = x.Copy();
     int maxCols = xtemp.Length;
     for (int j = 0; j < maxCols - 1; j++)
     {
         for (int k = 0; k <= polynomialFeatures; k++)
         {
             for (int m = 0; m <= k; m++)
             {
                 double v = (System.Math.Pow(xtemp[j], k - m) * System.Math.Pow(xtemp[j + 1], m));
                 xtemp = xtemp.Insert(xtemp.Length - 1, v);
             }
         }
     }
     return xtemp;
 }
Developer: sethjuarez, Project: numl, Lines of code: 23, Source file: LogisticRegressionModel.cs
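A small sketch of what the expansion generates (illustrative values): for a two-element input [x1, x2] and polynomialFeatures = 2, the loops produce the terms x1^(k-m) * x2^m for k = 0..2 and m = 0..k. Where exactly each term lands depends on the semantics of Vector.Insert; assuming Insert(index, value) places the value at the given index, each new term is inserted just before the final element.

     Vector x = new Vector(new double[] { 2, 3 });
     Vector expanded = IncreaseDimensions(x, 2);
     // Generated terms, in loop order: 1, 2, 3, 4, 6, 9
     // (x1^0*x2^0, x1, x2, x1^2, x1*x2, x2^2), each inserted ahead of the last element.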

Example 6: IncreaseDimensions

        /// <summary>
        ///     Adds a specified number of polynomial features to the training / test Vector.
        /// </summary>
        /// <param name="x">Training / Testing record</param>
        /// <param name="polynomialFeatures">Number of polynomial features to add</param>
        /// <returns></returns>
        public static Vector IncreaseDimensions(Vector x, int polynomialFeatures)
        {
            var xtemp = x.Copy();
            var maxCols = xtemp.Length;
            for (var j = 0; j < maxCols - 1; j++)
            {
                for (var k = 0; k <= polynomialFeatures; k++)
                {
                    for (var m = 0; m <= k; m++)
                    {
                        var v = Math.Pow(xtemp[j], (double)(k - m)) * Math.Pow(xtemp[j + 1], (double)m);
                        xtemp = xtemp.Insert(xtemp.Length - 1, v);
                    }
                }
            }

            return xtemp;
        }
Developer: ChewyMoon, Project: Cupcake, Lines of code: 24, Source file: FeatureDimensions.cs

Example 7: Logistic_Regression_Test_CostFunction_1

        public void Logistic_Regression_Test_CostFunction_1()
        {
            Matrix X = new[,] 
            {{ 1, 1, 1 },
             { 1, 1, 1 },
             { 1, 1, 1 },
             { 8, 1, 6 },
             { 3, 5, 7 },
             { 4, 9, 2 }};

            Vector y = new Vector(new double[] { 1, 0, 1, 0, 1, 0 });
            Vector theta = new Vector(new double[] { 0, 1, 0 });

            numl.Math.Functions.Cost.ICostFunction logisticCostFunction = new numl.Math.Functions.Cost.LogisticCostFunction();
            numl.Math.Functions.Regularization.IRegularizer regularizer = new numl.Math.Functions.Regularization.Regularization();

            double cost = logisticCostFunction.ComputeCost(theta.Copy(), X, y, 3, regularizer);
            
            theta = logisticCostFunction.ComputeGradient(theta.Copy(), X, y, 3, regularizer);

            Assert.AreEqual(2.2933d, System.Math.Round(cost, 4));

            Assert.AreEqual(1.6702d, System.Math.Round(theta[0], 4));
            Assert.AreEqual(2.1483d, System.Math.Round(theta[1], 4));
            Assert.AreEqual(1.0887d, System.Math.Round(theta[2], 4));
        }
Developer: m-abubakar, Project: numl, Lines of code: 26, Source file: LogisticRegressionTests.cs

