当前位置: 首页>>代码示例>>C#>>正文


C# SparseMatrix.EnumerateRowsIndexed方法代码示例

本文整理汇总了C#中MathNet.Numerics.LinearAlgebra.Double.SparseMatrix.EnumerateRowsIndexed方法的典型用法代码示例。如果您正苦于以下问题:C# SparseMatrix.EnumerateRowsIndexed方法的具体用法?C# SparseMatrix.EnumerateRowsIndexed怎么用?C# SparseMatrix.EnumerateRowsIndexed使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在MathNet.Numerics.LinearAlgebra.Double.SparseMatrix的用法示例。


在下文中一共展示了SparseMatrix.EnumerateRowsIndexed方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。

示例1: SpeedOfGetRow

        /// <summary>
        /// Benchmarks three ways of reading rows out of a sparse matrix:
        /// Matrix.Row(i), EnumerateRowsIndexed(), and plain indexing into a
        /// pre-built List of row vectors. List indexing is roughly 2000x
        /// faster than Matrix.Row() or enumeration.
        /// </summary>
        public static void SpeedOfGetRow()
        {
            const int size = 1000;
            const int repeats = 10;

            // Build a 1000x1000 sparse matrix whose rows are all copies of a
            // random vector sparsified by zeroing entries with |x| < 1.8.
            SparseMatrix matrix = new SparseMatrix(size, size);
            SparseVector rowTemplate = SparseVector.OfVector(Vector.Build.Random(size));
            rowTemplate.CoerceZero(1.8);
            for (int r = 0; r < size; r++)
            {
                matrix.SetRow(r, rowTemplate);
            }

            // Materialize every row once up front for the list-indexing variant.
            List<Vector<double>> cachedRows = new List<Vector<double>>(matrix.EnumerateRows());

            // Variant 1: fetch each row through Matrix.Row(i).
            Utils.StartTimer();
            for (int repeat = 0; repeat < repeats; repeat++)
            {
                for (int r = 0; r < size; r++)
                {
                    double sample = matrix.Row(r)[0];
                }
            }
            Utils.StopTimer();

            // Variant 2: walk the rows via EnumerateRowsIndexed().
            Utils.StartTimer();
            for (int repeat = 0; repeat < repeats; repeat++)
            {
                foreach (var indexedRow in matrix.EnumerateRowsIndexed())
                {
                    double sample = indexedRow.Item2[0];
                }
            }
            Utils.StopTimer();

            // Variant 3: index into the pre-built list of row vectors.
            Utils.StartTimer();
            for (int repeat = 0; repeat < repeats; repeat++)
            {
                for (int r = 0; r < size; r++)
                {
                    double sample = cachedRows[r][0];
                }
            }
            Utils.StopTimer();
        }
开发者ID:lawrencewu,项目名称:RecSys,代码行数:44,代码来源:ExperimentOfSpeed.cs

示例2: PredictPrefRelations

        /// <summary>
        /// Predicts preference relations for the user/item pairs flagged in
        /// PR_unknown, using latent factors learned from PR_train.
        /// </summary>
        /// <param name="PR_train">Training preference relations.</param>
        /// <param name="PR_unknown">Sparse matrix whose rows list the unknown preferences to predict per user.</param>
        /// <param name="maxEpoch">Number of training epochs for factor learning.</param>
        /// <param name="learnRate">Gradient learning rate.</param>
        /// <param name="regularizationOfUser">Regularization weight for user factors.</param>
        /// <param name="regularizationOfItem">Regularization weight for item factors.</param>
        /// <param name="factorCount">Dimensionality of the latent factor vectors.</param>
        /// <returns>The predicted preference relations.</returns>
        public static PrefRelations PredictPrefRelations(PrefRelations PR_train, SparseMatrix PR_unknown,
            int maxEpoch, double learnRate, double regularizationOfUser, double regularizationOfItem, int factorCount)
        {
            // Learn one latent vector per user and per item from the training data.
            List<Vector<double>> userFactors;
            List<Vector<double>> itemFactors;
            LearnLatentFeatures(PR_train, maxEpoch, learnRate, regularizationOfUser, regularizationOfItem, factorCount, out userFactors, out itemFactors);

            PrefRelations predictions = new PrefRelations(PR_train.ItemCount);
            Object resultGate = new Object();

            // Rows of PR_unknown are independent per user, so predict them in parallel.
            Parallel.ForEach(PR_unknown.EnumerateRowsIndexed(), indexedRow =>
            {
                int userIndex = indexedRow.Item1;
                Vector<double> unknownsOfUser = indexedRow.Item2;
                SparseMatrix predictedOfUser = new SparseMatrix(PR_train.ItemCount, PR_train.ItemCount);

                // Predict each unknown preference of this user.
                foreach (var unknown in unknownsOfUser.EnumerateIndexed(Zeros.AllowSkip))
                {
                    int itemIndex_i = unknown.Item1;
                    // NOTE(review): the stored element VALUE is cast to the second item's
                    // index — PR_unknown apparently encodes item j in the cell value;
                    // confirm against how callers build PR_unknown.
                    int itemIndex_j = (int)unknown.Item2;

                    // Eq. 2: user factor dotted with the difference of the two item factors.
                    double rawEstimate = userFactors[userIndex].DotProduct(itemFactors[itemIndex_i] - itemFactors[itemIndex_j]);
                    // pi_uij in the paper: squash the raw estimate via the inverse logit.
                    double squashedEstimate = Core.SpecialFunctions.InverseLogit(rawEstimate);
                    predictedOfUser[itemIndex_i, itemIndex_j] = squashedEstimate;
                }

                // Serialize writes into the shared result container.
                lock (resultGate)
                {
                    predictions[userIndex] = predictedOfUser;
                }
            });

            return predictions;
        }
开发者ID:lawrencewu,项目名称:RecSys,代码行数:38,代码来源:PrefNMF.cs

示例3: PredictRatings

        /// <summary>
        /// Ordinal Matrix Factorization.
        /// </summary>
        /// <param name="R_train">The matrix contains training ratings</param>
        /// <param name="R_unknown">The matrix contains ones indicating unknown ratings</param>
        /// <param name="R_scorer">This matrix contains ratings predicted by the scorer on
        /// both the R_train and R_unknown sets</param>
        /// <returns>The predicted ratings on R_unknown</returns>
        #region PredictRatings
        public static string PredictRatings(SparseMatrix R_train, SparseMatrix R_unknown,
 SparseMatrix R_scorer, List<double> quantizer, out DataMatrix R_predicted,
            out Dictionary<Tuple<int, int>, List<double>> OMFDistributionByUserItem)
        {
            StringBuilder log = new StringBuilder();
            /************************************************************
             *   Parameterization and Initialization
            ************************************************************/
            #region Parameterization and Initialization
            // This matrix stores predictions
            SparseMatrix R_predicted_out = (SparseMatrix)Matrix.Build.Sparse(R_unknown.RowCount, R_unknown.ColumnCount);
            Dictionary<Tuple<int, int>, List<double>> OMFDistributionByUserItem_out =
                new Dictionary<Tuple<int, int>, List<double>>(R_unknown.NonZerosCount);

            // User specified parameters
            double maxEpoch = Config.OMF.MaxEpoch;
            double learnRate = Config.OMF.LearnRate;
            double regularization = Config.OMF.Regularization;
            int intervalCount = quantizer.Count;
            int userCount = R_train.RowCount;

            // Parameters for each user
            Dictionary<int, ParametersOfUser> paramtersByUser = new Dictionary<int, ParametersOfUser>(R_train.RowCount);

            // Compute initial values of t1 and betas 
            // that will be used for all users, Eq. 5
            double t1_initial = (double)(quantizer[0] + quantizer[1]) / 2;
            Vector<double> betas_initial = Vector.Build.Dense(quantizer.Count - 2);
            for (int i = 1; i <= betas_initial.Count; i++)
            {
                double t_r = t1_initial;
                double t_r_plus_1 = (quantizer[i] + quantizer[i + 1]) * 0.5f;
                betas_initial[i - 1] = Math.Log(t_r_plus_1 - t_r); // natural base
                t_r = t_r_plus_1;
            }

            // Initialize parameters (t1, betas) for each user
            for (int indexOfUser = 0; indexOfUser < R_train.RowCount; indexOfUser++)
            {
                paramtersByUser[indexOfUser] = new ParametersOfUser(t1_initial, betas_initial);
            }
            #endregion

            /************************************************************
             *   Learn parameters from training data R_train and R_score
            ************************************************************/
            #region Learn parameters from training data R_train and R_score
            // Learn parameters for each user, note that each user has his own model
            Object lockMe = new Object();
            Parallel.ForEach(R_train.EnumerateRowsIndexed(), row =>
            {
                int indexOfUser = row.Item1;
                SparseVector ratingsOfUser = (SparseVector)row.Item2;

                // Store this user's ratings from R_train and correpsonding ratings from scorer
                List<double> ratingsFromScorer = new List<double>(ratingsOfUser.NonZerosCount);
                List<double> ratingsFromRTrain = new List<double>(ratingsOfUser.NonZerosCount);
                foreach (var element in ratingsOfUser.EnumerateIndexed(Zeros.AllowSkip))
                {
                    int indexOfItem = element.Item1;
                    double rating = element.Item2;
                    // Ratings need to be added in the same order
                    ratingsFromScorer.Add(R_scorer[indexOfUser, indexOfItem]);
                    ratingsFromRTrain.Add(rating);
                }

                Debug.Assert(ratingsFromScorer.Count == ratingsOfUser.NonZerosCount);
                Debug.Assert(ratingsFromRTrain.Count == ratingsOfUser.NonZerosCount);

                // Parameters for the current user are estimated by
                // maximizing the log likelihood (Eq. 21) using stochastic gradient ascent
                // Eq. 22
                double t1 = paramtersByUser[indexOfUser].t1;
                Vector<double> betas = paramtersByUser[indexOfUser].betas;
                for (int epoch = 0; epoch < maxEpoch; epoch++)
                {
                    for (int i = 0; i < ratingsFromRTrain.Count; i++)
                    {
                        double ratingFromRTrain = ratingsFromRTrain[i];
                        double ratingFromScorer = ratingsFromScorer[i];

                        int r = quantizer.IndexOf(ratingFromRTrain);    // r is the interval that the rating falls into
                        double probLE_r = ComputeProbLE(ratingFromScorer, r, t1, betas);   // Eq. 9
                        double probLE_r_minus_1 = ComputeProbLE(ratingFromScorer, r - 1, t1, betas);
                        double probE_r = probLE_r - probLE_r_minus_1;    // Eq. 10

                        // Compute derivatives/gradients
                        double derivativeOft1 = learnRate / probE_r * (probLE_r * (1 - probLE_r) * DerivativeOfBeta(r, 0, t1)
                                - probLE_r_minus_1 * (1 - probLE_r_minus_1) * DerivativeOfBeta(r - 1, 0, t1)
                                - Config.OMF.Regularization * t1);

//.........这里部分代码省略.........
开发者ID:lawrencewu,项目名称:RecSys,代码行数:101,代码来源:OMF.cs


注:本文中的MathNet.Numerics.LinearAlgebra.Double.SparseMatrix.EnumerateRowsIndexed方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。