

C# clsDBQueryManager.InsertOrUpdateScrapeSetting Method Code Examples

This article collects typical usage examples of the BaseLib.clsDBQueryManager.InsertOrUpdateScrapeSetting method in C#. If you are wondering how clsDBQueryManager.InsertOrUpdateScrapeSetting is used, what it is for, or what calling it looks like in practice, the hand-picked examples below should help. You can also browse further usage examples for the containing class, BaseLib.clsDBQueryManager.


Five code examples of the clsDBQueryManager.InsertOrUpdateScrapeSetting method are shown below, sorted by popularity by default.
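Before diving into the examples, note the call shape they share: the method appears to take a scraped user ID, a username, and a tweet ID (Example 4, from an older revision, passes only the first two). The sketch below shows a minimal call based on that inferred signature; the signature is an assumption drawn from these call sites, not from the BaseLib source.

        // Minimal sketch, assuming the signature inferred from the call sites below:
        // void InsertOrUpdateScrapeSetting(string userId, string userName, string tweetId)
        clsDBQueryManager DataBase = new clsDBQueryManager();
        DataBase.InsertOrUpdateScrapeSetting("1234567890", "someUserName", "987654321");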

Example 1: ScrapeKeywordSeacrh

        private void ScrapeKeywordSeacrh()
        {
            try
            {
                TwitterDataScrapper TweetData = new TwitterDataScrapper();
                if (!string.IsNullOrEmpty(txtRecords.Text) && NumberHelper.ValidateNumber(txtRecords.Text))
                {
                    TweetData.noOfRecords = Convert.ToInt32(txtRecords.Text);
                }
                else
                {
                    TweetData.noOfRecords = 100;
                }

                //List<TwitterDataScrapper.StructTweetIDs> data = TweetData.GetTweetData(txtScrapeKeyword.Text);

                List<TwitterDataScrapper.StructTweetIDs> data = TweetData.KeywordStructData(txtScrapeKeyword.Text);

                data = DistinctDataList(data);

                if (!(data.Count() > 0))
                {
                    AddToScrapeLogs("Request Not Complted");
                    AddToScrapeLogs("Requesting For 100 USer ids");
                    data = TweetData.GetTweetData(txtScrapeKeyword.Text);
                }

                AddToScrapeLogs(data.Count + " User ids Scraped ");

                AddToScrapeLogs("Please Wait Till Data Is Retrieving");
                
                int counter = 0;

                if (!File.Exists(Globals.Path_KeywordScrapedList))
                {
                    GlobusFileHelper.AppendStringToTextfileNewLine("Keyword , User-id , Username" , Globals.Path_KeywordScrapedList);
                }

                foreach (TwitterDataScrapper.StructTweetIDs item in data)
                {
                    if (!string.IsNullOrEmpty(item.username__Tweet_User) && item.ID_Tweet_User != "null")
                    {
                        Globals.lstScrapedUserIDs.Add(item.ID_Tweet_User);
                        GlobusFileHelper.AppendStringToTextfileNewLine(txtScrapeKeyword.Text + "," + item.ID_Tweet_User + "," + item.username__Tweet_User, Globals.Path_KeywordScrapedList);
                        //AddToScrapeLogs(item.ID_Tweet_User);

                    }
                    
                }
                //AddToScrapeLogs("Retrieving data");
                AddToScrapeLogs("Adding Data To DataBase");
                Globals.lstScrapedUserIDs = Globals.lstScrapedUserIDs.Distinct().ToList();

                if (!File.Exists(Globals.Path_KeywordScrapedList))
                {
                    GlobusFileHelper.AppendStringToTextfileNewLine("KEYWORD:USER ID:USERNAME ", Globals.Path_KeywordScrapedList);
                }

                new Thread(() =>
                {
                    foreach (TwitterDataScrapper.StructTweetIDs item in data)
                    {
                        if (!string.IsNullOrEmpty(item.username__Tweet_User) && item.ID_Tweet_User != "null")
                        {
                            AddToScrapeLogs(item.ID_Tweet_User);
                            clsDBQueryManager DataBase = new clsDBQueryManager();
                            DataBase.InsertOrUpdateScrapeSetting(item.ID_Tweet_User, item.username__Tweet_User, item.ID_Tweet);
                        }
                    }
                }).Start();

                if (Globals.IsDirectedFromFollower)
                {
                    Thread.Sleep(1000);
                    Globals.IsDirectedFromFollower = false;
                    AddToLog_Follower(data.Count + " User ids Scraped and Added To Follow List");
                    tabMain.Invoke(new MethodInvoker(delegate
                    {
                        tabMain.SelectedIndex = 2;
                    }));

                    //tabMain.SelectedIndex = 2;
                }
            }
            catch (Exception ex)
            {
                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> ScrapeKeywordSeacrh() --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> ScrapeKeywordSeacrh() --> " + ex.Message, Globals.Path_TwtErrorLogs);
            }
        }
Developer: prog-moh, Project: twtboard, Lines of code: 90, Source: frmMain-New+UI.cs
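Example 1 performs the inserts on a background thread so the UI stays responsive, constructing a new clsDBQueryManager for every row. A variant that reuses one manager instance for the whole batch is sketched below; whether the class is safe to reuse across calls is an assumption the BaseLib source would need to confirm.

        // Variant sketch (assumption: clsDBQueryManager can be reused across calls;
        // the original code creates a new instance per scraped user).
        new Thread(() =>
        {
            clsDBQueryManager DataBase = new clsDBQueryManager();
            foreach (TwitterDataScrapper.StructTweetIDs item in data)
            {
                if (!string.IsNullOrEmpty(item.username__Tweet_User) && item.ID_Tweet_User != "null")
                {
                    DataBase.InsertOrUpdateScrapeSetting(item.ID_Tweet_User, item.username__Tweet_User, item.ID_Tweet);
                }
            }
        }).Start();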

Example 2: threadStartScrape


//......... part of the code is omitted here .........
                            Thread.Sleep(1000);
                        }
                    }
                    catch (Exception ex)
                    {
                        Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> btnScrapeUser_Click() -- chkboxScrapeFollowers.Checked --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                        Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> btnScrapeUser_Click() -- chkboxScrapeFollowers.Checked --> " + ex.Message, Globals.Path_TwtErrorLogs);
                    }
                }
               
                if (chkboxScrapeFollowings.Checked)
                {
                    try
                    {
                        if (!File.Exists(Globals.Path_ScrapedFollowingsList))
                        {
                            GlobusFileHelper.AppendStringToTextfileNewLine("User_ID , FollowingsUserID", Globals.Path_ScrapedFollowingsList);
                        }
                        lst_structTweetFollowingsIds = dataScrapeer.GetFollowings(user_id);
                        AddToScrapeLogs("Added " + lst_structTweetFollowingsIds.Count + " Followings to list");

                        foreach (string data in lst_structTweetFollowingsIds)
                        {
                            try
                            {
                                Globals.lstScrapedUserIDs.Add(data);
                                GlobusFileHelper.AppendStringToTextfileNewLine(user_id + "," + data, Globals.Path_ScrapedFollowingsList);
                                AddToScrapeLogs(data);
                            }
                            catch (Exception ex)
                            {
                                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_TwtErrorLogs);
                            }
                        }

                        AddToScrapeLogs("Added " + lst_structTweetFollowingsIds.Count + " Followings from User: " + keyword);
                        AddToScrapeLogs("Data Exported to " + Globals.Path_ScrapedFollowingsList);
                        if (Globals.IsDirectedFromFollower)
                        {
                            AddToLog_Follower("Added " + lst_structTweetFollowingsIds.Count + " Followings from User: " + keyword);
                            Thread.Sleep(1000);
                            tabMain.Invoke(new MethodInvoker(delegate
                            {
                                tabMain.SelectedIndex = 2;
                            }));
                            //tabMain.SelectedIndex = 2;
                        }
                    }
                    catch (Exception ex)
                    {
                        Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                        Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_TwtErrorLogs);
                    }
                }

                Globals.lstScrapedUserIDs = Globals.lstScrapedUserIDs.Distinct().ToList();
                ////new Thread(() =>
                ////{
                //foreach (string data in lst_structTweetFollowersIDs)
                //{
                //    try
                //    {
                //        clsDBQueryManager DataBase = new clsDBQueryManager();
                //        DataBase.InsertOrUpdateScrapeSetting(data, "");
                //    }
                //    catch (Exception ex)
                //    {

                //    }
                //}
                //// }
                ////).Start();
                //AddToScrapeLogs("Added " + lst_structTweetFollowingsIds.Count + " Followings from User: " + keyword); 
            }

            new Thread(() =>
                {
                    List<string> temp = new List<string>();
                    foreach (string item in Globals.lstScrapedUserIDs)
                    {
                        temp.Add(item);
                    }

                    foreach (string data in temp)
                    {
                        try
                        {
                            clsDBQueryManager DataBase = new clsDBQueryManager();
                            DataBase.InsertOrUpdateScrapeSetting(data, "" , "");
                        }
                        catch (Exception ex)
                        {
                            Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> lstScrapedUserIDs --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                            Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> lstScrapedUserIDs --> " + ex.Message, Globals.Path_TwtErrorLogs);
                        }
                    }
                }
                ).Start();
        }
Developer: prog-moh, Project: twtboard, Lines of code: 101, Source: frmMain-New+UI.cs
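Note how Example 2 copies Globals.lstScrapedUserIDs into a local temp list before starting the worker thread, so later additions to the shared list cannot invalidate the enumeration. Only the user ID is known at that point, so the remaining parameters are passed as empty strings. A condensed sketch of the same pattern, under the same inferred three-argument signature:

        // Sketch: snapshot the shared list, then insert off the UI thread.
        List<string> snapshot = new List<string>(Globals.lstScrapedUserIDs);
        new Thread(() =>
        {
            foreach (string userId in snapshot)
            {
                clsDBQueryManager DataBase = new clsDBQueryManager();
                // Username and tweet ID are unknown here, so empty strings are passed.
                DataBase.InsertOrUpdateScrapeSetting(userId, "", "");
            }
        }).Start();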

Example 3: threadStartScrape


//......... part of the code is omitted here .........

                            }
                            else
                            {
                                lst_structTweetFollowingsIds = dataScrapeer.GetFollowings_NewForMobileVersion(keyword.Trim(), out returnStaus, ref  globusHttpHelper);
                            }

                            if (lst_structTweetFollowingsIds.Count > 0)
                            {

                                if (lst_structTweetFollowingsIds.Count > 0)
                                {

                                    AddToScrapeLogs("[ " + DateTime.Now + " ] => [ Added " + lst_structTweetFollowingsIds.Count + " Followings from User: " + keyword + " ]");
                                    AddToScrapeLogs("[ " + DateTime.Now + " ] => [ Data Exported to " + Globals.Path_ScrapedFollowingsList + " ]");
                                    if (Globals.IsDirectedFromFollower)
                                    {
                                        AddToLog_Follower("[ " + DateTime.Now + " ] => [ Added " + lst_structTweetFollowingsIds.Count + " Followings from User: " + keyword + " ]");
                                        Thread.Sleep(1000);
                                        //Tb_AccountManager.Invoke(new MethodInvoker(delegate
                                        //{
                                        //    Tb_AccountManager.SelectedIndex = 2;
                                        //}));
                                        //tabMain.SelectedIndex = 2;
                                    }
                                }
                                else if (returnStaus == "Error")
                                {
                                    AddToScrapeLogs("[ " + DateTime.Now + " ] => [ Rate Limit Exceeded.Please Try After Some Time ]");
                                    break;
                                }
                            }
                            else
                            {
                                AddToScrapeLogs("[ " + DateTime.Now + " ] => [ " + keyword + " User does not have any followings ]");
                            }
                        }
                        catch (Exception ex)
                        {
                            Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                            Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> btnScrapeKeyword_Click() -- lst_structTweetFollowingsIds foreach --> " + ex.Message, Globals.Path_TwtErrorLogs);
                        }
                    }

                    Globals.lstScrapedUserIDs = Globals.lstScrapedUserIDs.Distinct().ToList();
                }

                Globals.IsMobileVersion = false;
                new Thread(() =>
                {
                    try
                    {
                        List<string> temp = new List<string>();
                        foreach (string item in Globals.lstScrapedUserIDs)
                        {
                            temp.Add(item);
                        }

                        foreach (string data in temp)
                        {
                            try
                            {
                                clsDBQueryManager DataBase = new clsDBQueryManager();
                                DataBase.InsertOrUpdateScrapeSetting(data, "", "");
                            }
                            catch (Exception ex)
                            {
                                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> lstScrapedUserIDs --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> lstScrapedUserIDs --> " + ex.Message, Globals.Path_TwtErrorLogs);
                            }
                        }
                    }
                    catch (Exception)
                    {
                    }

                }).Start();

                AddToScrapeLogs("[ " + DateTime.Now + " ] => [ PROCESS COMPLETED ]");
                AddToScrapeLogs("------------------------------------------------------------------------------------------------------------------------------------------");

                //if (IsUserScrapedDatalist)
                //{
                //    Tb_AccountManager.Invoke(new MethodInvoker(delegate
                //    {
                //        Tb_AccountManager.SelectedIndex = 0;
                //        //Tb_AccountManager.SelectedTab.Name = "tabFollower";
                //    }));
                //}
            }
            catch (Exception)
            {

            }
            finally
            {
                IsStart_ScrapUser = true;
                dataScrapeer.logEvents.addToLogger -= new EventHandler(DataScraperlogger_addToLogger);
            }
        }
Developer: ahmetDostr, Project: twtboard, Lines of code: 101, Source: frmMain_NewUI.cs

Example 4: ScrapeKeywordSeacrh

        private void ScrapeKeywordSeacrh()
        {
            TwitterDataScrapper TweetData = new TwitterDataScrapper();
            TweetData.noOfRecords = 1000;

            List<TwitterDataScrapper.StructTweetIDs> data = TweetData.GetTweetData(txtScrapeKeyword.Text);

            AddToScrapeLogs(data.Count + " User ids Scraped ");

            foreach (TwitterDataScrapper.StructTweetIDs item in data)
            {
                Globals.lstScrapedUserIDs.Add(item.ID_Tweet_User);
                AddToScrapeLogs(item.ID_Tweet_User);
            }
            Globals.lstScrapedUserIDs = Globals.lstScrapedUserIDs.Distinct().ToList();

            new Thread(() =>
                    {
                        foreach (TwitterDataScrapper.StructTweetIDs item in data)
                        {
                            clsDBQueryManager DataBase = new clsDBQueryManager();
                            DataBase.InsertOrUpdateScrapeSetting(item.ID_Tweet_User, item.username__Tweet_User);
                        }
                    }).Start();
            if (Globals.IsDirectedFromFollower)
            {
                Thread.Sleep(1000);
                Globals.IsDirectedFromFollower = false;
                AddToLog_Follower(data.Count + " User ids Scraped and Added To Follow List");
                tabMain.Invoke(new MethodInvoker(delegate
                {
                    tabMain.SelectedIndex = 2;
                }));
                
                //tabMain.SelectedIndex = 2;
            }
        }
Developer: prog-moh, Project: twtboard, Lines of code: 37, Source: frmMain-3-7-12.cs
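This older revision calls a two-argument form, while Examples 1-3 and 5 pass three arguments, which suggests the method later gained a tweet-ID parameter. The declarations below are inferred from the call sites in this article only; they are assumptions, not taken from the BaseLib source.

        // Inferred declarations (assumptions based on the call sites shown here):
        // void InsertOrUpdateScrapeSetting(string userId, string userName);                 // older form, Example 4
        // void InsertOrUpdateScrapeSetting(string userId, string userName, string tweetId); // form used in Examples 1-3 and 5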

Example 5: ScrapeKeywordSeacrh


//......... part of the code is omitted here .........
                //            break;
                //        }

                //        xBeginSearchAfter = null;
                //        dataDescription = string.Empty;
                //        //xNode = xml.SearchForAttribute(xBeginSearchAfter, "span", "class", "location profile-field");
                //        xNode = xml.SearchForAttribute(xBeginSearchAfter, "span", "class", "ProfileHeaderCard-locationText u-dir");//location profile-field");
                //        while ((xNode != null))
                //        {
                //            xBeginSearchAfter = xNode;
                //            Location = xNode.AccumulateTagContent("text", "script|style");
                //            break;
                //        }

                //        int counterData = 0;
                //        xBeginSearchAfter = null;
                //        dataDescription = string.Empty;
                //        //xNode = xml.SearchForAttribute(xBeginSearchAfter, "a", "data-element-term", "tweet_stats");
                //        xNode = xml.SearchForAttribute(xBeginSearchAfter, "a", "class", "ProfileNav-stat ProfileNav-stat--link u-borderUserColor u-textCenter js-tooltip js-nav");
                //        while ((xNode != null))
                //        {
                //            xBeginSearchAfter = xNode;
                //            if (counterData == 0)
                //            {
                //                NoOfTweets = xNode.AccumulateTagContent("text", "script|style").Replace("Tweets", string.Empty).Replace(",", string.Empty).Replace("Tweet", string.Empty);
                //                counterData++;
                //            }
                //            else if (counterData == 1)
                //            {
                //                Followings = xNode.AccumulateTagContent("text", "script|style").Replace(" Following", string.Empty).Replace(",", string.Empty).Replace("Following", string.Empty);
                //                counterData++;
                //            }
                //            else if (counterData == 2)
                //            {
                //                Followers = xNode.AccumulateTagContent("text", "script|style").Replace("Followers", string.Empty).Replace(",", string.Empty).Replace("Follower", string.Empty);
                //                counterData++;
                //            }
                //            else
                //            {
                //                break;
                //            }
                //            //xNode = xml.SearchForAttribute(xBeginSearchAfter, "a", "class", "js-nav");
                //            xNode = xml.SearchForAttribute(xBeginSearchAfter, "a", "class", "ProfileNav-stat ProfileNav-stat--link u-borderUserColor u-textCenter js-tooltip js-openSignupDialog js-nonNavigable u-textUserColor");
                //        }

                //        if (!string.IsNullOrEmpty(item.username__Tweet_User) && item.ID_Tweet_User != "null")
                //        {
                //            string Id_user = item.ID_Tweet_User.Replace("}]", string.Empty).Trim();
                //            Globals.lstScrapedUserIDs.Add(Id_user);
                //            GlobusFileHelper.AppendStringToTextfileNewLine(Id_user + "," + item.username__Tweet_User + "," + ProfileName + "," + Bio.Replace(",", "") + "," + Location.Replace(",", "") + "," + website + "," + NoOfTweets.Replace(",", "").Replace("Tweets", "") + "," + Followers.Replace(",", "").Replace("Following", "") + "," + Followings.Replace(",", "").Replace("Followers", "").Replace("Follower", ""), Globals.Path_KeywordScrapedListData + "-" + txtScrapeKeyword.Text + ".csv");
                //            AddToScrapeLogs("[ " + DateTime.Now + " ] => [ " + Id_user + "," + item.username__Tweet_User + "," + ProfileName + "," + Bio.Replace(",", "") + "," + Location + "," + website + "," + NoOfTweets + "," + Followers + "," + Followings + " ]");
                //        }
                //    }

                //}
                #endregion

                //AddToScrapeLogs("Retrieving data");
                AddToScrapeLogs("[ " + DateTime.Now + " ] => [ Adding Data To DataBase ]");
                Globals.lstScrapedUserIDs = Globals.lstScrapedUserIDs.Distinct().ToList();

                thread_AddingKeywordScrape = new Thread(() =>
                {
                    foreach (TwitterDataScrapper.StructTweetIDs item in data)
                    {
                        if (!string.IsNullOrEmpty(item.username__Tweet_User) && item.ID_Tweet_User != "null")
                        {
                            //AddToScrapeLogs(item.ID_Tweet_User);
                            clsDBQueryManager DataBase = new clsDBQueryManager();
                            DataBase.InsertOrUpdateScrapeSetting(item.ID_Tweet_User, item.username__Tweet_User, item.ID_Tweet);
                        }
                    }

                    AddToScrapeLogs("[ " + DateTime.Now + " ] => [ Exported location :- " + Globals.Path_KeywordScrapedList + " ]");
                    AddToScrapeLogs("[ " + DateTime.Now + " ] => [ PROCESS COMPLETED ]");
                    AddToScrapeLogs("------------------------------------------------------------------------------------------------------------------------------------------");

                });

                thread_AddingKeywordScrape.Start();

                if (Globals.IsDirectedFromFollower)
                {
                    Thread.Sleep(1000);
                    Globals.IsDirectedFromFollower = false;
                    AddToLog_Follower("[ " + DateTime.Now + " ] => [ " + data.Count + " User ids Scraped and Added To Follow List ]");
                    Tb_AccountManager.Invoke(new MethodInvoker(delegate
                    {
                        Tb_AccountManager.SelectedIndex = 2;
                    }));

                    //tabMain.SelectedIndex = 2;
                }
            }
            catch (Exception ex)
            {
                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine(DateTime.Now + " --> Error --> ScrapeKeywordSeacrh() --> " + ex.Message, Globals.Path_ScrapeUsersErroLog);
                Globussoft.GlobusFileHelper.AppendStringToTextfileNewLine("Error --> ScrapeKeywordSeacrh() --> " + ex.Message, Globals.Path_TwtErrorLogs);
            }
        }
Developer: ahmetDostr, Project: twtboard, Lines of code: 101, Source: frmMain_NewUI.cs


Note: The BaseLib.clsDBQueryManager.InsertOrUpdateScrapeSetting method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their authors; copyright of the source code remains with the original authors, and distribution or use should follow each project's license. Do not reproduce without permission.