

C# ChromeDriver.FindElementsByXPath Method Code Examples

This article collects typical usage examples of the OpenQA.Selenium.Chrome.ChromeDriver.FindElementsByXPath method in C#. If you are wondering how ChromeDriver.FindElementsByXPath is used in practice, or looking for concrete calling examples, the hand-picked code samples below should help. You can also browse further usage examples of the enclosing OpenQA.Selenium.Chrome.ChromeDriver class.


Four code examples of the ChromeDriver.FindElementsByXPath method are shown below, sorted by popularity by default.
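
Before the full examples, here is a minimal usage sketch (not taken from any of the projects below; the URL and XPath are placeholders). FindElementsByXPath returns a ReadOnlyCollection<IWebElement> containing every match; unlike FindElementByXPath, it does not throw when nothing matches but simply returns an empty collection, which makes it convenient for existence checks. Note that the FindElementsBy* helpers belong to the Selenium 3.x driver classes; in Selenium 4 the equivalent call is driver.FindElements(By.XPath(...)).

using System;
using OpenQA.Selenium;
using OpenQA.Selenium.Chrome;

class FindElementsByXPathDemo
{
    static void Main()
    {
        using (var driver = new ChromeDriver())
        {
            // placeholder URL - replace with the page you want to inspect
            driver.Navigate().GoToUrl("https://example.com");

            // every element matching the XPath; an empty collection if none match
            var links = driver.FindElementsByXPath("//a[@href]");
            Console.WriteLine("Links found: " + links.Count);

            foreach (var link in links)
            {
                Console.WriteLine(link.GetAttribute("href"));
            }
        }
    }
}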

Example 1: IfEmailAddressIsNotValid_ShowValidationMessage

        public void IfEmailAddressIsNotValid_ShowValidationMessage()
        {
            using (var driver = new ChromeDriver())
            {
                driver.Navigate().GoToUrl("http://localhost:49333/Account/Login");

                // before submitting, the validation span should not exist yet;
                // FindElementsByXPath returns an empty collection instead of throwing
                var loginErrorMessage1 = driver.FindElementsByXPath("//*[@id='loginForm']/form/div[1]/div/span/span");
                Assert.AreEqual(0, loginErrorMessage1.Count);

                // submit the form without entering an e-mail address
                driver.FindElementByXPath("//*[@id='loginForm']/form/div[4]/div/input").Click();

                // the validation message should now be rendered and visible
                var loginErrorMessage = driver.FindElementByXPath("//*[@id='loginForm']/form/div[1]/div/span/span");

                Assert.True(loginErrorMessage.Displayed);
            }
        }
Developer: scrumtrek, Project: agilecamp-csharp, Lines: 16, Source: LoginUiTest.cs

Example 2: monster_gulf_grabber_function

        // main scraping routine; called when the Execute button is pressed
        private void monster_gulf_grabber_function(int totalPages_Int, String default_Url)
        {
            /* initialize the ChromeDriver service */
            var chrome_Driver = ChromeDriverService.CreateDefaultService();
            // hide the ChromeDriver console window
            chrome_Driver.HideCommandPromptWindow = true;
            //try
            //{

            var job_region = job_region_selected;
            var job_industry = job_type_selected;

            //var job_ref_code = "";

            var totalPages = 0;
            /* create a CSV file for the output */
            try
            {
                File.WriteAllText(Path.Combine(path_Desktop, "MonsterGulf_Scrapped_DataSet_" + job_region_selected + "_" + job_type_selected + ".csv"), "Job Title, Company, Description, Location, Experience,  Keyskills, Region, Industry, Date\r\n");
            }
            catch (Exception ex)
            {
                // if the file is open, read-only, or not writable, show a message
                //result_textBox.AppendText(ex.StackTrace);
                MessageBox.Show("Cannot write to file !");
            }

            //initialize chrome driver
            var main_page_chrome_Driver_Obj = new ChromeDriver(chrome_Driver, new ChromeOptions());

            do
            {
                var job_title = "";
                var job_company = "";
                var job_desc = "";

                var job_location = "";

                var job_experience = "";
                var job_keyskils = "";
                var job_date = "";
                main_page_chrome_Driver_Obj.Manage().Timeouts().ImplicitlyWait(TimeSpan.FromSeconds(10));
                main_page_chrome_Driver_Obj.Navigate().GoToUrl(default_Url + "/searchresult.html?rfr=;loc=" + region_Selection + ";jbc=" + job_type_Selection + ";day=60;srt=pst;ref=http:%2F%2Fjobsearch%2Emonstergulf%2Ecom%2Fsearch%2Ehtml;show_omit=1;res_cnt=" + resultsPerPage_Int + ";n=" + totalPages_Int);
                //main_page_chrome_Driver_Obj.Navigate().GoToUrl("http://jobsearch.monstergulf.com/searchresult.html?rfr=refine;day=60;srt=pst;ref=http:%2F%2Fjobsearch.monstergulf.com%2Fsearch.html;show_omit=1;res_cnt=40;n=" + totalPages_Int);
                //http://jobsearch.monstergulf.com/searchresult.html?rfr=refine;day=60;srt=pst;ref=http:%2F%2Fjobsearch.monstergulf.com%2Fsearch.html;show_omit=1;res_cnt=40;n=2
                // select the results section of the DOM
                var main_page_dom = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='ns_sresultmain']/div[2]");

                // count the result pages once, on the first results page only
                if (totalPages_Int <= 1)
                {
                    var main_pages_count = main_page_dom[0].FindElements(By.XPath("./div[1]/div/div[2]/div[3]/div/div/div/div/div[@class='liDiv']"));
                    totalPages = main_pages_count.Count();
                }

                // find the job entries on the current results page (the site lists 40 jobs per page)
                var main_page_jobs = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='ns_job_wrapper']");
                //pages_num_textBox.AppendText(" " + main_page_jobs.Count().ToString());

                // if fewer than 40 entries have loaded yet, keep re-querying for up to 7 seconds
                if (main_page_jobs.Count() != 40)
                {
                    DateTime dt = DateTime.Now + TimeSpan.FromSeconds(7);
                    do
                    {
                        main_page_jobs = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='ns_job_wrapper']");
                        //MessageBox.Show(main_page_jobs.Count().ToString());
                    } while (DateTime.Now < dt);
                }
                for (int mainPage_jobIndex = 0; mainPage_jobIndex < main_page_jobs.Count(); mainPage_jobIndex++)
                {
                    job_title = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_jobtitle ns_lt']")).Text;
                    job_company = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_cmpname ns_lt']/h2/strong")).Text;
                    job_desc = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_jobdesc ns_lt']")).Text;
                    job_desc = job_desc.Replace(System.Environment.NewLine, " ");
                    string job_location_and_experience = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_joblocation ns_lt']")).Text;
                    string s = job_location_and_experience;
                    string[] values = s.Split(',');
                    job_location = "";
                    for (int array_item = 0; array_item < values.Length - 1; array_item++)
                    {
                        job_location += values[array_item].Trim();
                    }
                    job_experience = values.Last().Trim(); // the last element is the experience; the preceding elements are the locations

                    if (main_page_jobs[mainPage_jobIndex].FindElements(By.XPath("./div[2]/a/div[@class='ns_jobkeyskills ns_lt']")).Count() > 0)
                    {
                        job_keyskils = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_jobkeyskills ns_lt']")).GetAttribute("title");
                    }

                    // convert the date text, e.g. "16th Apr 2015" => "16/4/2015"
                    var job_date_before_conversion = main_page_jobs[mainPage_jobIndex].FindElement(By.XPath("./div[2]/a/div[@class='ns_jobdate ns_rt']")).Text;
                    job_date = timeStampCnversion(job_date_before_conversion);

                    /* output the result to the textbox */
                    //Job Title, Company, Description, Location, Experience,  Keyskills, Region, Industry, Date\r\n
//......... part of the code omitted here .........
Developer: kaleemullah360, Project: MonsterGulf, Lines: 101, Source: Form1.cs
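
Example 2 above polls FindElementsByXPath in a tight loop for a fixed seven seconds whenever fewer than 40 results are present. Below is a minimal alternative sketch, assuming the Selenium.Support NuGet package is referenced for WebDriverWait; the XPath and the expected count come from the example, everything else is illustrative.

using System;
using System.Collections.ObjectModel;
using OpenQA.Selenium;
using OpenQA.Selenium.Support.UI;   // WebDriverWait, from the Selenium.Support package

static class ScraperWaits
{
    // Re-queries the job wrappers until the expected count is present or the timeout
    // elapses, instead of spinning in a tight loop for a fixed number of seconds.
    public static ReadOnlyCollection<IWebElement> WaitForJobCards(
        IWebDriver driver, int expectedCount, TimeSpan timeout)
    {
        var wait = new WebDriverWait(driver, timeout)
        {
            PollingInterval = TimeSpan.FromMilliseconds(500)
        };

        // Until() keeps evaluating the lambda until it returns a non-null value
        return wait.Until(d =>
        {
            var jobs = d.FindElements(By.XPath("//*[@class='ns_job_wrapper']"));
            return jobs.Count >= expectedCount ? jobs : null;
        });
    }
}

A call such as ScraperWaits.WaitForJobCards(main_page_chrome_Driver_Obj, 40, TimeSpan.FromSeconds(7)) would then replace the do/while block; note that WebDriverWait throws a WebDriverTimeoutException if the count is never reached, unlike the original loop, which times out silently.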

Example 3: JobsInDubai_Scraper

        public void JobsInDubai_Scraper(string default_Url)
        {
            var job_title = "";
            var job_company = "";
            var job_desc = "";

            var job_location = "";
            var job_nationality = "";
            var job_experience = "";
            var job_keyskils = "";
            var job_function = "";
            var job_region = "United Arab Emirates";
            var job_industry = "";
            var job_date = "";
            var job_education = "";
            var next_page_class = 0;
            var page_number = 1;
            nextPage_label.Text = "Yes";

            /* initialize the ChromeDriver service */
            var chrome_Driver = ChromeDriverService.CreateDefaultService();
            // hide the ChromeDriver console window
            chrome_Driver.HideCommandPromptWindow = true;

            var main_page_chrome_Driver_Obj = new ChromeDriver(chrome_Driver, new ChromeOptions());

            /* create a CSV file for the output */
            try
            {
                File.WriteAllText(Path.Combine(path_Desktop, "JobsInDubai_Scrapped_DataSet_allTypesJobs.csv"), "Job Title, Company, Description, Location, Experience,  Keyskills, Education, Region, Industry, Date\r\n");
            }
            catch (Exception ex)
            {

                // if the file is open, read-only, or not writable, show a message
                result_richTextBox.AppendText(ex.StackTrace);
                MessageBox.Show("Cannot write to file !");
            }

            do
            {

                if (next_page_class == 0) { nextPage_label.Text = "No"; }
                //this.page_no_label.Text = page_number.ToString();

                main_page_chrome_Driver_Obj.Navigate().GoToUrl(default_Url + "/job_list.asp?page=" + page_number + "&lstIndustryID=0&txtKeyword=&isSearch=False");

                // click the "Detail View" link to expand the extra job information
                var click_Detail_View = main_page_chrome_Driver_Obj.FindElement(By.XPath("//*/div[@id='divShowAll']/a"));
                if (click_Detail_View.Text == "Detail View") { click_Detail_View.Click(); }

                // count the jobs on the current page (at most 10 per page)
                var var_job_count = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='even']");
                var var_job_des = main_page_chrome_Driver_Obj.FindElementsByXPath("//*/table[@class='expand']");

                for (int job_index = 0; job_index < var_job_count.Count(); job_index++)
                {
                    job_title = var_job_count[job_index].FindElement(By.XPath("./td[2]")).Text;
                    job_title = cleanString(job_title);

                    job_company = var_job_des[job_index].FindElement(By.XPath("./tbody/tr[2]/td[2]/span[1]")).Text;
                    job_company = cleanString(job_company);

                    job_location = var_job_des[job_index].FindElement(By.XPath("./tbody/tr[2]/td[2]/span[2]")).Text;
                    job_location = cleanString(job_location);

                    var job_full_desc_raw = var_job_des[job_index].FindElement(By.XPath("./tbody/tr[4]/td")).Text;
                    var job_full_desc = job_full_desc_raw.Replace(System.Environment.NewLine, " ");

                    job_experience = ParseBetween(job_full_desc, "Experience :", "Years");
                    job_experience = cleanString(job_experience);
                    if (job_experience == string.Empty) { job_experience = "Not Required"; } else { job_experience = job_experience + " Years"; }

                    job_education = ParseBetween(job_full_desc, "Education :", "Experience :");
                    job_education = cleanString(job_education);
                    if (job_education == string.Empty) { job_education = "NULL"; }

                    // extract the skills, falling back through several label pairs

                    job_keyskils = ParseBetween(job_full_desc, "Skills :", "Description :");
                    job_keyskils = cleanString(job_keyskils);
                    if (job_keyskils == string.Empty)
                    {
                        job_keyskils = ParseBetween(job_full_desc, "Skills :", "Responsibilities :");
                        job_keyskils = cleanString(job_keyskils);
                        if (job_keyskils == string.Empty)
                        {
                            job_keyskils = ParseBetween(job_full_desc, "Responsibilities :", "Description :");
                            job_keyskils = cleanString(job_keyskils);
                            if (job_keyskils == string.Empty) { job_keyskils = "NULL"; }
                        }
                    }

                    //End Skills

                    job_industry = var_job_count[job_index].FindElement(By.XPath("./td[3]")).Text;
                    job_industry = cleanString(job_industry);

                    job_date = var_job_count[job_index].FindElement(By.XPath("./td[4]")).Text;
                    job_date = cleanString(job_date);
//......... part of the code omitted here .........
Developer: kaleemullah360, Project: JobsInDubai.com, Lines: 101, Source: JobsInDubai.cs
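
Example 3 leans on a ParseBetween helper whose implementation is not part of the excerpt. The sketch below is a hypothetical reconstruction based on how it is called above (return the text between the first occurrence of two markers, and an empty string when either marker is missing, so the == string.Empty checks in the example still work); it is not the project's actual code.

using System;

static class TextParsing
{
    // Hypothetical stand-in for the ParseBetween helper used in Example 3:
    // returns the text between the first occurrence of 'start' and the next
    // occurrence of 'end', or an empty string when either marker is missing.
    public static string ParseBetween(string source, string start, string end)
    {
        int startIndex = source.IndexOf(start, StringComparison.Ordinal);
        if (startIndex < 0) return string.Empty;
        startIndex += start.Length;

        int endIndex = source.IndexOf(end, startIndex, StringComparison.Ordinal);
        if (endIndex < 0) return string.Empty;

        return source.Substring(startIndex, endIndex - startIndex);
    }
}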

Example 4: NaukriGulf_Function

        //double progress = (x / max) * 100;

        private void NaukriGulf_Function(string defautl_Url)
        {

            /* initialize the ChromeDriver service */
            var chrome_Driver = ChromeDriverService.CreateDefaultService();
            // hide the ChromeDriver console window
            chrome_Driver.HideCommandPromptWindow = true;

            //initialize chrome driver 
            var main_page_chrome_Driver_Obj = new ChromeDriver(chrome_Driver, new ChromeOptions());
            
            /* create a CSV file for the output */
            try
            {
                File.WriteAllText(Path.Combine(path_Desktop, "NaukriGulf_Scrapped_DataSet_" + job_region_selected + "_" + job_type_selected + ".csv"), "Job Title, Company, Description, Location, Experience,  Keyskills, Region, Industry, Date\r\n");
            }
            catch (Exception ex)
            {

                // if the file is open, read-only, or not writable, show a message
                result_richTextBox.AppendText(ex.StackTrace);
                MessageBox.Show("Cannot write to file ! \r check whether file is open");
            }


            int totalPages_Int = 1;
            do
            {
                main_page_chrome_Driver_Obj.Navigate().GoToUrl(defautl_Url + "/jobs-in-uae-" + totalPages_Int + "?fa=" + job_type_Selection);   //job_type_Selection = 016; totalPages_Int = 0;
                main_page_chrome_Driver_Obj.Manage().Timeouts().ImplicitlyWait(TimeSpan.FromSeconds(1));
                //var job_functional_area = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@id='fareaContainer']/div/a");
                var main_page_dom = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@id='resultsData']");
                // count the total number of result pages once, on the first page only
                if (totalPages_Int == 1)
                {
                    var main_pages_count = main_page_dom[0].FindElement(By.XPath(".//div[1]/div[2]/strong[2]")).Text;
                    // the text looks like " 15302 Advertisements "; keep only the digits
                    var str_main_pages_count = Regex.Match(main_pages_count, @"\d+").Value;
                    // convert to int and derive the page count (25 jobs per page)
                    int_main_pages_total_jobs_count = Int32.Parse(str_main_pages_count);
                    int_main_pages_count = (int)Math.Ceiling((double)int_main_pages_total_jobs_count / (double)25);
                }

                var main_page_jobs = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='artical']");      //25 jobs per page

                // if fewer than 25 entries have loaded yet, keep re-querying for up to 7 seconds
                if (main_page_jobs.Count() != 25)
                {
                    DateTime dt = DateTime.Now + TimeSpan.FromSeconds(7);
                    do
                    {
                        main_page_jobs = main_page_chrome_Driver_Obj.FindElementsByXPath("//*[@class='artical']");      //25 jobs per page
                        //MessageBox.Show(main_page_jobs.Count().ToString());
                    } while (DateTime.Now < dt);
                }

                //+++++++++++++++++++++++++ 25 Jobs Loop in Single Main Page starts ++++++++++++++++++++++++
                current_number_jobs = current_number_jobs + (int)(main_page_jobs.Count());
                for (int job_index = 0; job_index < main_page_jobs.Count(); job_index++)
                {
                    // declare and initialize all fields to empty strings
                    var job_title = "";
                    var job_company = "";
                    var job_desc = "";

                    var job_location = "";
                    var job_region = "";

                    //var job_nationality = "";
                    var job_experience = "";
                    var job_keyskils = "";
                    // var job_function = "";
                    //var job_role = "";
                    var job_industry = job_type_selected;
                    var job_date = "";
                    //var job_ref_code = "";

                    //+++++++++++++ values Scraper Region +++++++++++++++++++++++++

                    job_title = main_page_jobs[job_index].FindElement(By.XPath("./div[@class='aCont']/div/a/span[1]")).Text;
                    job_company = main_page_jobs[job_index].FindElement(By.XPath("./div[@class='aCont']/div/a/span[2]")).Text;

                    var job_desc_full = main_page_jobs[job_index].FindElement(By.XPath("./div[@class='aCont']/a/p")).Text; //.GetAttribute("href");
                    job_desc = job_desc_full.Replace(System.Environment.NewLine, " ");
                    //int index = job_desc_full.IndexOf("-?");
                    //if (index > 0)
                    //    job_desc = job_desc_full.Substring(0, index);
                    //job_desc = job_desc.Replace("http://www.naukrigulf.com/job-listings-", string.Empty);
                    job_location = main_page_jobs[job_index].FindElement(By.XPath("./div[@class='aCont']/div/p/span[2]")).Text;
                    string input_job_location = job_location;
                    string[] values_job_location = input_job_location.Split('-'); //Abu Dhabi - United Arab Emirates
                    job_region = values_job_location.Last(); // United Arab Emirates

//......... part of the code omitted here .........
Developer: kaleemullah360, Project: NaukriGulf.Com, Lines: 101, Source: NaukriGulf_Form.cs
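
All three scrapers create their CSV file with File.WriteAllText and write the data rows in the omitted parts of the code. As a hedged sketch (the column order follows the headers above, but this helper itself is not in the excerpt), appending each row with basic quoting keeps the CSV intact when a description or key-skills field contains commas or quotation marks:

using System.IO;
using System.Linq;

static class CsvOutput
{
    // Quote a single CSV field: wrap it in double quotes and double any embedded quotes.
    static string Quote(string field)
    {
        return "\"" + (field ?? string.Empty).Replace("\"", "\"\"") + "\"";
    }

    // Append one data row to the file whose header was written with File.WriteAllText.
    public static void AppendRow(string csvPath, params string[] fields)
    {
        File.AppendAllText(csvPath, string.Join(",", fields.Select(Quote)) + "\r\n");
    }
}

For the NaukriGulf example this might be called as CsvOutput.AppendRow(csvPath, job_title, job_company, job_desc, job_location, job_experience, job_keyskils, job_region, job_industry, job_date), where csvPath is the same path that was passed to File.WriteAllText.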


Note: The OpenQA.Selenium.Chrome.ChromeDriver.FindElementsByXPath examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors, and distribution and use should follow the corresponding project's license. Do not republish without permission.