This article collects typical code examples showing how the HtmlAgilityPack.HtmlDocument.Load method is used in C#. If you have been wondering what HtmlDocument.Load does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples for its containing class, HtmlAgilityPack.HtmlDocument.
Fifteen code examples of HtmlDocument.Load are shown below, sorted by popularity by default.
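Before the examples, here is a minimal sketch of the three Load overloads that appear most often in them: loading from a file path, from a stream, and from a stream with an explicit encoding. The file name page.html and the example.com URL are placeholders for illustration only, not values taken from the examples below.

using System;
using System.IO;
using System.Net;
using System.Text;
using HtmlAgilityPack;

class HtmlDocumentLoadSketch
{
    static void Main()
    {
        var doc = new HtmlDocument();

        // Overload 1: load from a local file path (placeholder file name).
        doc.Load("page.html");

        // Overload 2: load from a stream, e.g. an HTTP response body (placeholder URL).
        var request = WebRequest.CreateHttp("http://example.com/");
        using (var response = request.GetResponse())
        using (var stream = response.GetResponseStream())
        {
            doc.Load(stream);
        }

        // Overload 3: load from a stream with an explicit encoding
        // (an optional bool argument enables byte-order-mark detection).
        using (var fs = File.OpenRead("page.html"))
        {
            doc.Load(fs, Encoding.UTF8);
        }

        // After any Load call, the parsed HTML is queried through DocumentNode.
        var title = doc.DocumentNode.SelectSingleNode("//title");
        Console.WriteLine(title?.InnerText);
    }
}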
Example 1: GetLinkFromWebPage
private async Task<String> GetLinkFromWebPage()
{
    WebRequest request = WebRequest.CreateHttp(ZERANOE_WEB);
    using (WebResponse response = await request.GetResponseAsync())
    using (Stream s = response.GetResponseStream())
    {
        HtmlDocument doc = new HtmlDocument();
        doc.Load(s);
        HtmlNodeCollection collection = doc.DocumentNode.SelectNodes("/html/body/div[@id='builds-page']/div[@class='grid-460']/a[@class='latest']");
        int bits = Environment.Is64BitProcess ? 64 : 32;
        Regex regex = new Regex($"Download FFmpeg git-\\S+ {bits}-bit Static");
        foreach (var item in collection)
        {
            if (regex.IsMatch(item.InnerText))
            {
                string link = item.GetAttributeValue("href", null);
                if (link != null)
                {
                    // Link is not absolute (./win64/ffmpeg.7z)
                    if (link.StartsWith("."))
                        link = ZERANOE_WEB + link.Substring(2);
                    return link;
                }
            }
        }
    }
    return null;
}
Example 2: Main
static void Main(string[] args)
{
    args = new string[]
    {
        "https://www.bible.com/pt/users/rafael_yami/reading-plans/814-pt-plano-de-leitura-da-biblia?day={0}&id=814-pt-plano-de-leitura-da-biblia",
        "365",
        "http://www.bible.com/pt/sign-in",
        "[email protected]",
        "Queopps 2015",
        @"D:\Documents\Extensions\Json\"
    };
    string urlToPlan = args[0];
    string limitYear = args[1];
    string loginUrl = args[2];
    string htmlString;
    string planFinalUrl;
    Regex regex;
    Match match;
    GroupCollection groups;
    CookieAwareWebClientService webClient = new CookieAwareWebClientService();
    HtmlDocument htmlDocument = new HtmlDocument()
    {
        OptionDefaultStreamEncoding = Encoding.UTF8
    };
    htmlDocument.Load(webClient.OpenRead(loginUrl), Encoding.UTF8);
    string authenticityTokenString = htmlDocument.DocumentNode.SelectSingleNode("//meta[@name='csrf-token']").Attributes["content"].Value;
    NameValueCollection loginData = new NameValueCollection
    {
        { "utf8", "✓" },
        { "authenticity_token", authenticityTokenString },
        { "username", args[3] },
        { "password", args[4] }
    };
    webClient.DoLogin(loginUrl, loginData);
    int daysYear = int.Parse(limitYear);
    for (int i = 1; i <= daysYear; i++)
    {
        planFinalUrl = string.Format(urlToPlan, i);
        htmlDocument.Load(webClient.OpenRead(planFinalUrl), Encoding.UTF8);
        HtmlNodeCollection references = htmlDocument.DocumentNode.SelectNodes("//input[@id='ref']");
    }
}
Example 3: GetTitleFromUrl
public string GetTitleFromUrl(Uri uri)
{
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(uri.AbsoluteUri);
        request.UserAgent = UserAgent;
        var response = (HttpWebResponse)request.GetResponse();
        var ms = new MemoryStream();
        using (var stream = response.GetResponseStream())
        {
            if (stream == null) return string.Empty;
            stream.CopyTo(ms);
        }
        using (ms)
        {
            ms.Position = 0;
            var charset = Encoding.ASCII.EncodingName;
            var doc = new HtmlDocument();
            if (!string.IsNullOrEmpty(response.CharacterSet))
            {
                charset = response.CharacterSet;
            }
            doc.Load(ms, Encoding.GetEncoding(charset), true);
            var html = doc.DocumentNode.OuterHtml;
            var charsetStart = html.IndexOf("charset=\"", StringComparison.InvariantCulture);
            var offset = 0;
            if (charsetStart <= 0)
            {
                charsetStart = html.IndexOf("charset=", StringComparison.InvariantCulture);
                offset = 1;
            }
            if (charsetStart > 0)
            {
                charsetStart += 9 - offset;
                var charsetEnd = html.IndexOfAny(new[] { ' ', '\"', ';' }, charsetStart);
                var realCharset = html.Substring(charsetStart, charsetEnd - charsetStart);
                if (!realCharset.Equals(charset))
                {
                    ms.Position = 0;
                    doc.Load(ms, Encoding.GetEncoding(realCharset), false);
                }
            }
            var titleNode = doc.DocumentNode.SelectSingleNode("//title");
            return titleNode == null ? string.Empty : HttpUtility.HtmlDecode(titleNode.InnerText).Replace("\n", "");
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Failed processing {0}", uri);
        Console.Error.WriteLine(e);
        return string.Empty;
    }
}
Example 4: Cotacao
public static string Cotacao(string codigoAcao, string funcao)
{
    if (string.IsNullOrEmpty(codigoAcao))
        return "";
    if (string.IsNullOrEmpty(funcao))
        funcao = "preco";
    HtmlAgilityPack.HtmlDocument htmlDoc = new HtmlDocument();
    try
    {
        HtmlNodeCollection nodes;
        if (funcao == "negocios")
        {
            htmlDoc.Load(FazWebRequest("http://www.bmfbovespa.com.br/Cotacao-Rapida/ExecutaAcaoCotRapXSL.asp?gstrCA=&txtCodigo=" + codigoAcao + "&intIdiomaXsl=0"));
            nodes = htmlDoc.DocumentNode.SelectNodes("//td[@class='tdValor']");
            return nodes[2].InnerText.Trim();
        }
        DateTime data = new DateTime();
        if (DateTime.TryParse(funcao, out data) && data != DateTime.Today)
        {
            var url = string.Format("http://ichart.finance.yahoo.com/table.csv?s={0}.SA&a={2}&b={1}&c={3}&d={2}&e={1}&f={3}&g=d", codigoAcao, data.Day, (data.Month - 1).ToString("00"), data.Year);
            StreamReader reader = FazWebRequest(url);
            reader.ReadLine();
            var ar = reader.ReadLine().Split(',');
            return ar[4].Replace(".", ",").Trim();
        }
        htmlDoc.Load(FazWebRequest("https://secure.apligraf.com.br/webfeed/viptrade/evolucao001.php?codpad=" + codigoAcao));
        if (funcao == "hora")
            return (htmlDoc.DocumentNode.SelectSingleNode("//div")).ChildNodes[4].InnerText.Trim();
        if (funcao == "strike")
        {
            var ar = (htmlDoc.DocumentNode.SelectSingleNode("//div")).ChildNodes[0].InnerText.Trim().Split(' ');
            return ar[ar.Length - 3].Trim();
        }
        return htmlDoc.DocumentNode.SelectNodes("//td[@class='num'] | //td[@class='num pos'] | //td[@class='num neg']")[funcoes[funcao]].InnerText.Trim();
    }
    catch (Exception)
    {
        return "";
    }
}
Example 5: Load
protected async Task<HtmlDocument> Load(string url)
{
    var stream = await httpClient.GetStreamAsync(url);
    HtmlDocument doc = new HtmlDocument();
    doc.Load(stream);
    return doc;
}
Example 6: Main
static void Main(string[] args)
{
    string orgStr = "jAeagYXxGbDe17U6TFFEomF3JnuB6V+wsjX+oU56NDjkbO8P3vxPdw==";
    string key = "liudeng";
    string org = MD5.Decrypt(orgStr, key);
    return; // early return in the original sample; the HTML parsing below never runs
    HtmlDocument htmlDoc = new HtmlDocument();
    htmlDoc.Load("D:/1.html");
    HtmlNode node = htmlDoc.DocumentNode.SelectSingleNode("//div[@class=\"notice\"]");
    HtmlNodeCollection childCollect = node.ChildNodes;
    VPNInfo vpnInfo = new VPNInfo();
    for (int i = 0; i < childCollect.Count; i++)
    {
        string innerText = childCollect[i].InnerText;
        if (string.IsNullOrEmpty(innerText))
        {
            continue;
        }
        innerText = innerText.Trim();
        if (innerText.Contains("Ip"))
        {
            vpnInfo.VPNAddr = processInnerText(innerText);
        }
        else if (innerText.Contains("Account"))
        {
            vpnInfo.UserName = processInnerText(innerText);
        }
        else if (innerText.Contains("Password") && !innerText.Contains("changes"))
        {
            vpnInfo.UserPassword = processInnerText(innerText);
        }
    }
}
Example 7: LoadHtmlSnippetFromFile
private HtmlDocument LoadHtmlSnippetFromFile()
{
    HtmlDocument doc = new HtmlDocument();
    doc.Load(GetStream("http://www.dota2.com/hero/Earthshaker/"));
    return doc;
}
Example 8: LoadHeroesPage
private HtmlDocument LoadHeroesPage()
{
    HtmlDocument doc = new HtmlDocument();
    doc.Load(GetStream("http://www.dota2.com/heroes/?l=english"));
    return doc;
}
Example 9: ScrapeCategories
private static void ScrapeCategories()
{
    string url = "http://www.mormonchannel.org/";
    var request = WebRequest.CreateHttp(url);
    HtmlDocument html = new HtmlDocument();
    using (var input = request.GetResponse())
        html.Load(input.GetResponseStream(), true);
    var doc = html.DocumentNode;
    var categories = doc.QuerySelectorAll(".ribbon");
    foreach (var item in categories)
    {
        var category = item.QuerySelector(".ribbon-title").InnerText;
        category = category.Substring(0, category.IndexOf(" ")).TrimEnd();
        var channels = item.QuerySelectorAll(".teaser_title");
        foreach (var channel in channels)
        {
            var name = channel.InnerText;
            var location = channel.GetAttributeValue("href", String.Empty);
            Console.WriteLine("('{0}','mormonchannel.org','{1}','{2}'),", name, location, category);
        }
    }
}
Example 10: CrawleOnePage
public void CrawleOnePage()
{
    //arrange
    var loader = new Mock<IHtmlDocumentLoader>();
    var context = new Mock<ICrawlerRepository>();
    var crawler = new RabotaUaCrawler(_logger);
    var document = new HtmlDocument();
    document.Load(new FileStream("TestData/rabotaua/rabotaua.results.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument("http://rabota.ua/jobsearch/vacancy_list?rubricIds=8,9&keyWords=&parentId=1&pg=1")).Returns(document);
    loader.Setup(l => l.LoadDocument("http://rabota.ua/jobsearch/vacancy_list?rubricIds=8,9&keyWords=&parentId=1&pg=2")).Returns(new HtmlDocument());
    var vacancy = new HtmlDocument();
    vacancy.Load(new FileStream("TestData/rabotaua/dnet.withtdd.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument(It.IsRegex(@"http://rabota.ua/company\d+/vacancy\d+"))).Returns(vacancy);
    var storage = new List<TddDemandRecord>();
    context.Setup(c => c.Add(It.IsAny<TddDemandRecord>())).Callback((TddDemandRecord r) => storage.Add(r));
    //act
    crawler.Crawle(loader.Object, context.Object);
    //assert
    context.Verify(c => c.SaveChanges());
    Assert.That(storage.Count, Is.EqualTo(20), "Expected that all 20 divs processed");
}
Example 11: GetVolumes
public IList<Volume> GetVolumes(Comic comic)
{
    IList<Volume> result = new List<Volume>();
    // download volumes list
    Stream stream = WebClientFactory.Create(DOMAIN).OpenRead(comic.Url);
    HtmlDocument document = new HtmlDocument();
    document.Load(stream, Encoding.UTF8);
    // parse volumes
    var volumeNodes = document.DocumentNode.SelectNodes("//ul[contains(@class, 'nr6')]/li/a");
    foreach (var volumeNode in volumeNodes)
    {
        string href = volumeNode.GetAttributeValue("href", "");
        if (!href.ToLowerInvariant().Contains("javascript"))
        {
            result.Insert(0, new Volume
            {
                Title = volumeNode.InnerText,
                Url = DOMAIN + href
            });
        }
    }
    return result;
}
Example 12: CrawleOnePage
public void CrawleOnePage()
{
    //arrange
    var loader = new Mock<IHtmlDocumentLoader>();
    var context = new Mock<ICrawlerRepository>();
    var crawler = new CareersStackoverfowComCrawler(_logger);
    var document = new HtmlDocument();
    document.Load(new FileStream("TestData/careers/careers.results.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument("http://careers.stackoverflow.com/Jobs?searchTerm=.net,java,c%2B%2B&searchType=Any&location=&range=20&pg=1")).Returns(document);
    loader.Setup(l => l.LoadDocument("http://careers.stackoverflow.com/Jobs?searchTerm=.net,java,c%2B%2B&searchType=Any&location=&range=20&pg=2")).Returns(new HtmlDocument());
    var vacancy = new HtmlDocument();
    vacancy.Load(new FileStream("TestData/careers/vacancy.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument(It.IsRegex(@"http://careers.stackoverflow.com/Jobs/(\d+)\?campaign=(\w+)"))).Returns(vacancy);
    var storage = new List<TddDemandRecord>();
    context.Setup(c => c.Add(It.IsAny<TddDemandRecord>())).Callback((TddDemandRecord r) => storage.Add(r));
    //act
    crawler.Crawle(loader.Object, context.Object);
    //assert
    context.Verify(c => c.SaveChanges());
    Assert.That(storage.Count, Is.EqualTo(25), "Expected that all 25 jobs processed");
}
Example 13: Scrape
public void Scrape(string filePath, string directory, bool splitMultiple = false)
{
    CreateDirectory(directory, OutputDirectory);
    var document = new HtmlDocument();
    var tableData = new List<List<string>>();
    document.Load(filePath);
    var root = document.DocumentNode;
    if (splitMultiple)
    {
        var tableCount = 1;
        foreach (HtmlNode table in document.DocumentNode.SelectNodes("//" + TableEl))
        {
            tableData = new List<List<string>>();
            ScrapeSingle(table, tableData);
            WriteResults(tableData, directory + OutputDirectory + GetFileName(filePath) + tableCount + ".csv");
            tableCount++;
        }
    }
    else
    {
        foreach (HtmlNode table in document.DocumentNode.SelectNodes("//" + TableEl))
        {
            ScrapeSingle(table, tableData);
        }
        WriteResults(tableData, directory + OutputDirectory + GetFileName(filePath) + ".csv");
    }
}
Example 14: getDealer
static public ArrayList getDealer(DateTime date)
{
    ArrayList Foreign = new ArrayList();
    Stream ms = getMemoryStreamByUrl("http://www.twse.com.tw/ch/trading/fund/TWT43U/TWT43U.php", date);
    HtmlDocument doc = new HtmlDocument();
    doc.Load(ms, Encoding.UTF8);
    HtmlNodeCollection nodes = doc.DocumentNode.SelectNodes("//table/tbody/tr"); // 自營商 (proprietary dealers)
    cls買賣超物件 f = null;
    foreach (HtmlNode n in nodes)
    {
        HtmlNodeCollection tdNodes = n.SelectNodes("td");
        f = new cls買賣超物件();
        f.StockId = tdNodes[0].InnerText.Trim();
        f.Buy = Decimal.Parse(tdNodes[8].InnerText);
        f.Sold = Decimal.Parse(tdNodes[9].InnerText);
        f.Total = Decimal.Parse(tdNodes[10].InnerText);
        Foreign.Add(f);
    }
    ms.Close();
    return Foreign;
}
Example 15: CrawleOnePage
public void CrawleOnePage()
{
    //arrange
    var loader = new Mock<IHtmlDocumentLoader>();
    var context = new Mock<ICrawlerRepository>();
    var crawler = new PrgJobsComCrawler(_logger);
    var document = new HtmlDocument();
    document.Load(new FileStream("TestData/prgjobscom/search.results.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument("http://www.prgjobs.com/jobout.cfm?ApplicantSearchArea=&SearchText=&Page=1")).Returns(document);
    loader.Setup(l => l.LoadDocument("http://www.prgjobs.com/jobout.cfm?ApplicantSearchArea=&SearchText=&Page=2")).Returns(new HtmlDocument());
    var vacancy = new HtmlDocument();
    vacancy.Load(new FileStream("TestData/prgjobscom/dnet.withtdd.htm", FileMode.Open));
    loader.Setup(l => l.LoadDocument(It.IsRegex(@"http://www.prgjobs.com/Job.cfm/\d+"))).Returns(vacancy);
    var storage = new List<TddDemandRecord>();
    context.Setup(c => c.Add(It.IsAny<TddDemandRecord>())).Callback((TddDemandRecord r) => storage.Add(r));
    //act
    crawler.Crawle(loader.Object, context.Object);
    //assert
    context.Verify(c => c.SaveChanges());
    Assert.That(storage.Count, Is.EqualTo(50), "Expected that all 50 jobs processed");
}