This article compiles typical usage examples of the Document.Find method from the Golang package github.com/PuerkitoBio/goquery. If you have been wondering what Document.Find does, how it is used, or what real code that calls it looks like, the curated examples below should help. You can also read more about the containing type, github.com/PuerkitoBio/goquery.Document.
The following presents 15 code examples of the Document.Find method, sorted by popularity by default.
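Before the examples, here is a minimal, self-contained sketch of the basic Document.Find pattern; the URL and the "a" selector are placeholders and are not taken from any of the examples below:

package main

import (
    "fmt"
    "log"

    "github.com/PuerkitoBio/goquery"
)

func main() {
    // Load and parse a page (placeholder URL).
    doc, err := goquery.NewDocument("https://example.com")
    if err != nil {
        log.Fatal(err)
    }
    // Find takes a CSS selector and returns a *goquery.Selection;
    // Each visits every matched element.
    doc.Find("a").Each(func(i int, s *goquery.Selection) {
        href, _ := s.Attr("href")
        fmt.Println(i, s.Text(), href)
    })
}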
Example 1: scrape
func scrape(language string, filename string) {
    var doc *goquery.Document
    var e error
    // var w *bufio.Writer
    f, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY, 0600)
    if err != nil {
        panic(err)
    }
    defer f.Close()
    if _, err = f.WriteString(fmt.Sprintf("\n####%s\n", language)); err != nil {
        panic(err)
    }
    if doc, e = goquery.NewDocument(fmt.Sprintf("https://github.com/trending?l=%s", language)); e != nil {
        panic(e.Error())
    }
    doc.Find("li.repo-leaderboard-list-item").Each(func(i int, s *goquery.Selection) {
        title := s.Find("div h2 a").Text()
        owner := s.Find("span.owner-name").Text()
        repoName := s.Find("strong").Text()
        description := s.Find("p.repo-leaderboard-description").Text()
        url, _ := s.Find("h2 a").Attr("href")
        url = "https://github.com" + url
        fmt.Println("owner: ", owner)
        fmt.Println("repo: ", repoName)
        fmt.Println("URL: ", url)
        if _, err = f.WriteString("* [" + title + "](" + url + "): " + description + "\n"); err != nil {
            panic(err)
        }
    })
}
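Newer goquery releases deprecate goquery.NewDocument in favor of parsing an HTTP response you fetch yourself; a minimal sketch of that variant (assuming net/http is imported and ignoring status-code handling):

res, err := http.Get(fmt.Sprintf("https://github.com/trending?l=%s", language))
if err != nil {
    panic(err)
}
defer res.Body.Close()
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
    panic(err)
}

Note also that the selectors above target GitHub's trending-page markup as it was when the example was written; they may need updating for the current page structure.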
Example 2: cleanBadTags
func (this *cleaner) cleanBadTags(doc *goquery.Document) *goquery.Document {
    body := doc.Find("body")
    children := body.Children()
    selectors := [3]string{"id", "class", "name"}
    for _, selector := range selectors {
        children.Each(func(i int, s *goquery.Selection) {
            naughtyList := s.Find("*[" + selector + "]")
            cont := 0
            naughtyList.Each(func(j int, e *goquery.Selection) {
                attribute, _ := e.Attr(selector)
                if this.matchNodeRegEx(attribute, REMOVENODES_RE) {
                    if this.config.debug {
                        log.Printf("Cleaning: Removing node with %s: %s\n", selector, this.config.parser.name(selector, e))
                    }
                    this.config.parser.removeNode(e)
                    cont++
                }
            })
            if this.config.debug {
                log.Printf("%d naughty %s elements found", cont, selector)
            }
        })
    }
    return doc
}
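The selector built in the loop, e.g. "*[id]", "*[class]" or "*[name]", matches every descendant element that carries that attribute, which is what lets the regular expression run over all id/class/name values under <body>. A tiny isolated sketch of the same attribute-selector idea:

doc.Find("*[class]").Each(func(_ int, s *goquery.Selection) {
    class, _ := s.Attr("class")
    fmt.Println("element with class:", class)
})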
Example 3: cleanDivs
func (this *cleaner) cleanDivs(doc *goquery.Document) *goquery.Document {
    frames := make(map[string]int)
    framesNodes := make(map[string]*list.List)
    divs := doc.Find("div")
    divs.Each(func(i int, s *goquery.Selection) {
        children := s.Children()
        if children.Size() == 0 {
            text := s.Text()
            text = strings.Trim(text, " ")
            text = strings.Trim(text, "\t")
            text = strings.ToLower(text)
            frames[text]++
            if framesNodes[text] == nil {
                framesNodes[text] = list.New()
            }
            framesNodes[text].PushBack(s)
        }
    })
    for text, freq := range frames {
        if freq > 1 {
            selections := framesNodes[text]
            for s := selections.Front(); s != nil; s = s.Next() {
                selection := s.Value.(*goquery.Selection)
                this.config.parser.removeNode(selection)
            }
        }
    }
    return doc
}
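Node removal here goes through goose's own parser wrapper; with plain goquery the equivalent call is Selection.Remove, so the inner loop could be sketched (assuming removing the matched selection is all removeNode does) as:

for s := selections.Front(); s != nil; s = s.Next() {
    s.Value.(*goquery.Selection).Remove()
}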
Example 4: defaultHTML
// ogtags extracts the og:title, og:image, ... tags from a webpage
func defaultHTML(i *data.Item, sourceURL string, doc *goquery.Document) {
    fmt.Println("Running OG extract.")
    selection := doc.Find("title")
    if len(selection.Nodes) != 0 {
        i.Caption = selection.Nodes[0].FirstChild.Data
    }
    selection = doc.Find("meta[property*='og']")
    for _, e := range selection.Nodes {
        m := htmlAttributeToMap(e.Attr)
        if m["property"] == "og:title" {
            i.Caption = m["content"]
        }
        if m["property"] == "og:image" {
            if !govalidator.IsRequestURL(m["content"]) {
                log.Println("Invalid url in og:image. " + sourceURL)
                continue
            }
            i.ImageURL = m["content"]
        }
        if m["property"] == "og:url" {
            if !govalidator.IsRequestURL(m["content"]) {
                log.Println("Invalid url in og:url. " + sourceURL)
                continue
            }
            i.URL = m["content"]
        }
        if m["property"] == "og:description" {
            i.Description = m["content"]
        }
    }
}
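Walking selection.Nodes and converting the raw html.Attribute slice works, but goquery can also read attributes directly; a sketch of the same og:title lookup using Selection.Attr, which would make the htmlAttributeToMap helper unnecessary:

doc.Find("meta[property='og:title']").Each(func(_ int, s *goquery.Selection) {
    if content, ok := s.Attr("content"); ok {
        i.Caption = content
    }
})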
Example 5: parseOrderListPage
func parseOrderListPage(s *goquery.Document) ([]Order, bool, error) {
    c := s.Find(".container").First()
    t := c.Find("div").First().Text()
    if t != ">注文情報(一覧)<" && t != ">注文情報(検索)<" {
        return nil, false, fmt.Errorf("cannot open \"注文情報(一覧)\", but %#v", t)
    }
    // Remove the title rows (everything up to the first <hr>)
    c.Find("hr").First().Next().PrevAll().Remove()
    results := []Order{}
    c.Find("a").Each(
        func(_ int, s *goquery.Selection) {
            href, ok := s.Attr("href")
            if !ok || !strings.HasPrefix(href, "../otc/C003.html?") {
                return
            }
            u, err := url.Parse(href)
            if err != nil || u.RawQuery == "" {
                return
            }
            v, err := url.ParseQuery(u.RawQuery)
            if err != nil {
                // Skip links whose query string cannot be parsed.
                return
            }
            results = append(results, Order{
                OrderId:     v.Get("order_id"),
                OrderMethod: v.Get("order_method"),
            })
        })
    return results, c.Find("a[accesskey=\"#\"]").Length() == 1, nil
}
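The second return value reports whether exactly one link with accesskey="#" is present, which appears to be the marker for a next page. A hedged usage sketch:

orders, hasNext, err := parseOrderListPage(doc)
if err != nil {
    log.Fatal(err)
}
fmt.Println(len(orders), "orders on this page; next page available:", hasNext)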
Example 6: GetShopName
// Get the shop name
func GetShopName(p *goquery.Document) string {
    name := p.Find(".tb-shop-name").Text()
    if name == "" {
        name = p.Find(".slogo-shopname").Text()
    }
    return strings.TrimSpace(name)
}
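cascadia, goquery's selector engine, also accepts grouped selectors, so the fallback could be written in one call; note that this picks whichever class appears first in document order rather than preferring .tb-shop-name, so it is only equivalent when at most one of the two is present:

name := strings.TrimSpace(p.Find(".tb-shop-name, .slogo-shopname").First().Text())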
Example 7: feedsFromDoc
func feedsFromDoc(doc *goquery.Document, text string) []string {
    sel := "link[type='application/rss+xml']"
    sel += ", link[type='application/atom+xml']"
    matches := doc.Find(sel)
    if matches.Length() > 0 {
        feeds := make([]string, matches.Length())
        matches.Each(func(i int, s *goquery.Selection) {
            url, _ := s.Attr("href")
            feeds[i] = url
        })
        return feeds
    }
    rx := regexp.MustCompile(`href=['"]([^'"]*(rss|atom|feed|xml)[^'"]*)['"]`)
    if rx.FindString(text) != "" {
        matches := rx.FindAllStringSubmatch(text, -1)
        feeds := make([]string, len(matches))
        for i, e := range matches {
            feeds[i] = e[1]
        }
        return feeds
    }
    return make([]string, 0)
}
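The href values are returned verbatim, so relative feed links stay relative; when the Document was built with goquery.NewDocument, its Url field holds the page URL and can resolve them. A sketch for the first branch (net/url imported under the alias neturl, since the closure's url variable would otherwise shadow the package):

href, _ := s.Attr("href")
if ref, err := neturl.Parse(href); err == nil && doc.Url != nil {
    href = doc.Url.ResolveReference(ref).String()
}
feeds[i] = href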
Example 8: getTeamsId
/*
Get the two teams in a match
*/
func getTeamsId(d *goquery.Document) ([2]int, error) {
    var ids [2]int
    url1, ok := d.Find("div.container.left h3 a").Attr("href")
    if !ok {
        return ids, errors.New("could not find team a")
    }
    idA, err := parseTeam(BASE + url1)
    if err != nil {
        return ids, err
    }
    url2, ok := d.Find("div.container.right h3 a").Attr("href")
    if !ok {
        return ids, errors.New("could not find team b")
    }
    idB, err := parseTeam(BASE + url2)
    if err != nil {
        return ids, err
    }
    ids[0] = idA
    ids[1] = idB
    return ids, nil
}
Example 9: perseHTML
// perseHTML parses the HTML and collects the dates that pass isTimeApplicable
func perseHTML(htmldata *goquery.Document) []string {
    var dates []string
    htmldata.Find("a.bt-open").Each(func(_ int, s *goquery.Selection) {
        if jsonData, ok := s.Attr("id"); ok {
            // decode
            htmlStringDecode(&jsonData)
            // analyze json object
            var jsonObject map[string]interface{}
            // json.JsonAnalyze(jsonData, &jsonObject)
            if err := json.Unmarshal([]byte(jsonData), &jsonObject); err != nil {
                return
            }
            // extract date from json object
            // e.g. 2016-02-27 03:30:00
            strDate, ok := jsonObject["field19"].(string)
            if ok && isTimeApplicable(strDate) {
                dates = append(dates, strDate)
            }
        }
    })
    return dates
}
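isTimeApplicable and htmlStringDecode are helpers defined elsewhere and not shown here. Given the commented date format, a sketch of what such a time check might look like (an assumption, not the original helper):

// Hypothetical reconstruction: accept only well-formed future timestamps.
func isTimeApplicable(s string) bool {
    t, err := time.Parse("2006-01-02 15:04:05", s)
    return err == nil && t.After(time.Now())
}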
Example 10: GetFFInfo
/*
** get friends' friends info
*/
func (w *SocialWorker) GetFFInfo(query *goquery.Document) {
    var user User
    // var uid string
    var usex string
    // var usersId []string
    // var usersName []string
    // uidString, _ := query.Find("div.c").Eq(1).Find("a").Attr("href")
    // var digitsRegexp = regexp.MustCompile(`(^|&|\?)uid=([^&]*)(&|$)`)
    /*
    ** Extract the uid (string) of the followers' followers
    */
    // str := digitsRegexp.FindStringSubmatch(uidString)
    // uid = crawlUrl.Id
    // usersId = append(usersId, uid)
    uStr := query.Find("div.c").Eq(2).Text()
    nameStr_1 := GetBetweenStr(uStr, ":", "性别")
    nameStr_2 := GetBetweenStr(nameStr_1, ":", "认证")
    nameStr_3 := strings.Split(nameStr_2, ":")
    uname := nameStr_3[1]
    sexStr_1 := GetBetweenStr(uStr, "性别", "地区")
    sexStr_2 := strings.Split(sexStr_1, ":")
    if sexStr_2[1] == "男" {
        usex = "male"
    } else {
        usex = "female"
    }
    user.uid = crawlUrl.FatherId
    user.friendid = crawlUrl.Id
    user.uname = uname
    user.usex = usex
    glog.Infoln(user)
    w.putItems(user)
}
Example 11: getItems
func getItems(doc *goquery.Document) (items []item, maxWidth int) {
    doc.Find("td.title a").EachWithBreak(func(i int, s *goquery.Selection) bool {
        if i == maxItems {
            return false
        }
        if s.Text() == "More" {
            return true
        }
        href, _ := s.Attr("href")
        title := s.Text()
        points := s.Parent().Parent().Next().Find("span").Text()
        a, b := len(fmt.Sprintf("%s (%s)", title, points)), len(href)
        maxWidth = max(a, b, maxWidth)
        items = append(items, item{
            title:  title,
            url:    href,
            points: points,
        })
        return true
    })
    return
}
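EachWithBreak stops iterating as soon as the callback returns false, which is how the maxItems cap works here. A minimal isolated sketch:

doc.Find("a").EachWithBreak(func(i int, s *goquery.Selection) bool {
    fmt.Println(s.Text())
    return i < 4 // print five matches, then stop
})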
Example 12: GetFriendsUrl
/*
** get friends url
*/
func (w *SocialWorker) GetFriendsUrl(query *goquery.Document, p *page.Page) {
    var str_1 string
    // newCrawlUrl := models.CrawlUrl{}
    query.Find("div.c").Find("table").Find("tbody").Find("tr").Find("a:last-child").Each(func(j int, s *goquery.Selection) {
        if j%2 != 0 {
            friendsUrlString, _ := s.Attr("href")
            var digitsRegexp = regexp.MustCompile(`(^|&|\?)uid=([^&]*)(&|$)`)
            str := digitsRegexp.FindStringSubmatch(friendsUrlString)
            if str == nil {
                str_1 = "1"
            } else {
                str_1 = str[2]
            }
            friendsInfoUrl := "http://weibo.cn/" + str_1 + "/info"
            // newCrawlUrl.Url = "http://weibo.cn/" + str_1 + "/fans"
            // p.AddTargetRequestWithHeaderFile(friendsInfoUrl, "html", "./header.json")
            // newCrawlUrl.Id = str_1
            // newCrawlUrl.Layer = crawlUrl.Layer + 1
            // newCrawlUrl.FatherId = crawlUrl.Id
            // w.SendMessageToSQS(newCrawlUrl)
            Urls = append(Urls, friendsInfoUrl)
            UrlsLevel = append(UrlsLevel, UrlsLevel[i]+1)
        }
    })
}
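The chained Find calls walk descendants at each step, so they can be collapsed into a single descendant selector; a sketch:

query.Find("div.c table tbody tr a:last-child").Each(func(j int, s *goquery.Selection) {
    // same body as above
})

Note that UrlsLevel[i] relies on an index i defined outside this function; only the surrounding program shows where it comes from.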
Example 13: Parse
// Parse fetches the resource at the given URL and parses it according to the rules
func (this *RedditLogic) Parse(redditUrl string) error {
    redditUrl = strings.TrimSpace(redditUrl)
    if redditUrl == "" {
        redditUrl = this.domain + this.golang
    } else if !strings.HasPrefix(redditUrl, "https") {
        redditUrl = "https://" + redditUrl
    }
    var (
        doc *goquery.Document
        err error
    )
    // if doc, err = goquery.NewDocument(redditUrl); err != nil {
    if doc, err = this.newDocumentFromResp(redditUrl); err != nil {
        logger.Errorln("goquery reddit newdocument error:", err)
        return err
    }
    // Entries at the end of the list are processed (stored) first
    resourcesSelection := doc.Find("#siteTable .link")
    for i := resourcesSelection.Length() - 1; i >= 0; i-- {
        err = this.dealRedditOneResource(goquery.NewDocumentFromNode(resourcesSelection.Get(i)).Selection)
        if err != nil {
            logger.Errorln(err)
        }
    }
    return err
}
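goquery.NewDocumentFromNode wraps each matched node in a fresh document; Selection.Eq is a lighter alternative that yields the i-th element of the original selection, and should behave the same here as long as the handler only inspects each .link node's own subtree. A sketch:

for i := resourcesSelection.Length() - 1; i >= 0; i-- {
    if err = this.dealRedditOneResource(resourcesSelection.Eq(i)); err != nil {
        logger.Errorln(err)
    }
}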
Example 14: doWork
func doWork(links <-chan string, results chan<- string) {
    for link := range links {
        var doc *goquery.Document
        for i := 1; ; i++ {
            var err error
            doc, err = goquery.NewDocument(link)
            if err == nil {
                break
            }
            fmt.Fprintf(os.Stderr, "[Tentativa %d] Erro tentando processar página de servidor: %s. Erro: %q", i, link, err)
            if i == maxRetries {
                fmt.Fprintf(os.Stderr, "Página não processada: %s", link)
                return
            }
            time.Sleep(time.Duration(i) * time.Duration(rand.Intn(5)) * time.Second)
        }
        var row []string
        doc.Find("td.desc").Each(func(i int, s *goquery.Selection) {
            cell := strings.Replace(
                strings.Trim(s.Next().Text(), " \n"),
                ",",
                ".",
                1)
            row = append(row, cell)
        })
        if len(row) > 0 {
            results <- strings.Join(row, *sep)
        } else {
            fmt.Fprintf(os.Stderr, "Não achou td.desc: %s\n", link)
        }
    }
}
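A hedged sketch of how such a worker might be driven as a small pool (serverPages is a hypothetical slice of URLs; maxRetries and sep are assumed to be defined elsewhere, as in the original program):

var wg sync.WaitGroup
links := make(chan string)
results := make(chan string)
for w := 0; w < 4; w++ {
    wg.Add(1)
    go func() {
        defer wg.Done()
        doWork(links, results)
    }()
}
go func() {
    for _, l := range serverPages { // hypothetical input slice
        links <- l
    }
    close(links)
    wg.Wait()
    close(results)
}()
for row := range results {
    fmt.Println(row)
}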
Example 15: ARSOPotresi
// ARSOPotresi returns a slice of Potres structs
func ARSOPotresi() []Potres {
    var potresi []Potres
    var doc *goquery.Document
    var e error
    if res, found := cacheArso.Get("potresi"); found {
        return res.([]Potres)
    }
    if doc, e = goquery.NewDocument("http://www.arso.gov.si/potresi/obvestila%20o%20potresih/aip/"); e != nil {
        return potresi
    }
    doc.Find("#glavna td.vsebina table tr").Each(func(i int, s *goquery.Selection) {
        // ParseFloat expects a bit size of 32 or 64, not a precision.
        magnituda, err := strconv.ParseFloat(s.Find("td:nth-child(4)").Text(), 64)
        if magnituda > 0 && err == nil {
            potres := Potres{}
            potres.Magnituda = magnituda
            potres.Lat, _ = strconv.ParseFloat(s.Find("td:nth-child(2)").Text(), 64)
            potres.Lon, _ = strconv.ParseFloat(s.Find("td:nth-child(3)").Text(), 64)
            potres.Lokacija = s.Find("td:nth-child(6)").Text()
            potres.Datum = s.Find("td:nth-child(1)").Text()
            potresi = append(potresi, potres)
        }
    })
    cacheArso.Set("potresi", potresi, cache.DefaultExpiration)
    return potresi
}