This article collects typical usage examples of the Golang method github.com/yahoo/gryffin.Scan.Response. If you are wondering what Scan.Response does and how to use it, the curated code samples below may help. You can also explore further usage examples of the method's enclosing type, github.com/yahoo/gryffin.Scan.
Two code examples of the Scan.Response method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Golang code samples.
Example 1: fill
func (m *response) fill(s *gryffin.Scan) {
    /*
        Sample of the JSON this response struct is decoded from (truncated):
        {"response":{"headers":{"Date":["Thu, 30 Jul 2015 00:13:43 GMT"],"Set-Cookie":["B=82j3nrdarir1n&b=3&s=23; expires=Sun, 30-Jul-2017 00:13:43 GMT; path=/; domain=.yahoo.com"]
    */
    // Rebuild a standard *http.Response from the captured status, headers
    // and body, attach it to the scan, and let gryffin read the body.
    resp := &http.Response{
        Request:    s.Request,
        StatusCode: m.Status,
        Status:     strconv.FormatInt(int64(m.Status), 10),
        Proto:      "HTTP/1.1",
        ProtoMajor: 1,
        ProtoMinor: 1,
        Header:     m.Headers,
        // noCloseReader adapts the in-memory string to the io.ReadCloser
        // that http.Response.Body expects (see the sketch below).
        Body: noCloseReader{strings.NewReader(m.Body)},
    }
    s.Response = resp
    s.ReadResponseBody()
}
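The helper type noCloseReader is not part of the snippet above. A minimal sketch of what such an adapter could look like, assuming it simply embeds an io.Reader and adds a no-op Close (the actual definition in gryffin's renderer package may differ):

package renderer

import "io"

// noCloseReader turns any io.Reader into an io.ReadCloser whose Close is
// a no-op, so an in-memory buffer can stand in for a network response body.
// Assumed definition for illustration; check the gryffin source for the
// real one.
type noCloseReader struct {
    io.Reader
}

func (noCloseReader) Close() error { return nil }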
Example 2: Do
func (r *NoScriptRenderer) Do(s *gryffin.Scan) {
    r.chanResponse = make(chan *gryffin.Scan, 10)
    r.chanLinks = make(chan *gryffin.Scan, 10)

    crawl := func() {
        defer close(r.chanResponse)
        defer close(r.chanLinks)

        client := &http.Client{Timeout: 3 * time.Second}

        // Fetch the page without executing any JavaScript.
        if response, err := client.Do(s.Request); err == nil {
            s.Response = response
        } else {
            s.Logm("NoScriptRenderer", fmt.Sprintf("error in sending request: %s", err))
            return
        }
        s.ReadResponseBody()

        if s.IsDuplicatedPage() {
            return
        }

        // Parse the body and find links.
        tokenizer := html.NewTokenizer(strings.NewReader(s.ResponseBody))
        r.chanResponse <- s

        for {
            t := tokenizer.Next()
            switch t {
            case html.ErrorToken:
                // io.EOF or a genuine parse error; either way, stop.
                return
            case html.StartTagToken:
                token := tokenizer.Token()
                if token.DataAtom.String() == "a" {
                    for _, attr := range token.Attr {
                        if attr.Key == "href" {
                            link := s.Spawn()
                            // TODO - relative URLs (including bare "#"
                            // anchors) are currently followed as-is; a
                            // req.URL.IsAbs() gate would drop them.
                            if req, err := http.NewRequest("GET", attr.Val, nil); err == nil {
                                link.MergeRequest(req)
                                if link.IsScanAllowed() {
                                    r.chanLinks <- link
                                }
                            } else {
                                log.Printf("error in building request: %s", err)
                            }
                        }
                    }
                }
            }
        }
    }

    go crawl()
}
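Do returns immediately and delivers its results over the two channels, which are closed when the crawl goroutine finishes. A minimal driver sketch, assuming the renderer exposes the channels through GetRequestBody and GetLinks accessors and that gryffin.NewScan(method, url, post) builds a scan (both names are assumptions; check the gryffin API). The links channel is drained concurrently so the crawler never blocks on its 10-slot buffer:

package main

import (
    "fmt"

    "github.com/yahoo/gryffin"
    "github.com/yahoo/gryffin/renderer"
)

func main() {
    // Assumed constructor signature: method, URL, POST body.
    scan := gryffin.NewScan("GET", "http://example.com/", "")

    r := &renderer.NoScriptRenderer{}
    r.Do(scan)

    // Drain links concurrently; with more than 10 links on a page the crawl
    // goroutine would otherwise block on the buffered channel before it can
    // close chanResponse.
    done := make(chan struct{})
    go func() {
        defer close(done)
        for link := range r.GetLinks() { // assumed accessor for chanLinks
            fmt.Println("discovered:", link.Request.URL)
        }
    }()

    for s := range r.GetRequestBody() { // assumed accessor for chanResponse
        fmt.Println("fetched:", s.Request.URL, s.Response.StatusCode)
    }
    <-done
}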