本文整理匯總了Golang中code.google.com/p/go.net/html.Tokenizer.Next方法的典型用法代碼示例。如果您正苦於以下問題:Golang Tokenizer.Next方法的具體用法?Golang Tokenizer.Next怎麽用?Golang Tokenizer.Next使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類code.google.com/p/go.net/html.Tokenizer
的用法示例。
在下文中一共展示了Tokenizer.Next方法的6個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Golang代碼示例。
示例1: traverse_html_token
// traverse_html_token walks the token stream using the high-level
// Token() accessor, printing each token's type, data, and attribute
// list until the tokenizer reports html.ErrorToken (end of input).
func traverse_html_token(z *html.Tokenizer) {
	for z.Next() != html.ErrorToken {
		tok := z.Token()
		fmt.Printf("|token_type:%-20s|token.Data:%-10s|token.Attr:%-10s|\n", tok.Type, tok.Data, tok.Attr)
	}
}
示例2: traverse_html_tokenizer
// traverse_html_tokenizer walks the token stream using the low-level
// accessors (Text, TagName, TagAttr), printing what each returns for
// every token until html.ErrorToken ends the stream. The accessors are
// invoked in the same order for each token: Text, TagName, TagAttr.
func traverse_html_tokenizer(z *html.Tokenizer) {
	for z.Next() != html.ErrorToken {
		text := string(z.Text())
		nameBytes, hasAttr := z.TagName()
		keyBytes, valBytes, _ := z.TagAttr()
		fmt.Printf("|Tokenizer.Text:%-10s|Tokenizer.TagName:%-10s|hasAttri:%-10t|tag_attr_key:%-10s|tag_attr_value:%-10s|\n",
			text, string(nameBytes), hasAttr, string(keyBytes), string(valBytes))
	}
}
示例3: burnTokensUntilEndTag
// burnTokensUntilEndTag discards tokens from firewood until it reaches
// an end tag whose name equals tagName, or until the stream is
// exhausted (html.ErrorToken). All intervening tokens are ignored.
func burnTokensUntilEndTag(firewood *html.Tokenizer, tagName string) {
	want := []byte(tagName)
	for {
		switch firewood.Next() {
		case html.ErrorToken:
			// Ran out of input before finding the closing tag.
			return
		case html.EndTagToken:
			if name, _ := firewood.TagName(); bytes.Equal(name, want) {
				// Found the matching end tag; stop burning.
				return
			}
		}
	}
}
示例4: next
// skip forward to the next text, and return it as a string
func next(z *html.Tokenizer) string {
for tt := z.Next(); true; tt = z.Next() {
if tt == html.TextToken {
res := string(z.Text())
if debug {
fmt.Printf("next: %q\n", res)
}
return res
}
if tt == html.ErrorToken {
return ""
}
if debug {
fmt.Println("skipping: ", tt)
}
}
return ""
}
示例5: textUpToEndTag
// textUpToEndTag concatenates the contents of every text token read
// from tokenizer until an end tag named tagName is found or the stream
// ends, then returns the accumulated bytes. Non-text tokens other than
// the terminating end tag are skipped.
func textUpToEndTag(tokenizer *html.Tokenizer, tagName string) []byte {
	var collected bytes.Buffer
	want := []byte(tagName)
	for {
		tt := tokenizer.Next()
		if tt == html.ErrorToken {
			// Stream exhausted before the closing tag: return what we have.
			break
		}
		if tt == html.TextToken {
			collected.Write(tokenizer.Text())
			continue
		}
		if tt == html.EndTagToken {
			name, _ := tokenizer.TagName()
			if bytes.Equal(want, name) {
				break
			}
		}
	}
	return collected.Bytes()
}
示例6: parseToken
// parseToken consumes one token from tokenizer and records the
// corresponding element in htmlDoc under parent. It returns three
// values: errorToken (true when the stream is exhausted and the whole
// recursion should unwind), parentEnded (true when an end tag closed
// the current parent), and unsetEndTag (an end-tag name still awaiting
// attachment — presumably "" when setEndTagRaw attached it to the
// element it was given; confirm against setEndTagRaw's definition).
func parseToken(tokenizer *html.Tokenizer, htmlDoc *htmlDocument, parent *tagElement) (bool, bool, string) {
	tokenType := tokenizer.Next()
	switch tokenType {
	case html.ErrorToken:
		// End of input (or tokenizer error): signal callers to stop.
		return true, false, ""
	case html.TextToken:
		text := string(tokenizer.Text())
		// Whitespace-only text nodes are not recorded in the document.
		if strings.TrimSpace(text) == "" {
			break
		}
		textElement := &textElement{text: text}
		appendElement(htmlDoc, parent, textElement)
	case html.StartTagToken:
		// New element: capture its raw start tag, attach it, then
		// recursively consume its children until it is closed.
		tagElement := &tagElement{tagName: getTagName(tokenizer), startTagRaw: string(tokenizer.Raw())}
		appendElement(htmlDoc, parent, tagElement)
		for {
			errorToken, parentEnded, unsetEndTag := parseToken(tokenizer, htmlDoc, tagElement)
			if errorToken {
				return true, false, ""
			}
			if parentEnded {
				// A pending end tag that did not belong to the child
				// bubbles further up to this element's own caller.
				if unsetEndTag != "" {
					return false, false, unsetEndTag
				}
				break
			}
			if unsetEndTag != "" {
				// Try to attach the pending end tag to this element;
				// whatever setEndTagRaw returns keeps propagating up.
				return false, false, setEndTagRaw(tokenizer, tagElement, unsetEndTag)
			}
		}
	case html.EndTagToken:
		// This tag closes parent (or an ancestor — setEndTagRaw decides).
		return false, true, setEndTagRaw(tokenizer, parent, getTagName(tokenizer))
	case html.DoctypeToken, html.SelfClosingTagToken, html.CommentToken:
		// Childless tokens become elements with no nested content.
		tagElement := &tagElement{tagName: getTagName(tokenizer), startTagRaw: string(tokenizer.Raw())}
		appendElement(htmlDoc, parent, tagElement)
	}
	return false, false, ""
}