本文整理汇总了Golang中github.com/blevesearch/bleve/analysis.Analyzer.TokenFilters方法的典型用法代码示例。如果您正苦于以下问题:Golang Analyzer.TokenFilters方法的具体用法?Golang Analyzer.TokenFilters怎么用?Golang Analyzer.TokenFilters使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类github.com/blevesearch/bleve/analysis.Analyzer
的用法示例。
在下文中一共展示了Analyzer.TokenFilters方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Golang代码示例。
示例1: AnalyzerConstructor
// AnalyzerConstructor builds an *analysis.Analyzer from a generic config map.
//
// Recognized config keys:
//   - "char_filters":  optional []interface{} of char filter names; each entry
//     must be a string and is resolved through the registry cache.
//   - "tokenizer":     required string naming the tokenizer (resolved via cache).
//   - "token_filters": optional []interface{} of token filter names; each entry
//     must be a string and is resolved through the registry cache.
//
// It returns an error when the tokenizer key is missing or not a string, when
// any filter name is not a string, or when the cache cannot resolve a name.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	var charFilters []analysis.CharFilter
	if names, ok := config["char_filters"].([]interface{}); ok {
		charFilters = make([]analysis.CharFilter, len(names))
		for i, name := range names {
			// Guard clause keeps the happy path left-aligned.
			nameString, ok := name.(string)
			if !ok {
				return nil, fmt.Errorf("char filter name must be a string")
			}
			charFilter, err := cache.CharFilterNamed(nameString)
			if err != nil {
				return nil, err
			}
			charFilters[i] = charFilter
		}
	}
	tokenizerName, ok := config["tokenizer"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify tokenizer")
	}
	tokenizer, err := cache.TokenizerNamed(tokenizerName)
	if err != nil {
		return nil, err
	}
	var tokenFilters []analysis.TokenFilter
	if names, ok := config["token_filters"].([]interface{}); ok {
		tokenFilters = make([]analysis.TokenFilter, len(names))
		for i, name := range names {
			nameString, ok := name.(string)
			if !ok {
				return nil, fmt.Errorf("token filter name must be a string")
			}
			tokenFilter, err := cache.TokenFilterNamed(nameString)
			if err != nil {
				return nil, err
			}
			tokenFilters[i] = tokenFilter
		}
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
	}
	// Assign only when the config key was present; note that an empty-but-present
	// list yields a non-nil (zero-length) slice, and that distinction is kept.
	if charFilters != nil {
		rv.CharFilters = charFilters
	}
	if tokenFilters != nil {
		rv.TokenFilters = tokenFilters
	}
	return &rv, nil
}
示例2: AnalyzerConstructor
// AnalyzerConstructor builds an *analysis.Analyzer from a generic config map.
//
// Recognized config keys:
//   - "char_filters":  optional; either a []string of char filter names or a
//     []interface{} of strings, resolved through the registry cache.
//   - "tokenizer":     required string naming the tokenizer (resolved via cache).
//   - "token_filters": optional; either a []string of token filter names or a
//     []interface{} of strings, resolved through the registry cache.
//
// It returns an error when the tokenizer key is missing or not a string, when
// a filter list entry is not a string, or when the cache cannot resolve a name.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	var err error
	var charFilters []analysis.CharFilter
	charFiltersNames, ok := config["char_filters"].([]string)
	if ok {
		charFilters, err = getCharFilters(charFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	} else if raw, ok := config["char_filters"].([]interface{}); ok {
		// Plain '=' (not ':=') so the outer charFiltersNames/err are used;
		// the original shadowed both in this scope.
		charFiltersNames, err = convertInterfaceSliceToStringSlice(raw, "char filter")
		if err != nil {
			return nil, err
		}
		charFilters, err = getCharFilters(charFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	}
	tokenizerName, ok := config["tokenizer"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify tokenizer")
	}
	tokenizer, err := cache.TokenizerNamed(tokenizerName)
	if err != nil {
		return nil, err
	}
	var tokenFilters []analysis.TokenFilter
	tokenFiltersNames, ok := config["token_filters"].([]string)
	if ok {
		tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	} else if raw, ok := config["token_filters"].([]interface{}); ok {
		tokenFiltersNames, err = convertInterfaceSliceToStringSlice(raw, "token filter")
		if err != nil {
			return nil, err
		}
		tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	}
	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
	}
	// Only attach filter chains that were actually configured.
	if charFilters != nil {
		rv.CharFilters = charFilters
	}
	if tokenFilters != nil {
		rv.TokenFilters = tokenFilters
	}
	return &rv, nil
}