

Golang analysis.Analyzer Type Code Examples

This article collects typical usage examples of the Go type analysis.Analyzer from github.com/blevesearch/bleve/analysis. If you are wondering what the Analyzer type is for, how to use it, or what working examples look like, the selected code samples below may help.


Three code examples of the Analyzer type are shown below, sorted by popularity by default.

Example 1: AnalyzerConstructor

func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {

	var charFilters []analysis.CharFilter
	charFilterNames, ok := config["char_filters"].([]interface{})
	if ok {
		charFilters = make([]analysis.CharFilter, len(charFilterNames))
		for i, charFilterName := range charFilterNames {
			charFilterNameString, ok := charFilterName.(string)
			if ok {
				charFilter, err := cache.CharFilterNamed(charFilterNameString)
				if err != nil {
					return nil, err
				}
				charFilters[i] = charFilter
			} else {
				return nil, fmt.Errorf("char filter name must be a string")
			}
		}
	}

	tokenizerName, ok := config["tokenizer"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify tokenizer")
	}

	tokenizer, err := cache.TokenizerNamed(tokenizerName)
	if err != nil {
		return nil, err
	}

	var tokenFilters []analysis.TokenFilter
	tokenFilterNames, ok := config["token_filters"].([]interface{})
	if ok {
		tokenFilters = make([]analysis.TokenFilter, len(tokenFilterNames))
		for i, tokenFilterName := range tokenFilterNames {
			tokenFilterNameString, ok := tokenFilterName.(string)
			if ok {
				tokenFilter, err := cache.TokenFilterNamed(tokenFilterNameString)
				if err != nil {
					return nil, err
				}
				tokenFilters[i] = tokenFilter
			} else {
				return nil, fmt.Errorf("token filter name must be a string")
			}
		}
	}

	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
	}
	if charFilters != nil {
		rv.CharFilters = charFilters
	}
	if tokenFilters != nil {
		rv.TokenFilters = tokenFilters
	}
	return &rv, nil
}
Developer ID: bozzcq, Project: bleve, Lines of code: 59, Source file: custom_analyzer.go
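
A minimal usage sketch, not taken from the excerpt above: constructors like this one are invoked by bleve when a "custom" analyzer is registered through the public mapping API, with the config map carrying the "char_filters", "tokenizer" and "token_filters" keys read above. The component names "unicode" and "to_lower" are bleve's standard registered names; the analyzer name "my_analyzer" and the index path "example.bleve" are made up for illustration.

package main

import (
	"log"

	"github.com/blevesearch/bleve"
)

func main() {
	mapping := bleve.NewIndexMapping()
	// Register a custom analyzer; bleve resolves the named components
	// through its registry, much as AnalyzerConstructor does above.
	err := mapping.AddCustomAnalyzer("my_analyzer", map[string]interface{}{
		"type":          "custom",
		"tokenizer":     "unicode",
		"token_filters": []interface{}{"to_lower"},
	})
	if err != nil {
		log.Fatal(err)
	}
	mapping.DefaultAnalyzer = "my_analyzer"

	index, err := bleve.New("example.bleve", mapping)
	if err != nil {
		log.Fatal(err)
	}
	defer index.Close()
}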

Example 2: AnalyzerConstructor

func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {

	var err error
	var charFilters []analysis.CharFilter
	charFiltersNames, ok := config["char_filters"].([]string)
	if ok {
		charFilters, err = getCharFilters(charFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	} else {
		charFiltersNamesInterfaceSlice, ok := config["char_filters"].([]interface{})
		if ok {
			charFiltersNames, err := convertInterfaceSliceToStringSlice(charFiltersNamesInterfaceSlice, "char filter")
			if err != nil {
				return nil, err
			}
			charFilters, err = getCharFilters(charFiltersNames, cache)
			if err != nil {
				return nil, err
			}
		}
	}

	tokenizerName, ok := config["tokenizer"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify tokenizer")
	}

	tokenizer, err := cache.TokenizerNamed(tokenizerName)
	if err != nil {
		return nil, err
	}

	var tokenFilters []analysis.TokenFilter
	tokenFiltersNames, ok := config["token_filters"].([]string)
	if ok {
		tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
		if err != nil {
			return nil, err
		}
	} else {
		tokenFiltersNamesInterfaceSlice, ok := config["token_filters"].([]interface{})
		if ok {
			tokenFiltersNames, err := convertInterfaceSliceToStringSlice(tokenFiltersNamesInterfaceSlice, "token filter")
			if err != nil {
				return nil, err
			}
			tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
			if err != nil {
				return nil, err
			}
		}
	}

	rv := analysis.Analyzer{
		Tokenizer: tokenizer,
	}
	if charFilters != nil {
		rv.CharFilters = charFilters
	}
	if tokenFilters != nil {
		rv.TokenFilters = tokenFilters
	}
	return &rv, nil
}
Developer ID: ekanite, Project: bleve, Lines of code: 66, Source file: custom_analyzer.go
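
The excerpt above relies on two helpers, getCharFilters and convertInterfaceSliceToStringSlice (plus a matching getTokenFilters), which live elsewhere in the same file and are not shown. The sketch below is inferred purely from how they are called above, not copied from the real source, so the actual implementations may differ.

// getCharFilters resolves each char filter name against the registry cache.
func getCharFilters(charFilterNames []string, cache *registry.Cache) ([]analysis.CharFilter, error) {
	charFilters := make([]analysis.CharFilter, len(charFilterNames))
	for i, charFilterName := range charFilterNames {
		charFilter, err := cache.CharFilterNamed(charFilterName)
		if err != nil {
			return nil, err
		}
		charFilters[i] = charFilter
	}
	return charFilters, nil
}

// convertInterfaceSliceToStringSlice checks that every element of a
// JSON-decoded []interface{} is a string; target only labels the component
// kind ("char filter", "token filter") in the error message.
func convertInterfaceSliceToStringSlice(interfaceSlice []interface{}, target string) ([]string, error) {
	stringSlice := make([]string, len(interfaceSlice))
	for i, obj := range interfaceSlice {
		s, ok := obj.(string)
		if !ok {
			return nil, fmt.Errorf("%s name must be a string", target)
		}
		stringSlice[i] = s
	}
	return stringSlice, nil
}

getTokenFilters would follow the same pattern as getCharFilters, calling cache.TokenFilterNamed instead.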

Example 3: TestSoraniStemmerFilter

func TestSoraniStemmerFilter(t *testing.T) {

	// in order to match the lucene tests
	// we will test with an analyzer, not just the stemmer
	analyzer := analysis.Analyzer{
		Tokenizer: single_token.NewSingleTokenTokenizer(),
		TokenFilters: []analysis.TokenFilter{
			NewSoraniNormalizeFilter(),
			NewSoraniStemmerFilter(),
		},
	}

	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{ // -ek
			input: []byte("پیاوێک"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("پیاو"),
					Position: 1,
					Start:    0,
					End:      12,
				},
			},
		},
		{ // -yek
			input: []byte("دەرگایەک"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("دەرگا"),
					Position: 1,
					Start:    0,
					End:      16,
				},
			},
		},
		{ // -aka
			input: []byte("پیاوەكە"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("پیاو"),
					Position: 1,
					Start:    0,
					End:      14,
				},
			},
		},
		{ // -ka
			input: []byte("دەرگاكە"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("دەرگا"),
					Position: 1,
					Start:    0,
					End:      14,
				},
			},
		},
		{ // -a
			input: []byte("کتاویە"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("کتاوی"),
					Position: 1,
					Start:    0,
					End:      12,
				},
			},
		},
		{ // -ya
			input: []byte("دەرگایە"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("دەرگا"),
					Position: 1,
					Start:    0,
					End:      14,
				},
			},
		},
		{ // -An
			input: []byte("پیاوان"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("پیاو"),
					Position: 1,
					Start:    0,
					End:      12,
				},
			},
		},
		{ // -yAn
			input: []byte("دەرگایان"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("دەرگا"),
					Position: 1,
					Start:    0,
//......... remaining code omitted .........
Developer ID: ekanite, Project: bleve, Lines of code: 101, Source file: sorani_stemmer_filter_test.go
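
The loop that the excerpt cuts off typically walks the test table and compares the analyzer output against the expected token stream. The sketch below follows the usual pattern in bleve's filter tests rather than the elided source, and assumes the reflect package is imported.

	for _, test := range tests {
		// Analyze runs the input through the tokenizer and token filters
		// configured on the analyzer above.
		actual := analyzer.Analyze(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("expected %v, got %v for input %s", test.output, actual, test.input)
		}
	}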


Note: The github.com/blevesearch/bleve/analysis.Analyzer examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective authors, who retain copyright; consult each project's license before distributing or using the code, and do not repost without permission.