當前位置: 首頁>>代碼示例>>Golang>>正文


Golang registry.Cache類代碼示例

本文整理匯總了Golang中github.com/khlieng/name_pending/Godeps/_workspace/src/github.com/blevesearch/bleve/registry.Cache的典型用法代碼示例。如果您正苦於以下問題:Golang Cache類的具體用法?Golang Cache怎麽用?Golang Cache使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。


在下文中一共展示了Cache類的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Golang代碼示例。

示例1: ExceptionsTokenizerConstructor

// ExceptionsTokenizerConstructor builds an exceptions tokenizer from config.
// Exception patterns are read from config["exceptions"], accepted either as a
// generic JSON-decoded []interface{} of strings or as an already-typed
// []string; the patterns are joined into one alternation regexp. Input not
// matched by an exception is handed to the tokenizer named by
// config["tokenizer"], which is required.
func ExceptionsTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	exceptions := []string{}
	// JSON decoding yields []interface{}; non-string entries are silently skipped.
	iexceptions, ok := config["exceptions"].([]interface{})
	if ok {
		for _, exception := range iexceptions {
			exception, ok := exception.(string)
			if ok {
				exceptions = append(exceptions, exception)
			}
		}
	}
	// Also accept a directly-provided string slice.
	aexceptions, ok := config["exceptions"].([]string)
	if ok {
		exceptions = append(exceptions, aexceptions...)
	}
	// FIX: previously an empty exception list compiled the empty pattern "",
	// which matches at every position and degenerates tokenization. Reject it.
	if len(exceptions) == 0 {
		return nil, fmt.Errorf("no pattern found in 'exception' property")
	}
	exceptionPattern := strings.Join(exceptions, "|")
	r, err := regexp.Compile(exceptionPattern)
	if err != nil {
		return nil, fmt.Errorf("unable to build regexp tokenizer: %v", err)
	}

	remainingName, ok := config["tokenizer"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify tokenizer for remaining input")
	}
	remaining, err := cache.TokenizerNamed(remainingName)
	if err != nil {
		return nil, err
	}
	return NewExceptionsTokenizer(r, remaining), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:31,代碼來源:exception.go

示例2: ElisionFilterConstructor

// ElisionFilterConstructor creates an elision token filter backed by the
// articles token map registered under ArticlesName; config is unused.
func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	articles, lookupErr := cache.TokenMapNamed(ArticlesName)
	if lookupErr != nil {
		return nil, fmt.Errorf("error building elision filter: %v", lookupErr)
	}
	return elision_filter.NewElisionFilter(articles), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:7,代碼來源:elision_it.go

示例3: AnalyzerConstructor

// AnalyzerConstructor assembles the Sorani (ckb) analyzer: ICU tokenization
// followed by normalization, lowercasing, stop-word removal, and stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tokenizer, err := cache.TokenizerNamed(icu.Name)
	if err != nil {
		return nil, err
	}
	// Resolve the filter chain in application order; fail on the first
	// missing registration, exactly as the individual lookups would.
	filterNames := []string{
		NormalizeName,
		lower_case_filter.Name,
		StopName,
		StemmerName,
	}
	filters := make([]analysis.TokenFilter, 0, len(filterNames))
	for _, filterName := range filterNames {
		filter, err := cache.TokenFilterNamed(filterName)
		if err != nil {
			return nil, err
		}
		filters = append(filters, filter)
	}
	return &analysis.Analyzer{
		Tokenizer:    tokenizer,
		TokenFilters: filters,
	}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:32,代碼來源:analyzer_ckb.go

示例4: AnalyzerConstructor

// AnalyzerConstructor assembles the French analyzer: Unicode tokenization
// followed by elision, lowercasing, stop-word removal, and light stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tok, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	// Resolve each named filter in the order it runs in the chain.
	chainNames := []string{
		ElisionName,
		lower_case_filter.Name,
		StopName,
		LightStemmerName,
	}
	chain := make([]analysis.TokenFilter, 0, len(chainNames))
	for _, n := range chainNames {
		f, err := cache.TokenFilterNamed(n)
		if err != nil {
			return nil, err
		}
		chain = append(chain, f)
	}
	return &analysis.Analyzer{
		Tokenizer:    tok,
		TokenFilters: chain,
	}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:32,代碼來源:analyzer_fr.go

示例5: DictionaryCompoundFilterConstructor

// DictionaryCompoundFilterConstructor builds a dictionary-based compound-word
// filter. Numeric size bounds and the only_longest_match flag fall back to
// package defaults when absent from config; dict_token_map is required and
// must name a registered token map.
func DictionaryCompoundFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {

	minWordSize := defaultMinWordSize
	minSubWordSize := defaultMinSubWordSize
	maxSubWordSize := defaultMaxSubWordSize
	onlyLongestMatch := defaultOnlyLongestMatch

	// JSON numbers decode as float64, hence the float64 assertions.
	if v, ok := config["min_word_size"].(float64); ok {
		minWordSize = int(v)
	}
	if v, ok := config["min_subword_size"].(float64); ok {
		minSubWordSize = int(v)
	}
	if v, ok := config["max_subword_size"].(float64); ok {
		maxSubWordSize = int(v)
	}
	if v, ok := config["only_longest_match"].(bool); ok {
		onlyLongestMatch = v
	}

	mapName, ok := config["dict_token_map"].(string)
	if !ok {
		return nil, fmt.Errorf("must specify dict_token_map")
	}
	dict, err := cache.TokenMapNamed(mapName)
	if err != nil {
		return nil, fmt.Errorf("error building dict compound words filter: %v", err)
	}
	return NewDictionaryCompoundFilter(dict, minWordSize, minSubWordSize, maxSubWordSize, onlyLongestMatch), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:34,代碼來源:dict.go

示例6: StopTokenFilterConstructor

// StopTokenFilterConstructor creates a stop-token filter backed by the token
// map registered under StopName; config is unused.
func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	stopWords, lookupErr := cache.TokenMapNamed(StopName)
	if lookupErr != nil {
		return nil, lookupErr
	}
	return stop_tokens_filter.NewStopTokensFilter(stopWords), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:7,代碼來源:stop_filter_hi.go

示例7: AnalyzerConstructor

// AnalyzerConstructor assembles the Arabic analyzer: Unicode tokenization,
// then lowercasing, NFKC normalization, stop-word removal, Arabic-specific
// normalization, and stemming, in that order.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tok, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	lower, err := cache.TokenFilterNamed(lower_case_filter.Name)
	if err != nil {
		return nil, err
	}
	stopAr, err := cache.TokenFilterNamed(StopName)
	if err != nil {
		return nil, err
	}
	normAr, err := cache.TokenFilterNamed(NormalizeName)
	if err != nil {
		return nil, err
	}
	stemAr, err := cache.TokenFilterNamed(StemmerName)
	if err != nil {
		return nil, err
	}
	return &analysis.Analyzer{
		Tokenizer: tok,
		TokenFilters: []analysis.TokenFilter{
			lower,
			// NFKC normalization sits between lowercasing and the
			// language-specific filters; Must* panics only on an invalid
			// form constant, which NFKC is not.
			unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKC),
			stopAr,
			normAr,
			stemAr,
		},
	}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:34,代碼來源:analyzer_ar.go

示例8: AnalyzerConstructor

// AnalyzerConstructor assembles the English analyzer: Unicode tokenization
// followed by possessive stripping, lowercasing, stop-word removal, and
// Porter stemming.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tok, err := cache.TokenizerNamed(unicode.Name)
	if err != nil {
		return nil, err
	}
	// Resolve the filter chain in application order.
	names := []string{
		PossessiveName,
		lower_case_filter.Name,
		StopName,
		porter.Name,
	}
	chain := make([]analysis.TokenFilter, 0, len(names))
	for _, name := range names {
		f, err := cache.TokenFilterNamed(name)
		if err != nil {
			return nil, err
		}
		chain = append(chain, f)
	}
	return &analysis.Analyzer{
		Tokenizer:    tok,
		TokenFilters: chain,
	}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:32,代碼來源:analyzer_en.go

示例9: AnalyzerConstructor

// AnalyzerConstructor builds the keyword analyzer, which treats the entire
// input as a single token and applies no token filters.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tok, err := cache.TokenizerNamed(single_token.Name)
	if err != nil {
		return nil, err
	}
	return &analysis.Analyzer{Tokenizer: tok}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:10,代碼來源:keyword_analyzer.go

示例10: KeyWordMarkerFilterConstructor

// KeyWordMarkerFilterConstructor creates a keyword-marker filter from the
// token map named by the required config key "keywords_token_map".
func KeyWordMarkerFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	mapName, present := config["keywords_token_map"].(string)
	if !present {
		return nil, fmt.Errorf("must specify keywords_token_map")
	}
	keywords, err := cache.TokenMapNamed(mapName)
	if err != nil {
		return nil, fmt.Errorf("error building keyword marker filter: %v", err)
	}
	return NewKeyWordMarkerFilter(keywords), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:11,代碼來源:keyword_marker_filter.go

示例11: StopTokensFilterConstructor

// StopTokensFilterConstructor creates a stop-words filter from the token map
// named by the required config key "stop_token_map".
func StopTokensFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	mapName, present := config["stop_token_map"].(string)
	if !present {
		return nil, fmt.Errorf("must specify stop_token_map")
	}
	stopWords, err := cache.TokenMapNamed(mapName)
	if err != nil {
		return nil, fmt.Errorf("error building stop words filter: %v", err)
	}
	return NewStopTokensFilter(stopWords), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:11,代碼來源:stop_tokens_filter.go

示例12: ElisionFilterConstructor

// ElisionFilterConstructor creates an elision filter from the token map named
// by the required config key "articles_token_map".
func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
	mapName, present := config["articles_token_map"].(string)
	if !present {
		return nil, fmt.Errorf("must specify articles_token_map")
	}
	articles, err := cache.TokenMapNamed(mapName)
	if err != nil {
		return nil, fmt.Errorf("error building elision filter: %v", err)
	}
	return NewElisionFilter(articles), nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:11,代碼來源:elision_filter.go

示例13: getCharFilters

// getCharFilters resolves each named char filter via the registry cache,
// preserving order; it fails on the first name that is not registered.
func getCharFilters(charFilterNames []string, cache *registry.Cache) ([]analysis.CharFilter, error) {
	resolved := make([]analysis.CharFilter, 0, len(charFilterNames))
	for _, name := range charFilterNames {
		cf, err := cache.CharFilterNamed(name)
		if err != nil {
			return nil, err
		}
		resolved = append(resolved, cf)
	}
	return resolved, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:12,代碼來源:custom_analyzer.go

示例14: getTokenFilters

// getTokenFilters resolves each named token filter via the registry cache,
// preserving order; it fails on the first name that is not registered.
func getTokenFilters(tokenFilterNames []string, cache *registry.Cache) ([]analysis.TokenFilter, error) {
	resolved := make([]analysis.TokenFilter, 0, len(tokenFilterNames))
	for _, name := range tokenFilterNames {
		tf, err := cache.TokenFilterNamed(name)
		if err != nil {
			return nil, err
		}
		resolved = append(resolved, tf)
	}
	return resolved, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:12,代碼來源:custom_analyzer.go

示例15: AnalyzerConstructor

// AnalyzerConstructor assembles the Japanese analyzer: Kagome tokenization
// followed by a single NFKD Unicode-normalization filter.
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	tok, err := cache.TokenizerNamed(TokenizerName)
	if err != nil {
		return nil, err
	}
	// Must* panics only on an invalid normalization form; NFKD is valid.
	nfkd := unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKD)
	return &analysis.Analyzer{
		Tokenizer:    tok,
		TokenFilters: []analysis.TokenFilter{nfkd},
	}, nil
}
開發者ID:postfix,項目名稱:name_pending,代碼行數:14,代碼來源:analyzer_ja.go


注:本文中的github.com/khlieng/name_pending/Godeps/_workspace/src/github.com/blevesearch/bleve/registry.Cache類示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。