本文整理匯總了Golang中github.com/blevesearch/bleve/registry.NewCache函數的典型用法代碼示例。如果您正苦於以下問題:Golang NewCache函數的具體用法?Golang NewCache怎麽用?Golang NewCache使用的例子?那麽, 這裏精選的函數代碼示例或許可以為您提供幫助。
在下文中一共展示了NewCache函數的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Golang代碼示例。
示例1: CommonBenchmarkIndexBatch
// CommonBenchmarkIndexBatch benchmarks batched indexing into an
// upside_down index backed by the KV store produced by create. Each b.N
// iteration builds a fresh store, indexes 1000 small documents in batches
// of batchSize, then closes and destroys the store. Store setup/teardown
// is excluded from the timing via StopTimer/StartTimer.
func CommonBenchmarkIndexBatch(b *testing.B, create KVStoreCreate, destroy KVStoreDestroy, analysisWorkers, batchSize int) {
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed("standard")
if err != nil {
b.Fatal(err)
}
// Reset then stop: the clock starts only inside the loop, after the
// per-iteration store and index have been created.
b.ResetTimer()
b.StopTimer()
for i := 0; i < b.N; i++ {
s, err := create()
if err != nil {
b.Fatal(err)
}
analysisQueue := index.NewAnalysisQueue(analysisWorkers)
idx := NewUpsideDownCouch(s, analysisQueue)
err = idx.Open()
if err != nil {
b.Fatal(err)
}
b.StartTimer()
batch := index.NewBatch()
for j := 0; j < 1000; j++ {
// Flush the accumulated batch every batchSize documents.
if j%batchSize == 0 {
if len(batch.IndexOps) > 0 {
err := idx.Batch(batch)
if err != nil {
b.Fatal(err)
}
}
batch = index.NewBatch()
}
// IDs are "<iteration>-<doc>" so they stay unique across b.N runs.
indexDocument := document.NewDocument("").
AddField(document.NewTextFieldWithAnalyzer("body", []uint64{}, []byte(benchmarkDocBodies[j%10]), analyzer))
indexDocument.ID = strconv.Itoa(i) + "-" + strconv.Itoa(j)
batch.Update(indexDocument)
}
// close last batch
if len(batch.IndexOps) > 0 {
err := idx.Batch(batch)
if err != nil {
b.Fatal(err)
}
}
// Teardown is untimed.
b.StopTimer()
err = idx.Close()
if err != nil {
b.Fatal(err)
}
err = destroy()
if err != nil {
b.Fatal(err)
}
analysisQueue.Close()
}
}
示例2: BenchmarkBatch
// BenchmarkBatch measures the cost of applying a pre-built 100-document
// batch to an upside_down index backed by the null KV store. The batch is
// constructed once, outside the timed region.
func BenchmarkBatch(b *testing.B) {
	registryCache := registry.NewCache()
	analyzer, err := registryCache.AnalyzerNamed(standard_analyzer.Name)
	if err != nil {
		b.Fatal(err)
	}
	queue := index.NewAnalysisQueue(1)
	idx, err := NewUpsideDownCouch(null.Name, nil, queue)
	if err != nil {
		b.Fatal(err)
	}
	if err = idx.Open(); err != nil {
		b.Fatal(err)
	}
	// Assemble the reusable 100-document batch up front.
	batch := index.NewBatch()
	for docNum := 0; docNum < 100; docNum++ {
		doc := document.NewDocument(strconv.Itoa(docNum))
		doc.AddField(document.NewTextFieldWithAnalyzer("desc", nil, bleveWikiArticle1K, analyzer))
		batch.Update(doc)
	}
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		if err = idx.Batch(batch); err != nil {
			b.Fatal(err)
		}
	}
}
示例3: TestElisionFilter
func TestElisionFilter(t *testing.T) {
tests := []struct {
input analysis.TokenStream
output analysis.TokenStream
}{
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("ar" + string(Apostrophe) + "word"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("word"),
},
},
},
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("ar" + string(RightSingleQuotationMark) + "word"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("word"),
},
},
},
}
cache := registry.NewCache()
articleListConfig := map[string]interface{}{
"type": token_map.Name,
"tokens": []interface{}{"ar"},
}
_, err := cache.DefineTokenMap("articles_test", articleListConfig)
if err != nil {
t.Fatal(err)
}
elisionConfig := map[string]interface{}{
"type": "elision",
"articles_token_map": "articles_test",
}
elisionFilter, err := cache.DefineTokenFilter("elision_test", elisionConfig)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := elisionFilter.Filter(test.input)
if !reflect.DeepEqual(actual, test.output) {
t.Errorf("expected %s, got %s", test.output[0].Term, actual[0].Term)
}
}
}
示例4: TestItalianElision
func TestItalianElision(t *testing.T) {
tests := []struct {
input analysis.TokenStream
output analysis.TokenStream
}{
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("dell'Italia"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("Italia"),
},
},
},
}
cache := registry.NewCache()
elisionFilter, err := cache.TokenFilterNamed(ElisionName)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := elisionFilter.Filter(test.input)
if !reflect.DeepEqual(actual, test.output) {
t.Errorf("expected %s, got %s", test.output[0].Term, actual[0].Term)
}
}
}
示例5: BenchmarkAnalyze
// BenchmarkAnalyze measures the analysis (not indexing) of a single ~1KB
// document through the firestorm index's Analyze path using the standard
// analyzer. The row-count check guards against the analyzer silently
// changing behavior between runs.
func BenchmarkAnalyze(b *testing.B) {
	cache := registry.NewCache()
	analyzer, err := cache.AnalyzerNamed(standard_analyzer.Name)
	if err != nil {
		b.Fatal(err)
	}
	analysisQueue := index.NewAnalysisQueue(1)
	idx, err := NewFirestorm(null.Name, nil, analysisQueue)
	if err != nil {
		b.Fatal(err)
	}
	d := document.NewDocument("1")
	f := document.NewTextFieldWithAnalyzer("desc", nil, bleveWikiArticle1K, analyzer)
	d.AddField(f)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		rv := idx.Analyze(d)
		if len(rv.Rows) < 92 || len(rv.Rows) > 93 {
			// Fix: the original message claimed "expected 512-13 rows",
			// which contradicted the 92-93 bounds actually checked above.
			b.Fatalf("expected 92-93 rows, got %d", len(rv.Rows))
		}
	}
}
示例6: TestStopWordsFilterLongestMatch
func TestStopWordsFilterLongestMatch(t *testing.T) {
inputTokenStream := analysis.TokenStream{
&analysis.Token{
Term: []byte("softestball"),
Start: 0,
End: 11,
Position: 1,
},
}
expectedTokenStream := analysis.TokenStream{
&analysis.Token{
Term: []byte("softestball"),
Start: 0,
End: 11,
Position: 1,
},
&analysis.Token{
Term: []byte("softest"),
Start: 0,
End: 7,
Position: 1,
},
&analysis.Token{
Term: []byte("ball"),
Start: 7,
End: 11,
Position: 1,
},
}
cache := registry.NewCache()
dictListConfig := map[string]interface{}{
"type": token_map.Name,
"tokens": []interface{}{"soft", "softest", "ball"},
}
_, err := cache.DefineTokenMap("dict_test", dictListConfig)
if err != nil {
t.Fatal(err)
}
dictConfig := map[string]interface{}{
"type": "dict_compound",
"dict_token_map": "dict_test",
"only_longest_match": true,
}
dictFilter, err := cache.DefineTokenFilter("dict_test", dictConfig)
if err != nil {
t.Fatal(err)
}
ouputTokenStream := dictFilter.Filter(inputTokenStream)
if !reflect.DeepEqual(ouputTokenStream, expectedTokenStream) {
t.Errorf("expected %#v got %#v", expectedTokenStream, ouputTokenStream)
}
}
示例7: BenchmarkCJKAnalyzer
// BenchmarkCJKAnalyzer measures repeated analysis of a Japanese Wikipedia
// article with the registered CJK analyzer.
func BenchmarkCJKAnalyzer(b *testing.B) {
	analyzer, err := registry.NewCache().AnalyzerNamed(AnalyzerName)
	if err != nil {
		b.Fatal(err)
	}
	for n := 0; n < b.N; n++ {
		analyzer.Analyze(bleveWikiArticleJapanese)
	}
}
示例8: TestSoraniAnalyzer
// TestSoraniAnalyzer checks the Sorani (Kurdish) analyzer end to end:
// the stop word in the first case is removed, and the suffixed forms in
// all three cases normalize to the same stem term.
// NOTE(review): Start/End values appear to be byte offsets into the
// multi-byte input — confirm against the analyzer's tokenizer.
func TestSoraniAnalyzer(t *testing.T) {
tests := []struct {
input []byte
output analysis.TokenStream
}{
// stop word removal
{
input: []byte("ئەم پیاوە"),
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("پیاو"),
Position: 2,
Start: 7,
End: 17,
},
},
},
{
input: []byte("پیاوە"),
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("پیاو"),
Position: 1,
Start: 0,
End: 10,
},
},
},
{
input: []byte("پیاو"),
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("پیاو"),
Position: 1,
Start: 0,
End: 8,
},
},
},
}
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed(AnalyzerName)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := analyzer.Analyze(test.input)
if !reflect.DeepEqual(actual, test.output) {
t.Errorf("expected %v, got %v", test.output, actual)
}
}
}
示例9: TestStopWordsFilter
func TestStopWordsFilter(t *testing.T) {
inputTokenStream := analysis.TokenStream{
&analysis.Token{
Term: []byte("a"),
},
&analysis.Token{
Term: []byte("walk"),
},
&analysis.Token{
Term: []byte("in"),
},
&analysis.Token{
Term: []byte("the"),
},
&analysis.Token{
Term: []byte("park"),
},
}
expectedTokenStream := analysis.TokenStream{
&analysis.Token{
Term: []byte("walk"),
},
&analysis.Token{
Term: []byte("park"),
},
}
cache := registry.NewCache()
stopListConfig := map[string]interface{}{
"type": token_map.Name,
"tokens": []interface{}{"a", "in", "the"},
}
_, err := cache.DefineTokenMap("stop_test", stopListConfig)
if err != nil {
t.Fatal(err)
}
stopConfig := map[string]interface{}{
"type": "stop_tokens",
"stop_token_map": "stop_test",
}
stopFilter, err := cache.DefineTokenFilter("stop_test", stopConfig)
if err != nil {
t.Fatal(err)
}
ouputTokenStream := stopFilter.Filter(inputTokenStream)
if !reflect.DeepEqual(ouputTokenStream, expectedTokenStream) {
t.Errorf("expected %#v got %#v", expectedTokenStream, ouputTokenStream)
}
}
示例10: TestEnglishStemmer
func TestEnglishStemmer(t *testing.T) {
tests := []struct {
input analysis.TokenStream
output analysis.TokenStream
}{
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("walking"),
},
&analysis.Token{
Term: []byte("talked"),
},
&analysis.Token{
Term: []byte("business"),
},
&analysis.Token{
Term: []byte("protected"),
KeyWord: true,
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("walk"),
},
&analysis.Token{
Term: []byte("talk"),
},
&analysis.Token{
Term: []byte("busi"),
},
&analysis.Token{
Term: []byte("protected"),
KeyWord: true,
},
},
},
}
cache := registry.NewCache()
stemmerFilter, err := cache.TokenFilterNamed(StemmerName)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := stemmerFilter.Filter(test.input)
if !reflect.DeepEqual(actual, test.output) {
t.Errorf("expected %s, got %s", test.output, actual)
}
}
}
示例11: TestThaiAnalyzerWihtoutOffsets
// TestThaiAnalyzerWihtoutOffsets checks the Thai analyzer's segmentation,
// lowercasing, and stop-word removal by comparing token terms only —
// Start/End offsets are deliberately not asserted here.
// NOTE(review): "Wihtout" in the function name is a typo for "Without";
// left as-is since renaming would change the test's exported name.
func TestThaiAnalyzerWihtoutOffsets(t *testing.T) {
tests := []struct {
input []byte
output analysis.TokenStream
}{
// stop words
{
input: []byte("บริษัทชื่อ XY&Z - คุยกับ [email protected]"),
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("บริษัท"),
},
&analysis.Token{
Term: []byte("ชื่อ"),
},
&analysis.Token{
Term: []byte("xy"),
},
&analysis.Token{
Term: []byte("z"),
},
&analysis.Token{
Term: []byte("คุย"),
},
&analysis.Token{
Term: []byte("xyz"),
},
&analysis.Token{
Term: []byte("demo.com"),
},
},
},
}
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed(AnalyzerName)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := analyzer.Analyze(test.input)
// Length mismatch is reported but iteration continues; the inner
// loop then pinpoints the first differing term.
if len(actual) != len(test.output) {
t.Errorf("expected length: %d, got %d", len(test.output), len(actual))
}
for i, tok := range actual {
if !reflect.DeepEqual(tok.Term, test.output[i].Term) {
t.Errorf("expected term %s (% x) got %s (% x)", test.output[i].Term, test.output[i].Term, tok.Term, tok.Term)
}
}
}
}
示例12: NewIndexMapping
// NewIndexMapping creates a new IndexMapping that will use all the default indexing rules
func NewIndexMapping() *IndexMapping {
return &IndexMapping{
TypeMapping: make(map[string]*DocumentMapping),
DefaultMapping: NewDocumentMapping(),
TypeField: defaultTypeField,
DefaultType: defaultType,
DefaultAnalyzer: defaultAnalyzer,
DefaultDateTimeParser: defaultDateTimeParser,
DefaultField: defaultField,
ByteArrayConverter: defaultByteArrayConverter,
CustomAnalysis: newCustomAnalysis(),
// Each mapping gets its own registry cache so custom analysis
// components defined on it do not leak into other mappings.
cache: registry.NewCache(),
}
}
示例13: TestJaAnalyzer
// TestJaAnalyzer verifies the Japanese analyzer: mixed hiragana/kanji and
// katakana inputs are segmented into Ideographic tokens with the expected
// positions and byte offsets.
func TestJaAnalyzer(t *testing.T) {
	tests := []struct {
		input  []byte
		output analysis.TokenStream
	}{
		{
			input: []byte("こんにちは世界"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("こんにちは"),
					Type:     analysis.Ideographic,
					Position: 1,
					Start:    0,
					End:      15,
				},
				&analysis.Token{
					Term:     []byte("世界"),
					Type:     analysis.Ideographic,
					Position: 2,
					Start:    15,
					End:      21,
				},
			},
		},
		{
			input: []byte("カタカナ"),
			output: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("カタカナ"),
					Type:     analysis.Ideographic,
					Position: 1,
					Start:    0,
					End:      12,
				},
			},
		},
	}
	cache := registry.NewCache()
	// Fix: resolve the analyzer once, before the loop. The original looked
	// it up on every iteration even though the result is loop-invariant
	// (and every sibling test in this file resolves it once up front).
	analyzer, err := cache.AnalyzerNamed(AnalyzerName)
	if err != nil {
		t.Fatal(err)
	}
	for _, test := range tests {
		actual := analyzer.Analyze(test.input)
		if !reflect.DeepEqual(actual, test.output) {
			t.Errorf("expected %v, got %v", test.output, actual)
		}
	}
}
示例14: NewIndexMapping
// NewIndexMapping creates a new IndexMapping that will use all the default indexing rules
func NewIndexMapping() *IndexMapping {
return &IndexMapping{
TypeMapping: make(map[string]*DocumentMapping),
DefaultMapping: NewDocumentMapping(),
TypeField: defaultTypeField,
DefaultType: defaultType,
DefaultAnalyzer: defaultAnalyzer,
DefaultDateTimeParser: defaultDateTimeParser,
DefaultField: defaultField,
IndexDynamic: IndexDynamic,
StoreDynamic: StoreDynamic,
CustomAnalysis: newCustomAnalysis(),
// Each mapping gets its own registry cache so custom analysis
// components defined on it do not leak into other mappings.
cache: registry.NewCache(),
}
}
示例15: TestPortugueseAnalyzer
// TestPortugueseAnalyzer checks the Portuguese analyzer. The stemming
// cases are intentionally left commented out because the current stemmer
// produces a different stem than the expected one; only stop-word removal
// is asserted ("não" must be dropped, leaving an empty stream).
func TestPortugueseAnalyzer(t *testing.T) {
tests := []struct {
input []byte
output analysis.TokenStream
}{
// stemming
// fails due to stemming discrepencies
// got quilométr instead of quilometric
// {
// input: []byte("quilométricas"),
// output: analysis.TokenStream{
// &analysis.Token{
// Term: []byte("quilometric"),
// },
// },
// },
// {
// input: []byte("quilométricos"),
// output: analysis.TokenStream{
// &analysis.Token{
// Term: []byte("quilometric"),
// },
// },
// },
// stop word
{
input: []byte("não"),
output: analysis.TokenStream{},
},
}
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed(AnalyzerName)
if err != nil {
t.Fatal(err)
}
for _, test := range tests {
actual := analyzer.Analyze(test.input)
// Fatalf here (unlike Errorf elsewhere in this file) because the
// per-term loop below would index out of range on a length mismatch.
if len(actual) != len(test.output) {
t.Fatalf("expected length: %d, got %d", len(test.output), len(actual))
}
for i, tok := range actual {
if !reflect.DeepEqual(tok.Term, test.output[i].Term) {
t.Errorf("expected term %s (% x) got %s (% x)", test.output[i].Term, test.output[i].Term, tok.Term, tok.Term)
}
}
}
}