0
0
Fork 0

update match_phrase query to handle multiple tokens in same pos

we now use a multiphrase query in all cases
internally it's optimized to be the same as regular phrase query
anyway, and we simply map all the tokens in the stream into
a multi-phrase query with the appropriate structure
This commit is contained in:
Marty Schoch 2017-02-10 17:12:13 -05:00
parent a5d1d7974c
commit 56a79528c3
2 changed files with 105 additions and 7 deletions

View File

@ -81,7 +81,7 @@ func (q *MatchPhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping,
tokens := analyzer.Analyze([]byte(q.MatchPhrase))
if len(tokens) > 0 {
phrase := tokenStreamToPhrase(tokens)
phraseQuery := NewPhraseQuery(phrase, field)
phraseQuery := NewMultiPhraseQuery(phrase, field)
phraseQuery.SetBoost(q.BoostVal.Value())
return phraseQuery.Searcher(i, m, options)
}
@ -89,7 +89,7 @@ func (q *MatchPhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping,
return noneQuery.Searcher(i, m, options)
}
func tokenStreamToPhrase(tokens analysis.TokenStream) []string {
func tokenStreamToPhrase(tokens analysis.TokenStream) [][]string {
firstPosition := int(^uint(0) >> 1)
lastPosition := 0
for _, token := range tokens {
@ -102,13 +102,10 @@ func tokenStreamToPhrase(tokens analysis.TokenStream) []string {
}
phraseLen := lastPosition - firstPosition + 1
if phraseLen > 0 {
rv := make([]string, phraseLen)
for i := 0; i < phraseLen; i++ {
rv[i] = ""
}
rv := make([][]string, phraseLen)
for _, token := range tokens {
pos := token.Position - firstPosition
rv[pos] = string(token.Term)
rv[pos] = append(rv[pos], string(token.Term))
}
return rv
}

View File

@ -0,0 +1,101 @@
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package query
import (
"reflect"
"testing"
"github.com/blevesearch/bleve/analysis"
)
// TestTokenStreamToPhrase verifies that tokenStreamToPhrase converts an
// analyzed token stream into the [][]string layout consumed by
// NewMultiPhraseQuery: one outer slot per token position, a nil slot for a
// positional gap (e.g. removed stop words), and all terms sharing a position
// collected into the same inner slice.
func TestTokenStreamToPhrase(t *testing.T) {
	tests := []struct {
		tokens analysis.TokenStream
		result [][]string
	}{
		// empty token stream returns nil
		{
			tokens: analysis.TokenStream{},
			result: nil,
		},
		// typical token stream: one term per consecutive position
		{
			tokens: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("one"),
					Position: 1,
				},
				&analysis.Token{
					Term:     []byte("two"),
					Position: 2,
				},
			},
			result: [][]string{{"one"}, {"two"}},
		},
		// token stream containing a gap (usually from stop words);
		// the missing position must surface as a nil slot
		{
			tokens: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("wag"),
					Position: 1,
				},
				&analysis.Token{
					Term:     []byte("dog"),
					Position: 3,
				},
			},
			result: [][]string{{"wag"}, nil, {"dog"}},
		},
		// token stream containing multiple tokens at the same position
		// (e.g. from an ngram or synonym filter)
		{
			tokens: analysis.TokenStream{
				&analysis.Token{
					Term:     []byte("nia"),
					Position: 1,
				},
				&analysis.Token{
					Term:     []byte("onia"),
					Position: 1,
				},
				&analysis.Token{
					Term:     []byte("donia"),
					Position: 1,
				},
				&analysis.Token{
					Term:     []byte("imo"),
					Position: 2,
				},
				&analysis.Token{
					Term:     []byte("nimo"),
					Position: 2,
				},
				&analysis.Token{
					Term:     []byte("ónimo"),
					Position: 2,
				},
			},
			result: [][]string{{"nia", "onia", "donia"}, {"imo", "nimo", "ónimo"}},
		},
	}
	for i, test := range tests {
		actual := tokenStreamToPhrase(test.tokens)
		if !reflect.DeepEqual(actual, test.result) {
			// Errorf (not Fatalf) so every failing case in the table is
			// reported in a single run.
			t.Errorf("expected %#v got %#v for test %d", test.result, actual, i)
		}
	}
}