0
0

added apostrophe filter to improve Turkish analyzer

closes #27
This commit is contained in:
Marty Schoch 2014-08-06 08:50:00 -04:00
parent 649a4999a1
commit d84187fd24
3 changed files with 139 additions and 1 deletions

View File

@ -0,0 +1,41 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package apostrophe_filter
import (
"bytes"
"github.com/couchbaselabs/bleve/analysis"
)
// RIGHT_SINGLE_QUOTATION_MARK is U+2019 ('\u2019'), the typographic
// apostrophe many texts use in place of the ASCII apostrophe.
const RIGHT_SINGLE_QUOTATION_MARK = "\u2019"

// APOSTROPHE is the ASCII apostrophe character.
const APOSTROPHE = "'"

// APOSTROPHES is the set of characters treated as apostrophes by the filter.
const APOSTROPHES = APOSTROPHE + RIGHT_SINGLE_QUOTATION_MARK

// ApostropheFilter is a token filter that truncates each token at its
// first apostrophe, discarding the apostrophe and everything after it.
type ApostropheFilter struct{}

// NewApostropheFilter returns a new ApostropheFilter.
func NewApostropheFilter() *ApostropheFilter {
	return &ApostropheFilter{}
}
// Filter truncates each token at the first character of APOSTROPHES
// found in its term (ASCII apostrophe or U+2019), keeping only the
// prefix before it. Tokens containing no apostrophe pass through
// unchanged. Tokens are modified in place and the stream length is
// preserved.
func (s *ApostropheFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	// exactly one output token per input token, so pre-size the result
	rv := make(analysis.TokenStream, 0, len(input))
	for _, token := range input {
		firstApostrophe := bytes.IndexAny(token.Term, APOSTROPHES)
		if firstApostrophe >= 0 {
			// found an apostrophe; drop it and the trailing suffix
			token.Term = token.Term[:firstApostrophe]
		}
		rv = append(rv, token)
	}
	return rv
}

View File

@ -0,0 +1,93 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package apostrophe_filter
import (
"reflect"
"testing"
"github.com/couchbaselabs/bleve/analysis"
)
// TestApostropheFilter verifies that the filter strips everything from
// the first apostrophe onward and leaves apostrophe-free tokens intact.
func TestApostropheFilter(t *testing.T) {
	// every case in this table uses a stream holding a single token
	stream := func(term string) analysis.TokenStream {
		return analysis.TokenStream{
			&analysis.Token{
				Term: []byte(term),
			},
		}
	}
	testCases := []struct {
		input  analysis.TokenStream
		output analysis.TokenStream
	}{
		{input: stream("Türkiye'de"), output: stream("Türkiye")},
		{input: stream("2003'te"), output: stream("2003")},
		{input: stream("Van"), output: stream("Van")},
		{input: stream("Gölü'nü"), output: stream("Gölü")},
		{input: stream("gördüm"), output: stream("gördüm")},
	}
	for _, tc := range testCases {
		filter := NewApostropheFilter()
		actual := filter.Filter(tc.input)
		if !reflect.DeepEqual(actual, tc.output) {
			t.Errorf("expected %s, got %s", tc.output[0].Term, actual[0].Term)
		}
	}
}

View File

@ -23,6 +23,7 @@ import (
"github.com/couchbaselabs/bleve/analysis/tokenizers/single_token" "github.com/couchbaselabs/bleve/analysis/tokenizers/single_token"
"github.com/couchbaselabs/bleve/analysis/tokenizers/unicode_word_boundary" "github.com/couchbaselabs/bleve/analysis/tokenizers/unicode_word_boundary"
"github.com/couchbaselabs/bleve/analysis/token_filters/apostrophe_filter"
"github.com/couchbaselabs/bleve/analysis/token_filters/cld2" "github.com/couchbaselabs/bleve/analysis/token_filters/cld2"
"github.com/couchbaselabs/bleve/analysis/token_filters/elision_filter" "github.com/couchbaselabs/bleve/analysis/token_filters/elision_filter"
"github.com/couchbaselabs/bleve/analysis/token_filters/length_filter" "github.com/couchbaselabs/bleve/analysis/token_filters/length_filter"
@ -179,6 +180,9 @@ func init() {
Config.Analysis.TokenFilters["short"] = length_filter.NewLengthFilter(3, -1) Config.Analysis.TokenFilters["short"] = length_filter.NewLengthFilter(3, -1)
Config.Analysis.TokenFilters["long"] = length_filter.NewLengthFilter(-1, 255) Config.Analysis.TokenFilters["long"] = length_filter.NewLengthFilter(-1, 255)
Config.Analysis.TokenFilters["to_lower"] = lower_case_filter.NewLowerCaseFilter() Config.Analysis.TokenFilters["to_lower"] = lower_case_filter.NewLowerCaseFilter()
Config.Analysis.TokenFilters["apostrophe"] = apostrophe_filter.NewApostropheFilter()
// register stemmer filters
Config.Analysis.TokenFilters["stemmer_da"] = stemmer_filter.MustNewStemmerFilter("danish") Config.Analysis.TokenFilters["stemmer_da"] = stemmer_filter.MustNewStemmerFilter("danish")
Config.Analysis.TokenFilters["stemmer_nl"] = stemmer_filter.MustNewStemmerFilter("dutch") Config.Analysis.TokenFilters["stemmer_nl"] = stemmer_filter.MustNewStemmerFilter("dutch")
Config.Analysis.TokenFilters["stemmer_en"] = stemmer_filter.MustNewStemmerFilter("english") Config.Analysis.TokenFilters["stemmer_en"] = stemmer_filter.MustNewStemmerFilter("english")
@ -309,7 +313,7 @@ func init() {
Config.Analysis.Analyzers["es"] = spanishAnalyzer Config.Analysis.Analyzers["es"] = spanishAnalyzer
swedishAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode", []string{"to_lower", "stop_token_sv", "stemmer_sv"}) swedishAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode", []string{"to_lower", "stop_token_sv", "stemmer_sv"})
Config.Analysis.Analyzers["sv"] = swedishAnalyzer Config.Analysis.Analyzers["sv"] = swedishAnalyzer
turkishAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode", []string{"to_lower", "stop_token_tr", "stemmer_tr"}) turkishAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode", []string{"apostrophe", "to_lower", "stop_token_tr", "stemmer_tr"})
Config.Analysis.Analyzers["tr"] = turkishAnalyzer Config.Analysis.Analyzers["tr"] = turkishAnalyzer
thaiAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode_th", []string{"to_lower", "stop_token_th"}) thaiAnalyzer := Config.MustBuildNewAnalyzer([]string{}, "unicode_th", []string{"to_lower", "stop_token_th"})
Config.Analysis.Analyzers["th"] = thaiAnalyzer Config.Analysis.Analyzers["th"] = thaiAnalyzer