
change cjk analyzer to use unicode tokenizer

change cjk bigram analyzer to work with multi-rune terms
add cjk width filter to replace full unicode normalization

these changes make the cjk analyzer behave more like elasticsearch
they also remove the dependency on the whitespace tokenizer
which is now free to also behave more like lucene/es

fixes #33
Marty Schoch 2016-06-10 13:04:40 -04:00
parent b91c5375e4
commit 043a3bfb7c
6 changed files with 341 additions and 63 deletions
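
For orientation (not part of the commit), the sketch below drives the updated "cjk" analyzer through the registry the same way the new benchmark further down does. Everything used here appears in this diff except the import path of the cjk package and the sample text, which are assumptions.

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/registry"

	// blank import so the package's init() registers the "cjk" analyzer,
	// the "cjk_width" filter and the bigram filter (import path assumed)
	_ "github.com/blevesearch/bleve/analysis/language/cjk"
)

func main() {
	cache := registry.NewCache()
	analyzer, err := cache.AnalyzerNamed("cjk")
	if err != nil {
		panic(err)
	}
	// fullwidth ASCII, halfwidth katakana and han text in one input
	for _, t := range analyzer.Analyze([]byte("Ｔｅｓｔ ﾊﾟﾅｿﾆｯｸ 一二三")) {
		fmt.Printf("%d %s [%d:%d]\n", t.Position, t.Term, t.Start, t.End)
	}
}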

View File

@@ -14,18 +14,20 @@ import (
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/token_filters/unicode_normalize"
"github.com/blevesearch/bleve/analysis/tokenizers/whitespace_tokenizer"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
)
const AnalyzerName = "cjk"
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
whitespaceTokenizer, err := cache.TokenizerNamed(whitespace_tokenizer.Name)
whitespaceTokenizer, err := cache.TokenizerNamed(unicode.Name)
if err != nil {
return nil, err
}
widthFilter, err := cache.TokenFilterNamed(WidthName)
if err != nil {
return nil, err
}
normalizeFilter := unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKD)
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
if err != nil {
return nil, err
@@ -37,7 +39,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
rv := analysis.Analyzer{
Tokenizer: whitespaceTokenizer,
TokenFilters: []analysis.TokenFilter{
normalizeFilter,
widthFilter,
toLowerFilter,
bigramFilter,
},
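
Worth noting why the bigram filter further down had to change: the unicode tokenizer hands back a whole run of CJK characters as one multi-rune Ideographic token (the パイプライン test case added below shows exactly that), so the filter can no longer assume one character per token. Here is a minimal sketch of the tokenizer on its own; the constructor name NewUnicodeTokenizer is an assumption, only the registered name "unicode" appears in this diff.

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
)

func main() {
	tokenizer := unicode.NewUnicodeTokenizer()
	// the katakana run is expected to surface as a single Ideographic token
	// spanning 18 bytes, the latin word as one AlphaNumeric token
	for _, t := range tokenizer.Tokenize([]byte("パイプライン pipeline")) {
		fmt.Printf("%s type=%d [%d:%d]\n", t.Term, t.Type, t.Start, t.End)
	}
}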

View File

@@ -164,35 +164,35 @@ func TestCJKAnalyzer(t *testing.T) {
&analysis.Token{
Term: []byte("五六"),
Type: analysis.Double,
Position: 5,
Position: 4,
Start: 14,
End: 20,
},
&analysis.Token{
Term: []byte("六七"),
Type: analysis.Double,
Position: 6,
Position: 5,
Start: 17,
End: 23,
},
&analysis.Token{
Term: []byte("七八"),
Type: analysis.Double,
Position: 7,
Position: 6,
Start: 20,
End: 26,
},
&analysis.Token{
Term: []byte("八九"),
Type: analysis.Double,
Position: 8,
Position: 7,
Start: 23,
End: 29,
},
&analysis.Token{
Term: []byte("十"),
Type: analysis.Single,
Position: 10,
Position: 8,
Start: 30,
End: 33,
},
@@ -306,7 +306,7 @@ func TestCJKAnalyzer(t *testing.T) {
&analysis.Token{
Term: []byte("test"),
Type: analysis.AlphaNumeric,
Position: 3,
Position: 2,
Start: 6,
End: 10,
},
@@ -365,35 +365,35 @@ func TestCJKAnalyzer(t *testing.T) {
&analysis.Token{
Term: []byte("abc"),
Type: analysis.AlphaNumeric,
Position: 6,
Position: 5,
Start: 15,
End: 18,
},
&analysis.Token{
Term: []byte("かき"),
Type: analysis.Double,
Position: 7,
Position: 6,
Start: 18,
End: 24,
},
&analysis.Token{
Term: []byte("きく"),
Type: analysis.Double,
Position: 8,
Position: 7,
Start: 21,
End: 27,
},
&analysis.Token{
Term: []byte("くけ"),
Type: analysis.Double,
Position: 9,
Position: 8,
Start: 24,
End: 30,
},
&analysis.Token{
Term: []byte("けこ"),
Type: analysis.Double,
Position: 10,
Position: 9,
Start: 27,
End: 33,
},
@@ -433,49 +433,49 @@ func TestCJKAnalyzer(t *testing.T) {
&analysis.Token{
Term: []byte("ab"),
Type: analysis.AlphaNumeric,
Position: 6,
Position: 5,
Start: 15,
End: 17,
},
&analysis.Token{
Term: []byte("ん"),
Type: analysis.Single,
Position: 7,
Position: 6,
Start: 17,
End: 20,
},
&analysis.Token{
Term: []byte("c"),
Type: analysis.AlphaNumeric,
Position: 8,
Position: 7,
Start: 20,
End: 21,
},
&analysis.Token{
Term: []byte("かき"),
Type: analysis.Double,
Position: 9,
Position: 8,
Start: 21,
End: 27,
},
&analysis.Token{
Term: []byte("きく"),
Type: analysis.Double,
Position: 10,
Position: 9,
Start: 24,
End: 30,
},
&analysis.Token{
Term: []byte("くけ"),
Type: analysis.Double,
Position: 11,
Position: 10,
Start: 27,
End: 33,
},
&analysis.Token{
Term: []byte("こ"),
Type: analysis.Single,
Position: 13,
Position: 11,
Start: 34,
End: 37,
},
@@ -618,3 +618,20 @@ func TestCJKAnalyzer(t *testing.T) {
}
}
}
func BenchmarkCJKAnalyzer(b *testing.B) {
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed(AnalyzerName)
if err != nil {
b.Fatal(err)
}
for i := 0; i < b.N; i++ {
analyzer.Analyze(bleveWikiArticleJapanese)
}
}
var bleveWikiArticleJapanese = []byte(`加圧容器に貯蔵されている液体物質はその時の気液平衡状態にあるが火災により容器が加熱されていると容器内の液体はその物質の大気圧のもとでの沸点より十分に高い温度まで加熱され圧力も高くなるこの状態で容器が破裂すると容器内部の圧力は瞬間的に大気圧にまで低下する
この時に容器内の平衡状態が破られ液体は突沸し気体になることで爆発現象を起こす液化石油ガスなどではさらに拡散して空気と混ざったガスが自由空間蒸気雲爆発を起こす液化石油ガスなどの常温常圧で気体になる物を高い圧力で液化して収納している容器あるいはそのような液体を輸送するためのパイプラインや配管などが火災などによって破壊されたときに起きる
ブリーブという現象が明らかになったのはフランスリヨンの郊外にあるフェザンという町のフェザン製油所ウニオンゼネラルペトロールで大規模な爆発火災事故が発生したときだと言われている
中身の液体が高温高圧の水である場合には水蒸気爆発と呼ばれる`)

View File

@@ -10,7 +10,9 @@
package cjk
import (
"bytes"
"container/ring"
"unicode/utf8"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
@@ -31,47 +33,70 @@ func NewCJKBigramFilter(outputUnigram bool) *CJKBigramFilter {
func (s *CJKBigramFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
r := ring.New(2)
itemsInRing := 0
pos := 1
outputPos := 1
rv := make(analysis.TokenStream, 0, len(input))
for _, token := range input {
if token.Type == analysis.Ideographic {
if itemsInRing > 0 {
// if items already buffered
// check to see if this is aligned
curr := r.Value.(*analysis.Token)
if token.Start-curr.End != 0 {
// not aligned flush
flushToken := s.flush(r, &itemsInRing)
if flushToken != nil {
rv = append(rv, flushToken)
for _, tokout := range input {
if tokout.Type == analysis.Ideographic {
runes := bytes.Runes(tokout.Term)
sofar := 0
for _, run := range runes {
rlen := utf8.RuneLen(run)
token := &analysis.Token{
Term: tokout.Term[sofar : sofar+rlen],
Start: tokout.Start + sofar,
End: tokout.Start + sofar + rlen,
Position: pos,
Type: tokout.Type,
KeyWord: tokout.KeyWord,
}
pos++
sofar += rlen
if itemsInRing > 0 {
// if items already buffered
// check to see if this is aligned
curr := r.Value.(*analysis.Token)
if token.Start-curr.End != 0 {
// not aligned flush
flushToken := s.flush(r, &itemsInRing, outputPos)
if flushToken != nil {
outputPos++
rv = append(rv, flushToken)
}
}
}
}
// now we can add this token to the buffer
r = r.Next()
r.Value = token
if itemsInRing < 2 {
itemsInRing++
}
if itemsInRing > 1 && s.outputUnigram {
unigram := s.buildUnigram(r, &itemsInRing)
if unigram != nil {
rv = append(rv, unigram)
// now we can add this token to the buffer
r = r.Next()
r.Value = token
if itemsInRing < 2 {
itemsInRing++
}
if itemsInRing > 1 && s.outputUnigram {
unigram := s.buildUnigram(r, &itemsInRing, outputPos)
if unigram != nil {
rv = append(rv, unigram)
}
}
bigramToken := s.outputBigram(r, &itemsInRing, outputPos)
if bigramToken != nil {
rv = append(rv, bigramToken)
outputPos++
}
}
bigramToken := s.outputBigram(r, &itemsInRing)
if bigramToken != nil {
rv = append(rv, bigramToken)
}
} else {
// flush anything already buffered
flushToken := s.flush(r, &itemsInRing)
flushToken := s.flush(r, &itemsInRing, outputPos)
if flushToken != nil {
rv = append(rv, flushToken)
outputPos++
}
// output this token as is
rv = append(rv, token)
tokout.Position = outputPos
rv = append(rv, tokout)
outputPos++
}
}
@@ -80,7 +105,7 @@ func (s *CJKBigramFilter) Filter(input analysis.TokenStream) analysis.TokenStrea
if itemsInRing == 2 {
r = r.Next()
}
unigram := s.buildUnigram(r, &itemsInRing)
unigram := s.buildUnigram(r, &itemsInRing, outputPos)
if unigram != nil {
rv = append(rv, unigram)
}
@@ -88,17 +113,17 @@ func (s *CJKBigramFilter) Filter(input analysis.TokenStream) analysis.TokenStrea
return rv
}
func (s *CJKBigramFilter) flush(r *ring.Ring, itemsInRing *int) *analysis.Token {
func (s *CJKBigramFilter) flush(r *ring.Ring, itemsInRing *int, pos int) *analysis.Token {
var rv *analysis.Token
if *itemsInRing == 1 {
rv = s.buildUnigram(r, itemsInRing)
rv = s.buildUnigram(r, itemsInRing, pos)
}
r.Value = nil
*itemsInRing = 0
return rv
}
func (s *CJKBigramFilter) outputBigram(r *ring.Ring, itemsInRing *int) *analysis.Token {
func (s *CJKBigramFilter) outputBigram(r *ring.Ring, itemsInRing *int, pos int) *analysis.Token {
if *itemsInRing == 2 {
thisShingleRing := r.Move(-1)
shingledBytes := make([]byte, 0)
@@ -115,7 +140,7 @@ func (s *CJKBigramFilter) outputBigram(r *ring.Ring, itemsInRing *int) *analysis
token := analysis.Token{
Type: analysis.Double,
Term: shingledBytes,
Position: prev.Position,
Position: pos,
Start: prev.Start,
End: curr.End,
}
@@ -124,7 +149,7 @@ func (s *CJKBigramFilter) outputBigram(r *ring.Ring, itemsInRing *int) *analysis
return nil
}
func (s *CJKBigramFilter) buildUnigram(r *ring.Ring, itemsInRing *int) *analysis.Token {
func (s *CJKBigramFilter) buildUnigram(r *ring.Ring, itemsInRing *int, pos int) *analysis.Token {
if *itemsInRing == 2 {
thisShingleRing := r.Move(-1)
// do first token
@@ -132,7 +157,7 @@ func (s *CJKBigramFilter) buildUnigram(r *ring.Ring, itemsInRing *int) *analysis
token := analysis.Token{
Type: analysis.Single,
Term: prev.Term,
Position: prev.Position,
Position: pos,
Start: prev.Start,
End: prev.End,
}
@@ -143,7 +168,7 @@ func (s *CJKBigramFilter) buildUnigram(r *ring.Ring, itemsInRing *int) *analysis
token := analysis.Token{
Type: analysis.Single,
Term: prev.Term,
Position: prev.Position,
Position: pos,
Start: prev.Start,
End: prev.End,
}
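
A hedged usage sketch (not from the commit) of the reworked filter: feeding it one multi-rune Ideographic term directly. "パイプライン" is six katakana runes of three bytes each, so the filter slices the original Term at byte offsets 0, 3, 6, ... and emits five overlapping bigrams with consecutive positions, which is what the new test case in the next file asserts. The import path is an assumption.

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis"
	"github.com/blevesearch/bleve/analysis/language/cjk" // path assumed
)

func main() {
	filter := cjk.NewCJKBigramFilter(false) // outputUnigram = false
	in := analysis.TokenStream{
		&analysis.Token{
			Term:     []byte("パイプライン"),
			Type:     analysis.Ideographic,
			Position: 1,
			Start:    0,
			End:      18,
		},
	}
	for _, t := range filter.Filter(in) {
		fmt.Printf("%d %s [%d:%d]\n", t.Position, t.Term, t.Start, t.End)
	}
	// expected: パイ イプ プラ ライ イン at positions 1 through 5
}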

View File

@@ -23,6 +23,7 @@ func TestCJKBigramFilter(t *testing.T) {
input analysis.TokenStream
output analysis.TokenStream
}{
// first test that non-adjacent terms are not combined
{
outputUnigram: false,
input: analysis.TokenStream{
@@ -38,7 +39,7 @@ func TestCJKBigramFilter(t *testing.T) {
Type: analysis.Ideographic,
Position: 2,
Start: 5,
End: 7,
End: 8,
},
},
output: analysis.TokenStream{
@@ -54,7 +55,7 @@ func TestCJKBigramFilter(t *testing.T) {
Type: analysis.Single,
Position: 2,
Start: 5,
End: 7,
End: 8,
},
},
},
@@ -395,19 +396,68 @@ func TestCJKBigramFilter(t *testing.T) {
&analysis.Token{
Term: []byte("cat"),
Type: analysis.AlphaNumeric,
Position: 6,
Position: 5,
Start: 12,
End: 15,
},
&analysis.Token{
Term: []byte("世界"),
Type: analysis.Double,
Position: 7,
Position: 6,
Start: 18,
End: 24,
},
},
},
{
outputUnigram: false,
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("パイプライン"),
Type: analysis.Ideographic,
Position: 1,
Start: 0,
End: 18,
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("パイ"),
Type: analysis.Double,
Position: 1,
Start: 0,
End: 6,
},
&analysis.Token{
Term: []byte("イプ"),
Type: analysis.Double,
Position: 2,
Start: 3,
End: 9,
},
&analysis.Token{
Term: []byte("プラ"),
Type: analysis.Double,
Position: 3,
Start: 6,
End: 12,
},
&analysis.Token{
Term: []byte("ライ"),
Type: analysis.Double,
Position: 4,
Start: 9,
End: 15,
},
&analysis.Token{
Term: []byte("イン"),
Type: analysis.Double,
Position: 5,
Start: 12,
End: 18,
},
},
},
}
for _, test := range tests {

View File

@@ -0,0 +1,96 @@
// Copyright (c) 2016 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package cjk
import (
"bytes"
"unicode/utf8"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
)
const WidthName = "cjk_width"
type CJKWidthFilter struct{}
func NewCJKWidthFilter() *CJKWidthFilter {
return &CJKWidthFilter{}
}
func (s *CJKWidthFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
for _, token := range input {
runeCount := utf8.RuneCount(token.Term)
runes := bytes.Runes(token.Term)
for i := 0; i < runeCount; i++ {
ch := runes[i]
if ch >= 0xFF01 && ch <= 0xFF5E {
// fullwidth ASCII variants
runes[i] -= 0xFEE0
} else if ch >= 0xFF65 && ch <= 0xFF9F {
// halfwidth Katakana variants
if (ch == 0xFF9E || ch == 0xFF9F) && i > 0 && combine(runes, i, ch) {
runes = analysis.DeleteRune(runes, i)
i--
runeCount = len(runes)
} else {
runes[i] = kanaNorm[ch-0xFF65]
}
}
}
token.Term = analysis.BuildTermFromRunes(runes)
}
return input
}
var kanaNorm = []rune{
0x30fb, 0x30f2, 0x30a1, 0x30a3, 0x30a5, 0x30a7, 0x30a9, 0x30e3, 0x30e5,
0x30e7, 0x30c3, 0x30fc, 0x30a2, 0x30a4, 0x30a6, 0x30a8, 0x30aa, 0x30ab,
0x30ad, 0x30af, 0x30b1, 0x30b3, 0x30b5, 0x30b7, 0x30b9, 0x30bb, 0x30bd,
0x30bf, 0x30c1, 0x30c4, 0x30c6, 0x30c8, 0x30ca, 0x30cb, 0x30cc, 0x30cd,
0x30ce, 0x30cf, 0x30d2, 0x30d5, 0x30d8, 0x30db, 0x30de, 0x30df, 0x30e0,
0x30e1, 0x30e2, 0x30e4, 0x30e6, 0x30e8, 0x30e9, 0x30ea, 0x30eb, 0x30ec,
0x30ed, 0x30ef, 0x30f3, 0x3099, 0x309A,
}
var kanaCombineVoiced = []rune{
78, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1,
0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
}
var kanaCombineHalfVoiced = []rune{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 2,
0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
}
func combine(text []rune, pos int, r rune) bool {
prev := text[pos-1]
if prev >= 0x30A6 && prev <= 0x30FD {
if r == 0xFF9F {
text[pos-1] += kanaCombineHalfVoiced[prev-0x30A6]
} else {
text[pos-1] += kanaCombineVoiced[prev-0x30A6]
}
return text[pos-1] != prev
}
return false
}
func CJKWidthFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
return NewCJKWidthFilter(), nil
}
func init() {
registry.RegisterTokenFilter(WidthName, CJKWidthFilterConstructor)
}
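
A quick sketch (import path assumed, not part of the commit) of running the width filter on its own; it also spells out the table arithmetic above: the halfwidth katakana ﾊ (U+FF8A) first maps through kanaNorm to ハ (U+30CF), and a following ﾞ/ﾟ mark then shifts that previous rune by the kanaCombineVoiced/kanaCombineHalfVoiced entry, e.g. ハ+1 = バ and ハ+2 = パ, after which the mark itself is deleted.

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis"
	"github.com/blevesearch/bleve/analysis/language/cjk" // path assumed
)

func main() {
	filter := cjk.NewCJKWidthFilter()
	in := analysis.TokenStream{
		&analysis.Token{Term: []byte("Ｔｅｓｔ")},  // fullwidth ASCII -> "Test"
		&analysis.Token{Term: []byte("ﾊﾟﾅｿﾆｯｸ")}, // halfwidth kana + marks -> "パナソニック"
	}
	for _, t := range filter.Filter(in) {
		fmt.Println(string(t.Term))
	}
}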

View File

@@ -0,0 +1,88 @@
// Copyright (c) 2016 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package cjk
import (
"reflect"
"testing"
"github.com/blevesearch/bleve/analysis"
)
func TestCJKWidthFilter(t *testing.T) {
tests := []struct {
input analysis.TokenStream
output analysis.TokenStream
}{
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte(""),
},
&analysis.Token{
Term: []byte(""),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("Test"),
},
&analysis.Token{
Term: []byte("1234"),
},
},
},
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("カタカナ"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("カタカナ"),
},
},
},
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("ヴィッツ"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("ヴィッツ"),
},
},
},
{
input: analysis.TokenStream{
&analysis.Token{
Term: []byte("パナソニック"),
},
},
output: analysis.TokenStream{
&analysis.Token{
Term: []byte("パナソニック"),
},
},
},
}
for _, test := range tests {
cjkWidthFilter := NewCJKWidthFilter()
actual := cjkWidthFilter.Filter(test.input)
if !reflect.DeepEqual(actual, test.output) {
t.Errorf("expected %s, got %s", test.output, actual)
}
}
}