Merge branch 'master' into firestorm
This commit is contained in:
commit
9777846206
|
@ -0,0 +1,47 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||
// except in compliance with the License. You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the
|
||||
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
// either express or implied. See the License for the specific language governing permissions
|
||||
// and limitations under the License.
|
||||
|
||||
package web
|
||||
|
||||
import (
|
||||
"github.com/blevesearch/bleve/analysis"
|
||||
"github.com/blevesearch/bleve/analysis/language/en"
|
||||
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
|
||||
webt "github.com/blevesearch/bleve/analysis/tokenizers/web"
|
||||
"github.com/blevesearch/bleve/registry"
|
||||
)
|
||||
|
||||
// Name is the name under which this analyzer is registered.
const Name = "web"
|
||||
|
||||
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
|
||||
tokenizer, err := cache.TokenizerNamed(webt.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stopEnFilter, err := cache.TokenFilterNamed(en.StopName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rv := analysis.Analyzer{
|
||||
Tokenizer: tokenizer,
|
||||
TokenFilters: []analysis.TokenFilter{
|
||||
toLowerFilter,
|
||||
stopEnFilter,
|
||||
},
|
||||
}
|
||||
return &rv, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registry.RegisterAnalyzer(Name, AnalyzerConstructor)
|
||||
}
|
|
@ -0,0 +1,42 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||
// except in compliance with the License. You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the
|
||||
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
// either express or implied. See the License for the specific language governing permissions
|
||||
// and limitations under the License.
|
||||
|
||||
package web
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/blevesearch/bleve/analysis"
|
||||
"github.com/blevesearch/bleve/analysis/tokenizers/exception"
|
||||
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
|
||||
"github.com/blevesearch/bleve/registry"
|
||||
)
|
||||
|
||||
// Name is the name under which this tokenizer is registered.
const Name = "web"

// Patterns for token classes that must survive tokenization intact.
// They are never mutated, so they are declared const rather than var.
const (
	// email matches an e-mail address (local part, quoted-string form,
	// and bracketed IP-literal domains included).
	email = `(?:[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_` + "`" + `{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])`
	// url is a liberal, case-insensitive URL matcher (scheme- or
	// www-prefixed, with balanced-paren handling for trailing parens).
	url = `(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s` + "`" + `!()\[\]{};:'".,<>?«»“”‘’]))`
	// twitterHandle matches an @handle of 1-15 word characters.
	twitterHandle = `@([a-zA-Z0-9_]){1,15}`
	// twitterHashtag matches a #hashtag of one or more word characters.
	twitterHashtag = `#([a-zA-Z0-9_])+`
)

// exceptions lists every pattern the exception tokenizer must preserve
// as a single token.
var exceptions = []string{email, url, twitterHandle, twitterHashtag}

// exceptionsRegexp is the union of all exception patterns, compiled once
// at package load time (MustCompile panics on an invalid pattern, which
// would be a programming error here).
var exceptionsRegexp = regexp.MustCompile(strings.Join(exceptions, "|"))
|
||||
|
||||
func TokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
|
||||
remainingTokenizer, err := cache.TokenizerNamed(unicode.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return exception.NewExceptionsTokenizer(exceptionsRegexp, remainingTokenizer), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registry.RegisterTokenizer(Name, TokenizerConstructor)
|
||||
}
|
|
@ -0,0 +1,143 @@
|
|||
// Copyright (c) 2014 Couchbase, Inc.
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
||||
// except in compliance with the License. You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// Unless required by applicable law or agreed to in writing, software distributed under the
|
||||
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
// either express or implied. See the License for the specific language governing permissions
|
||||
// and limitations under the License.
|
||||
|
||||
package web
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/blevesearch/bleve/analysis"
|
||||
"github.com/blevesearch/bleve/registry"
|
||||
)
|
||||
|
||||
func TestWeb(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
input []byte
|
||||
output analysis.TokenStream
|
||||
}{
|
||||
{
|
||||
[]byte("Hello info@blevesearch.com"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 5,
|
||||
Term: []byte("Hello"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 6,
|
||||
End: 26,
|
||||
Term: []byte("info@blevesearch.com"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("That http://blevesearch.com"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("That"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 27,
|
||||
Term: []byte("http://blevesearch.com"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("Hey @blevesearch"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 3,
|
||||
Term: []byte("Hey"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 4,
|
||||
End: 16,
|
||||
Term: []byte("@blevesearch"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("This #bleve"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("This"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 11,
|
||||
Term: []byte("#bleve"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
[]byte("What about @blevesearch?"),
|
||||
analysis.TokenStream{
|
||||
{
|
||||
Start: 0,
|
||||
End: 4,
|
||||
Term: []byte("What"),
|
||||
Position: 1,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 5,
|
||||
End: 10,
|
||||
Term: []byte("about"),
|
||||
Position: 2,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
{
|
||||
Start: 11,
|
||||
End: 23,
|
||||
Term: []byte("@blevesearch"),
|
||||
Position: 3,
|
||||
Type: analysis.AlphaNumeric,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cache := registry.NewCache()
|
||||
tokenizer, err := cache.TokenizerNamed(Name)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
|
||||
actual := tokenizer.Tokenize(test.input)
|
||||
if !reflect.DeepEqual(actual, test.output) {
|
||||
t.Errorf("Expected %v, got %v for %s", test.output, actual, string(test.input))
|
||||
}
|
||||
}
|
||||
}
|
|
@ -35,6 +35,7 @@ import (
|
|||
_ "github.com/blevesearch/bleve/analysis/analyzers/keyword_analyzer"
|
||||
_ "github.com/blevesearch/bleve/analysis/analyzers/simple_analyzer"
|
||||
_ "github.com/blevesearch/bleve/analysis/analyzers/standard_analyzer"
|
||||
_ "github.com/blevesearch/bleve/analysis/analyzers/web"
|
||||
|
||||
// token filters
|
||||
_ "github.com/blevesearch/bleve/analysis/token_filters/apostrophe_filter"
|
||||
|
@ -55,6 +56,7 @@ import (
|
|||
_ "github.com/blevesearch/bleve/analysis/tokenizers/regexp_tokenizer"
|
||||
_ "github.com/blevesearch/bleve/analysis/tokenizers/single_token"
|
||||
_ "github.com/blevesearch/bleve/analysis/tokenizers/unicode"
|
||||
_ "github.com/blevesearch/bleve/analysis/tokenizers/web"
|
||||
_ "github.com/blevesearch/bleve/analysis/tokenizers/whitespace_tokenizer"
|
||||
|
||||
// date time parsers
|
||||
|
|
Loading…
Reference in New Issue