e472b3e807
The goal of the "web" tokenizer is to recognize web things like - email addresses - URLs - twitter @handles and #hashtags This implementation uses regexp exceptions. There will most likely be endless debate about the regular expressions. These were chosen as "good enough for now". There is also a "web" analyzer. This is just the "standard" analyzer, but using the "web" tokenizer instead of the "unicode" one. NOTE: after processing the exceptions, it still falls back to the standard "unicode" one. For many users, you can simply set your mapping's default analyzer to be "web". closes #269
48 lines
1.5 KiB
Go
48 lines
1.5 KiB
Go
// Copyright (c) 2014 Couchbase, Inc.
|
|
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
|
|
// except in compliance with the License. You may obtain a copy of the License at
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
// Unless required by applicable law or agreed to in writing, software distributed under the
|
|
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
|
// either express or implied. See the License for the specific language governing permissions
|
|
// and limitations under the License.
|
|
|
|
package web
|
|
|
|
import (
|
|
"github.com/blevesearch/bleve/analysis"
|
|
"github.com/blevesearch/bleve/analysis/language/en"
|
|
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
|
|
webt "github.com/blevesearch/bleve/analysis/tokenizers/web"
|
|
"github.com/blevesearch/bleve/registry"
|
|
)
|
|
|
|
// Name is the registry name of this analyzer ("web").
const Name = "web"
|
|
|
|
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
|
|
tokenizer, err := cache.TokenizerNamed(webt.Name)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
stopEnFilter, err := cache.TokenFilterNamed(en.StopName)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
rv := analysis.Analyzer{
|
|
Tokenizer: tokenizer,
|
|
TokenFilters: []analysis.TokenFilter{
|
|
toLowerFilter,
|
|
stopEnFilter,
|
|
},
|
|
}
|
|
return &rv, nil
|
|
}
|
|
|
|
// init registers the "web" analyzer constructor under Name so that
// index mappings can select this analyzer by the name "web".
func init() {
	registry.RegisterAnalyzer(Name, AnalyzerConstructor)
}
|