//  Copyright (c) 2014 Couchbase, Inc.
//
//  Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
//  except in compliance with the License. You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//  Unless required by applicable law or agreed to in writing, software distributed under the
//  License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
//  either express or implied. See the License for the specific language governing permissions
//  and limitations under the License.
package custom_analyzer
import (
"fmt"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
)
// Name is the key under which this analyzer is registered in the bleve
// registry (see init below).
const Name = "custom"
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
|
|
|
|
|
2015-01-07 19:38:20 +01:00
|
|
|
var err error
|
2014-09-01 19:55:23 +02:00
|
|
|
var charFilters []analysis.CharFilter
|
2015-01-07 19:55:16 +01:00
|
|
|
charFiltersNames, ok := config["char_filters"].([]string)
|
|
|
|
if ok {
|
2015-01-07 19:38:20 +01:00
|
|
|
charFilters, err = getCharFilters(charFiltersNames, cache)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2014-09-01 19:55:23 +02:00
|
|
|
}
|
2015-01-07 19:55:16 +01:00
|
|
|
} else {
|
|
|
|
charFiltersNamesInterfaceSlice, ok := config["char_filters"].([]interface{})
|
|
|
|
if ok {
|
|
|
|
charFiltersNames, err := convertInterfaceSliceToStringSlice(charFiltersNamesInterfaceSlice, "char filter")
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
charFilters, err = getCharFilters(charFiltersNames, cache)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
2014-09-01 19:55:23 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
tokenizerName, ok := config["tokenizer"].(string)
|
|
|
|
if !ok {
|
|
|
|
return nil, fmt.Errorf("must specify tokenizer")
|
|
|
|
}
|
|
|
|
|
|
|
|
tokenizer, err := cache.TokenizerNamed(tokenizerName)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var tokenFilters []analysis.TokenFilter
|
2015-01-07 19:55:16 +01:00
|
|
|
tokenFiltersNames, ok := config["token_filters"].([]string)
|
|
|
|
if ok {
|
2015-01-07 19:38:20 +01:00
|
|
|
tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2014-09-01 19:55:23 +02:00
|
|
|
}
|
2015-01-07 19:55:16 +01:00
|
|
|
} else {
|
|
|
|
tokenFiltersNamesInterfaceSlice, ok := config["token_filters"].([]interface{})
|
|
|
|
if ok {
|
|
|
|
tokenFiltersNames, err := convertInterfaceSliceToStringSlice(tokenFiltersNamesInterfaceSlice, "token filter")
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
tokenFilters, err = getTokenFilters(tokenFiltersNames, cache)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
2014-09-01 19:55:23 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
rv := analysis.Analyzer{
|
|
|
|
Tokenizer: tokenizer,
|
|
|
|
}
|
|
|
|
if charFilters != nil {
|
|
|
|
rv.CharFilters = charFilters
|
|
|
|
}
|
|
|
|
if tokenFilters != nil {
|
|
|
|
rv.TokenFilters = tokenFilters
|
|
|
|
}
|
|
|
|
return &rv, nil
|
|
|
|
}
// init registers AnalyzerConstructor under Name so this analyzer can be
// requested by name ("custom") through the bleve registry.
func init() {
	registry.RegisterAnalyzer(Name, AnalyzerConstructor)
}
func getCharFilters(charFilterNames []string, cache *registry.Cache) ([]analysis.CharFilter, error) {
|
|
|
|
charFilters := make([]analysis.CharFilter, len(charFilterNames))
|
|
|
|
for i, charFilterName := range charFilterNames {
|
|
|
|
charFilter, err := cache.CharFilterNamed(charFilterName)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
charFilters[i] = charFilter
|
|
|
|
}
|
|
|
|
|
|
|
|
return charFilters, nil
|
|
|
|
}
func getTokenFilters(tokenFilterNames []string, cache *registry.Cache) ([]analysis.TokenFilter, error) {
|
|
|
|
tokenFilters := make([]analysis.TokenFilter, len(tokenFilterNames))
|
|
|
|
for i, tokenFilterName := range tokenFilterNames {
|
|
|
|
tokenFilter, err := cache.TokenFilterNamed(tokenFilterName)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
tokenFilters[i] = tokenFilter
|
|
|
|
}
|
|
|
|
|
|
|
|
return tokenFilters, nil
|
|
|
|
}
// convertInterfaceSliceToStringSlice converts a []interface{} whose
// elements are all strings into a []string. objType names the kind of
// object being converted (e.g. "char filter") and is used only in the
// error message. It returns an error on the first non-string element.
func convertInterfaceSliceToStringSlice(interfaceSlice []interface{}, objType string) ([]string, error) {
	stringSlice := make([]string, len(interfaceSlice))
	for i, obj := range interfaceSlice {
		str, ok := obj.(string)
		if !ok {
			// Constant format string instead of the previous
			// fmt.Errorf(objType + "..."): a '%' inside objType can no
			// longer be misread as a format verb (go vet printf check).
			return nil, fmt.Errorf("%s name must be a string", objType)
		}
		stringSlice[i] = str
	}
	return stringSlice, nil
}