0
0
Fork 0

add validation which checks the type of char/token filters

when specified in the custom type of analyzer
This commit is contained in:
Marty Schoch 2017-02-24 15:57:10 -05:00
parent 518abe742a
commit b9db744def
1 changed file with 30 additions and 20 deletions

View File

@ -27,16 +27,16 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
var err error var err error
var charFilters []analysis.CharFilter var charFilters []analysis.CharFilter
charFiltersNames, ok := config["char_filters"].([]string) charFiltersValue, ok := config["char_filters"]
if ok { if ok {
charFilters, err = getCharFilters(charFiltersNames, cache) switch charFiltersValue := charFiltersValue.(type) {
if err != nil { case []string:
return nil, err charFilters, err = getCharFilters(charFiltersValue, cache)
} if err != nil {
} else { return nil, err
charFiltersNamesInterfaceSlice, ok := config["char_filters"].([]interface{}) }
if ok { case []interface{}:
charFiltersNames, err := convertInterfaceSliceToStringSlice(charFiltersNamesInterfaceSlice, "char filter") charFiltersNames, err := convertInterfaceSliceToStringSlice(charFiltersValue, "char filter")
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -44,11 +44,19 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil { if err != nil {
return nil, err return nil, err
} }
default:
return nil, fmt.Errorf("unsupported type for char_filters, must be slice")
} }
} }
tokenizerName, ok := config["tokenizer"].(string) var tokenizerName string
if !ok { tokenizerValue, ok := config["tokenizer"]
if ok {
tokenizerName, ok = tokenizerValue.(string)
if !ok {
return nil, fmt.Errorf("must specify tokenizer as string")
}
} else {
return nil, fmt.Errorf("must specify tokenizer") return nil, fmt.Errorf("must specify tokenizer")
} }
@ -58,16 +66,16 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
} }
var tokenFilters []analysis.TokenFilter var tokenFilters []analysis.TokenFilter
tokenFiltersNames, ok := config["token_filters"].([]string) tokenFiltersValue, ok := config["token_filters"]
if ok { if ok {
tokenFilters, err = getTokenFilters(tokenFiltersNames, cache) switch tokenFiltersValue := tokenFiltersValue.(type) {
if err != nil { case []string:
return nil, err tokenFilters, err = getTokenFilters(tokenFiltersValue, cache)
} if err != nil {
} else { return nil, err
tokenFiltersNamesInterfaceSlice, ok := config["token_filters"].([]interface{}) }
if ok { case []interface{}:
tokenFiltersNames, err := convertInterfaceSliceToStringSlice(tokenFiltersNamesInterfaceSlice, "token filter") tokenFiltersNames, err := convertInterfaceSliceToStringSlice(tokenFiltersValue, "token filter")
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -75,6 +83,8 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil { if err != nil {
return nil, err return nil, err
} }
default:
return nil, fmt.Errorf("unsupported type for token_filters, must be slice")
} }
} }