diff --git a/analysis/tokenizers/exception/exception.go b/analysis/tokenizers/exception/exception.go
index 51da89ac..63f9897c 100644
--- a/analysis/tokenizers/exception/exception.go
+++ b/analysis/tokenizers/exception/exception.go
@@ -111,6 +111,9 @@ func ExceptionsTokenizerConstructor(config map[string]interface{}, cache *regist
 	if ok {
 		exceptions = append(exceptions, aexceptions...)
 	}
+	if len(exceptions) == 0 {
+		return nil, fmt.Errorf("no pattern found in 'exception' property")
+	}
 	exceptionPattern := strings.Join(exceptions, "|")
 	r, err := regexp.Compile(exceptionPattern)
 	if err != nil {
diff --git a/registry/tokenizer.go b/registry/tokenizer.go
index efd8339a..24b5bf93 100644
--- a/registry/tokenizer.go
+++ b/registry/tokenizer.go
@@ -38,7 +38,7 @@ func (c TokenizerCache) TokenizerNamed(name string, cache *Cache) (analysis.Toke
 	}
 	tokenizer, err := tokenizerConstructor(nil, cache)
 	if err != nil {
-		return nil, fmt.Errorf("error building tokenizer: %v", err)
+		return nil, fmt.Errorf("error building tokenizer '%s': %v", name, err)
 	}
 	c[name] = tokenizer
 	return tokenizer, nil
@@ -55,7 +55,7 @@ func (c TokenizerCache) DefineTokenizer(name string, typ string, config map[stri
 	}
 	tokenizer, err := tokenizerConstructor(config, cache)
 	if err != nil {
-		return nil, fmt.Errorf("error building tokenizer: %v", err)
+		return nil, fmt.Errorf("error building tokenizer '%s': %v", name, err)
 	}
 	c[name] = tokenizer
 	return tokenizer, nil
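
Note: with this change, constructing an exceptions tokenizer from a config that supplies no patterns fails up front instead of compiling an empty regexp. A minimal sketch of how the new error path surfaces, assuming the package at analysis/tokenizers/exception is imported as `exception` and that `registry.NewCache()` is used to build the cache (the exact import paths depend on the module layout):

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis/tokenizers/exception"
	"github.com/blevesearch/bleve/registry"
)

func main() {
	cache := registry.NewCache()

	// Empty config: no exception patterns are provided, so the constructor
	// is expected to return an error before compiling the regexp.
	_, err := exception.ExceptionsTokenizerConstructor(map[string]interface{}{}, cache)
	if err != nil {
		fmt.Println(err) // e.g. no pattern found in 'exception' property
	}
}

The registry changes only affect the error text: callers that define or look up a tokenizer by name now see that name in the message ("error building tokenizer 'foo': ..."), which makes it easier to tell which tokenizer in an index mapping failed to build.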