fix registering tokenizers with dependencies

closes #201
Marty Schoch 2015-05-14 09:50:10 -04:00
parent 7b871fde6a
commit 580d9013b2
2 changed files with 102 additions and 4 deletions
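
Background on the fix: tokenizers such as the exception tokenizer reference another tokenizer by name, so DefineTokenizer only succeeds once that dependency is registered. registerAll previously made a single pass over c.Tokenizers, and Go randomizes map iteration order, so a dependent tokenizer visited before its dependency failed to register (#201). The new code keeps retrying the leftovers until a full pass makes no progress. A self-contained sketch of that fixed-point loop, with illustrative types rather than bleve's API:

package main

import "fmt"

// registerAll mirrors the loop in this commit: deps maps each
// tokenizer name to the (optional) name it depends on, and a
// definition succeeds only if the dependency is already registered.
func registerAll(deps map[string]string, registry map[string]bool) error {
	// put all the names in a map tracking work to do
	todo := map[string]struct{}{}
	for name := range deps {
		todo[name] = struct{}{}
	}
	registered := 1
	var errs []error
	// as long as we keep making progress, keep going
	for len(todo) > 0 && registered > 0 {
		registered = 0
		errs = nil
		for name := range todo {
			if dep := deps[name]; dep != "" && !registry[dep] {
				errs = append(errs, fmt.Errorf("tokenizer %q depends on undefined %q", name, dep))
				continue
			}
			registry[name] = true
			delete(todo, name)
			registered++
		}
	}
	if len(errs) > 0 {
		return errs[0] // progress stalled; surface one failure
	}
	return nil
}

func main() {
	registry := map[string]bool{}
	// "c" -> "b" -> "a" resolves in at most three passes,
	// whatever order the maps iterate in
	fmt.Println(registerAll(map[string]string{"a": "", "b": "a", "c": "b"}, registry))
	// a dependency that is never defined still errors out
	fmt.Println(registerAll(map[string]string{"d": "e"}, registry))
}

Each pass either registers at least one pending tokenizer or the loop stops, so a dependency chain of depth d needs at most d passes, and an unsatisfiable reference still surfaces as an error once progress stalls.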

@@ -40,10 +40,33 @@ func (c *customAnalysis) registerAll(i *IndexMapping) error {
 			return err
 		}
 	}
-	for name, config := range c.Tokenizers {
-		_, err := i.cache.DefineTokenizer(name, config)
-		if err != nil {
-			return err
+	if len(c.Tokenizers) > 0 {
+		// put all the names in map tracking work to do
+		todo := map[string]struct{}{}
+		for name, _ := range c.Tokenizers {
+			todo[name] = struct{}{}
+		}
+		registered := 1
+		errs := []error{}
+		// as long as we keep making progress, keep going
+		for len(todo) > 0 && registered > 0 {
+			registered = 0
+			errs = []error{}
+			for name, _ := range todo {
+				config := c.Tokenizers[name]
+				_, err := i.cache.DefineTokenizer(name, config)
+				if err != nil {
+					errs = append(errs, err)
+				} else {
+					delete(todo, name)
+					registered++
+				}
+			}
+		}
+		if len(errs) > 0 {
+			return errs[0]
+		}
+	}
 	for name, config := range c.TokenMaps {
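
The one-pass bug only bites on this path when the tokenizers arrive through a Go map, whose iteration order is randomized; a JSON mapping with a dependent tokenizer could therefore fail registration intermittently. A hypothetical test sketching that scenario, assuming the analysis section's JSON layout and that IndexMapping's UnmarshalJSON routes through registerAll (both from memory, so treat as illustrative):

func TestMappingJSONTokenizerDeps(t *testing.T) {
	// "b" wraps "a" by name; with randomized map order, the old
	// single pass could try to define "b" before "a" existed
	data := []byte(`{
		"analysis": {
			"tokenizers": {
				"a": {"type": "regexp", "regexp": "\\w+"},
				"b": {"type": "exception", "tokenizer": "a"}
			}
		}
	}`)
	m := NewIndexMapping()
	if err := json.Unmarshal(data, m); err != nil {
		t.Fatal(err)
	}
}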

@@ -244,3 +244,78 @@ func TestMappingForPath(t *testing.T) {
 	}
 }
+
+func TestMappingWithTokenizerDeps(t *testing.T) {
+	tokNoDeps := map[string]interface{}{
+		"type":   "regexp",
+		"regexp": "",
+	}
+
+	tokDepsL1 := map[string]interface{}{
+		"type":      "exception",
+		"tokenizer": "a",
+	}
+
+	// this tests a 1-level dependency
+	// it is run 100 times to increase the
+	// likelihood that it fails along the way
+	// (depends on key iteration order in the map)
+	for i := 0; i < 100; i++ {
+		m := NewIndexMapping()
+		ca := customAnalysis{
+			Tokenizers: map[string]map[string]interface{}{
+				"a": tokNoDeps,
+				"b": tokDepsL1,
+			},
+		}
+		err := ca.registerAll(m)
+		if err != nil {
+			t.Fatal(err)
+		}
+	}
+
+	tokDepsL2 := map[string]interface{}{
+		"type":      "exception",
+		"tokenizer": "b",
+	}
+
+	// now test a second-level dependency
+	for i := 0; i < 100; i++ {
+		m := NewIndexMapping()
+		ca := customAnalysis{
+			Tokenizers: map[string]map[string]interface{}{
+				"a": tokNoDeps,
+				"b": tokDepsL1,
+				"c": tokDepsL2,
+			},
+		}
+		err := ca.registerAll(m)
+		if err != nil {
+			t.Fatal(err)
+		}
+	}
+
+	tokUnsatisfied := map[string]interface{}{
+		"type":      "exception",
+		"tokenizer": "e",
+	}
+
+	// now make sure an unsatisfied dep still
+	// results in an error
+	m := NewIndexMapping()
+	ca := customAnalysis{
+		Tokenizers: map[string]map[string]interface{}{
+			"a": tokNoDeps,
+			"b": tokDepsL1,
+			"c": tokDepsL2,
+			"d": tokUnsatisfied,
+		},
+	}
+	err := ca.registerAll(m)
+	if err == nil {
+		t.Fatal("expected error for unsatisfied tokenizer dependency, got nil")
+	}
+}
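
The tests above drive registerAll directly; callers building a mapping in code can instead order the definitions themselves, since each one registers immediately. A sketch using bleve's programmatic API (AddCustomTokenizer is the method I recall for this; the config keys mirror the tests above):

m := NewIndexMapping()
// define the plain tokenizer first...
err := m.AddCustomTokenizer("a", map[string]interface{}{
	"type":   "regexp",
	"regexp": `\w+`,
})
if err != nil {
	log.Fatal(err)
}
// ...then the dependent one, so its lookup of "a" succeeds
err = m.AddCustomTokenizer("b", map[string]interface{}{
	"type":      "exception",
	"tokenizer": "a",
})
if err != nil {
	log.Fatal(err)
}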