
BREAKING CHANGE - renamed packages to be shorter and not use _

this commit only addresses the analysis sub-package
Marty Schoch 2016-09-30 11:18:39 -04:00
parent bf055b6bf2
commit 35da361bfa
160 changed files with 214 additions and 232 deletions
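For downstream code, the breaking part is the import paths and package selectors; the names each component registers under (for example the keyword analyzer's Name constant in the diff below) are untouched by this commit. The following is a minimal, hypothetical caller sketching the migration. It uses only identifiers that appear in this diff (registry.NewCache, AnalyzerNamed, TokenFilterNamed, standard.Name, lowercase.Name); the package main wrapper and the final print are illustrative, and real call sites will differ.

// Hypothetical migration sketch, not part of this commit.
//
// Before: callers imported the underscore package paths, e.g.
//   "github.com/blevesearch/bleve/analysis/analyzers/standard_analyzer"
//   "github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
// and looked components up via standard_analyzer.Name / lower_case_filter.Name.
//
// After: the same registry lookups against the renamed packages.
package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis/analyzers/standard"
	"github.com/blevesearch/bleve/analysis/tokens/lowercase"
	"github.com/blevesearch/bleve/registry"
)

func main() {
	cache := registry.NewCache()

	// Same registered analyzer name, new package selector.
	analyzer, err := cache.AnalyzerNamed(standard.Name)
	if err != nil {
		panic(err)
	}

	// Was lower_case_filter.Name before this commit.
	filter, err := cache.TokenFilterNamed(lowercase.Name)
	if err != nil {
		panic(err)
	}

	fmt.Println(analyzer, filter)
}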

View File

@ -7,7 +7,7 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package custom_analyzer
package custom
import (
"fmt"

View File

@ -7,18 +7,18 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package keyword_analyzer
package keyword
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/tokenizers/single_token"
"github.com/blevesearch/bleve/analysis/tokenizers/single"
"github.com/blevesearch/bleve/registry"
)
const Name = "keyword"
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
keywordTokenizer, err := cache.TokenizerNamed(single_token.Name)
keywordTokenizer, err := cache.TokenizerNamed(single.Name)
if err != nil {
return nil, err
}

View File

@ -7,12 +7,12 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package simple_analyzer
package simple
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/tokenizers/letter"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/registry"
)
@ -23,7 +23,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -7,13 +7,13 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package standard_analyzer
package standard
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/language/en"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/lang/en"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -11,9 +11,9 @@ package web
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/language/en"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/lang/en"
webt "github.com/blevesearch/bleve/analysis/tokenizers/web"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -4,7 +4,7 @@ import (
"testing"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/analyzers/standard_analyzer"
"github.com/blevesearch/bleve/analysis/analyzers/standard"
"github.com/blevesearch/bleve/registry"
)
@ -12,7 +12,7 @@ func BenchmarkAnalysis(b *testing.B) {
for i := 0; i < b.N; i++ {
cache := registry.NewCache()
analyzer, err := cache.AnalyzerNamed(standard_analyzer.Name)
analyzer, err := cache.AnalyzerNamed(standard.Name)
if err != nil {
b.Fatal(err)
}

View File

@ -7,13 +7,13 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package html_char_filter
package html
import (
"regexp"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/char_filters/regexp_char_filter"
regexpCharFilter "github.com/blevesearch/bleve/analysis/char/regexp"
"github.com/blevesearch/bleve/registry"
)
@ -23,7 +23,7 @@ var htmlCharFilterRegexp = regexp.MustCompile(`</?[!\w]+((\s+\w+(\s*=\s*(?:".*?"
func CharFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.CharFilter, error) {
replaceBytes := []byte(" ")
return regexp_char_filter.NewRegexpCharFilter(htmlCharFilterRegexp, replaceBytes), nil
return regexpCharFilter.NewRegexpCharFilter(htmlCharFilterRegexp, replaceBytes), nil
}
func init() {

View File

@ -7,7 +7,7 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package regexp_char_filter
package regexp
import (
"bytes"

View File

@ -7,7 +7,7 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package regexp_char_filter
package regexp
import (
"reflect"
@ -25,26 +25,8 @@ func TestRegexpCharFilter(t *testing.T) {
output []byte
}{
{
input: []byte(`<!DOCTYPE html>
<html>
<body>
<h1>My First Heading</h1>
<p>My first paragraph.</p>
</body>
</html>`),
output: []byte(`
My First Heading
My first paragraph.
`),
input: []byte(`<html>test</html>`),
output: []byte(` test `),
},
}

View File

@ -7,13 +7,13 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package zero_width_non_joiner
package zerowidthnonjoiner
import (
"regexp"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/char_filters/regexp_char_filter"
regexpCharFilter "github.com/blevesearch/bleve/analysis/char/regexp"
"github.com/blevesearch/bleve/registry"
)
@ -23,7 +23,7 @@ var zeroWidthNonJoinerRegexp = regexp.MustCompile(`\x{200C}`)
func CharFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.CharFilter, error) {
replaceBytes := []byte(" ")
return regexp_char_filter.NewRegexpCharFilter(zeroWidthNonJoinerRegexp, replaceBytes), nil
return regexpCharFilter.NewRegexpCharFilter(zeroWidthNonJoinerRegexp, replaceBytes), nil
}
func init() {

View File

@ -7,7 +7,7 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package flexible_go
package flexible
import (
"fmt"

View File

@ -7,7 +7,7 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package flexible_go
package flexible
import (
"reflect"

View File

@ -7,13 +7,13 @@
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package datetime_optional
package optional
import (
"time"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/datetime_parsers/flexible_go"
"github.com/blevesearch/bleve/analysis/datetime/flexible"
"github.com/blevesearch/bleve/registry"
)
@ -32,7 +32,7 @@ var layouts = []string{
}
func DateTimeParserConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.DateTimeParser, error) {
return flexible_go.NewFlexibleGoDateTimeParser(layouts), nil
return flexible.NewFlexibleGoDateTimeParser(layouts), nil
}
func init() {

View File

@ -13,9 +13,9 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/token_filters/unicode_normalize"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/analysis/tokens/unicodenorm"
)
const AnalyzerName = "ar"
@ -25,11 +25,11 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}
normalizeFilter := unicode_normalize.MustNewUnicodeNormalizeFilter(unicode_normalize.NFKC)
normalizeFilter := unicodenorm.MustNewUnicodeNormalizeFilter(unicodenorm.NFKC)
stopArFilter, err := cache.TokenFilterNamed(StopName)
if err != nil {
return nil, err

View File

@ -11,7 +11,7 @@ package ar
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package bg
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,7 +13,7 @@ import (
"fmt"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/elision_filter"
"github.com/blevesearch/bleve/analysis/tokens/elision"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cac
if err != nil {
return nil, fmt.Errorf("error building elision filter: %v", err)
}
return elision_filter.NewElisionFilter(articlesTokenMap), nil
return elision.NewElisionFilter(articlesTokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package ca
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,8 +13,8 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
)
const AnalyzerName = "cjk"
@ -28,7 +28,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -11,8 +11,8 @@ package ckb
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/registry"
)
@ -27,7 +27,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -14,7 +14,7 @@ import (
"testing"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/tokenizers/single_token"
"github.com/blevesearch/bleve/analysis/tokenizers/single"
)
func TestSoraniStemmerFilter(t *testing.T) {
@ -22,7 +22,7 @@ func TestSoraniStemmerFilter(t *testing.T) {
// in order to match the lucene tests
// we will test with an analyzer, not just the stemmer
analyzer := analysis.Analyzer{
Tokenizer: single_token.NewSingleTokenTokenizer(),
Tokenizer: single.NewSingleTokenTokenizer(),
TokenFilters: []analysis.TokenFilter{
NewSoraniNormalizeFilter(),
NewSoraniStemmerFilter(),

View File

@ -11,7 +11,7 @@ package ckb
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package cs
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package el
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -20,9 +20,9 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/token_filters/porter"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
"github.com/blevesearch/bleve/analysis/tokens/porter"
)
const AnalyzerName = "en"
@ -36,7 +36,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -11,7 +11,7 @@ package en
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package eu
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,16 +13,16 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/char_filters/zero_width_non_joiner"
"github.com/blevesearch/bleve/analysis/language/ar"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/char/zerowidthnonjoiner"
"github.com/blevesearch/bleve/analysis/lang/ar"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
)
const AnalyzerName = "fa"
func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
zFilter, err := cache.CharFilterNamed(zero_width_non_joiner.Name)
zFilter, err := cache.CharFilterNamed(zerowidthnonjoiner.Name)
if err != nil {
return nil, err
}
@ -38,7 +38,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -11,7 +11,7 @@ package fa
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,8 +13,8 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
)
const AnalyzerName = "fr"
@ -28,7 +28,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -13,7 +13,7 @@ import (
"fmt"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/elision_filter"
"github.com/blevesearch/bleve/analysis/tokens/elision"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cac
if err != nil {
return nil, fmt.Errorf("error building elision filter: %v", err)
}
return elision_filter.NewElisionFilter(articlesTokenMap), nil
return elision.NewElisionFilter(articlesTokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package fr
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,7 +13,7 @@ import (
"fmt"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/elision_filter"
"github.com/blevesearch/bleve/analysis/tokens/elision"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cac
if err != nil {
return nil, fmt.Errorf("error building elision filter: %v", err)
}
return elision_filter.NewElisionFilter(articlesTokenMap), nil
return elision.NewElisionFilter(articlesTokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package ga
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package gl
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,9 +13,9 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/language/in"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/lang/in"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
)
const AnalyzerName = "hi"
@ -25,7 +25,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -11,7 +11,7 @@ package hi
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package hy
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -11,7 +11,7 @@ package id
import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/stop_tokens_filter"
"github.com/blevesearch/bleve/analysis/tokens/stop"
"github.com/blevesearch/bleve/registry"
)
@ -20,7 +20,7 @@ func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.C
if err != nil {
return nil, err
}
return stop_tokens_filter.NewStopTokensFilter(tokenMap), nil
return stop.NewStopTokensFilter(tokenMap), nil
}
func init() {

View File

@ -13,8 +13,8 @@ import (
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/registry"
"github.com/blevesearch/bleve/analysis/token_filters/lower_case_filter"
"github.com/blevesearch/bleve/analysis/tokenizers/unicode"
"github.com/blevesearch/bleve/analysis/tokens/lowercase"
)
const AnalyzerName = "it"
@ -28,7 +28,7 @@ func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (
if err != nil {
return nil, err
}
toLowerFilter, err := cache.TokenFilterNamed(lower_case_filter.Name)
toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name)
if err != nil {
return nil, err
}

View File

@ -13,7 +13,7 @@ import (
"fmt"
"github.com/blevesearch/bleve/analysis"
"github.com/blevesearch/bleve/analysis/token_filters/elision_filter"
"github.com/blevesearch/bleve/analysis/tokens/elision"
"github.com/blevesearch/bleve/registry"
)
@ -24,7 +24,7 @@ func ElisionFilterConstructor(config map[string]interface{}, cache *registry.Cac
if err != nil {
return nil, fmt.Errorf("error building elision filter: %v", err)
}
return elision_filter.NewElisionFilter(articlesTokenMap), nil
return elision.NewElisionFilter(articlesTokenMap), nil
}
func init() {

Some files were not shown because too many files have changed in this diff.