
major refactor of kvstore/index internals, see below

In the index/store package
introduce KVReader
  creates a snapshot
  all read operations are consistent with this snapshot
  must close to release

introduce KVWriter
  only one writer active
  access to all operations
  allows for consistent read-modify-write
  must close to release

introduce AssociativeMerge operation on batch
  allows efficient read-modify-write
  for associative operations
  used to consolidate updates to the term summary rows
  saves 1 set and 1 get op per shared instance of a term in a field (see the usage sketch below)
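
A minimal sketch of how the new store-level pieces fit together, using the boltdb implementation; the path and bucket names are illustrative, and the addUint64 merge operator mirrors the one in the store tests further down:

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/blevesearch/bleve/index/store/boltdb"
)

// addUint64 is an illustrative AssociativeMerge that adds an offset to a
// uvarint-encoded counter (modeled on the operator in the store tests).
type addUint64 struct{ offset uint64 }

func (a *addUint64) Merge(key, existing []byte) ([]byte, error) {
	var cur uint64
	if len(existing) > 0 {
		cur, _ = binary.Uvarint(existing)
	}
	cur += a.offset
	buf := make([]byte, 8)
	binary.PutUvarint(buf, cur)
	return buf, nil
}

func main() {
	s, err := boltdb.Open("example.bolt", "bleve") // illustrative path/bucket
	if err != nil {
		panic(err)
	}
	defer s.Close()

	// only one writer may be active at a time; Close releases it
	writer := s.Writer()
	batch := writer.NewBatch()
	batch.Set([]byte("a"), []byte("val-a"))
	// the merge is applied as a read-modify-write when the batch executes
	batch.Merge([]byte("counter"), &addUint64{offset: 1})
	if err := batch.Execute(); err != nil {
		panic(err)
	}
	writer.Close()

	// a reader is a consistent snapshot; Close releases it
	reader := s.Reader()
	defer reader.Close()
	val, _ := reader.Get([]byte("a"))
	fmt.Printf("a = %s\n", val)
}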

In the index package
introduce an IndexReader
  exposes a consistent snapshot of the index for searching

At top level
  All searches now operate on a consistent snapshot of the index (see the sketch below)
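
At the index level the same pattern holds: search-side code takes an IndexReader snapshot and performs every read against it. A rough sketch (countTerm is a hypothetical helper; it assumes index.TermFieldReader exposes Next and Close, as the upside_down implementation in this commit does):

package example

import (
	"github.com/blevesearch/bleve/index"
)

// countTerm counts the documents containing term in field, reading
// everything from a single consistent IndexReader snapshot.
func countTerm(idx index.Index, term []byte, field string) (uint64, error) {
	reader := idx.Reader() // snapshot of the index
	defer reader.Close()

	tfr, err := reader.TermFieldReader(term, field)
	if err != nil {
		return 0, err
	}
	defer tfr.Close()

	var hits uint64
	for {
		next, err := tfr.Next()
		if err != nil {
			return 0, err
		}
		if next == nil {
			break // iterator exhausted
		}
		hits++
	}
	return hits, nil
}
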
Marty Schoch 2014-09-12 17:21:35 -04:00
parent 7819deb447
commit 198ca1ad4d
62 changed files with 1556 additions and 598 deletions

View File

@ -17,29 +17,38 @@ type Index interface {
Open() error
Close()
DocCount() uint64
Update(doc *document.Document) error
Delete(id string) error
Batch(batch Batch) error
SetInternal(key, val []byte) error
DeleteInternal(key []byte) error
DumpAll() chan interface{}
DumpDoc(id string) chan interface{}
DumpFields() chan interface{}
Reader() IndexReader
}
type IndexReader interface {
TermFieldReader(term []byte, field string) (TermFieldReader, error)
DocIDReader(start, end string) (DocIDReader, error)
FieldReader(field string, startTerm []byte, endTerm []byte) (FieldReader, error)
DocCount() uint64
Document(id string) (*document.Document, error)
DocumentFieldTerms(id string) (FieldTerms, error)
Fields() ([]string, error)
SetInternal(key, val []byte) error
GetInternal(key []byte) ([]byte, error)
DeleteInternal(key []byte) error
DumpAll() chan interface{}
DumpDoc(id string) chan interface{}
DumpFields() chan interface{}
DocCount() uint64
Close()
}
type FieldTerms map[string][]string

View File

@ -10,6 +10,7 @@
package boltdb
import (
indexStore "github.com/blevesearch/bleve/index/store"
"github.com/boltdb/bolt"
)
@ -19,14 +20,27 @@ type op struct {
}
type Batch struct {
store *Store
ops []op
store *Store
alreadyLocked bool
ops []op
merges map[string]indexStore.AssociativeMergeChain
}
func newBatch(store *Store) *Batch {
rv := Batch{
store: store,
ops: make([]op, 0),
store: store,
ops: make([]op, 0),
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
func newBatchAlreadyLocked(store *Store) *Batch {
rv := Batch{
store: store,
alreadyLocked: true,
ops: make([]op, 0),
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
@ -39,10 +53,45 @@ func (i *Batch) Delete(key []byte) {
i.ops = append(i.ops, op{key, nil})
}
func (i *Batch) Merge(key []byte, oper indexStore.AssociativeMerge) {
opers, ok := i.merges[string(key)]
if !ok {
opers = make(indexStore.AssociativeMergeChain, 0, 1)
}
opers = append(opers, oper)
i.merges[string(key)] = opers
}
func (i *Batch) Execute() error {
if !i.alreadyLocked {
i.store.writer.Lock()
defer i.store.writer.Unlock()
}
return i.store.db.Update(func(tx *bolt.Tx) error {
b := tx.Bucket([]byte(i.store.bucket))
// first process the merges
for k, mc := range i.merges {
val := b.Get([]byte(k))
var err error
val, err = mc.Merge([]byte(k), val)
if err != nil {
return err
}
if val == nil {
err := b.Delete([]byte(k))
if err != nil {
return err
}
} else {
err := b.Put([]byte(k), val)
if err != nil {
return err
}
}
}
// now process the regular get/set ops
for _, o := range i.ops {
if o.v == nil {
if err := b.Delete(o.k); err != nil {

View File

@ -15,6 +15,7 @@ import (
type Iterator struct {
store *Store
ownTx bool
tx *bolt.Tx
cursor *bolt.Cursor
valid bool
@ -27,6 +28,18 @@ func newIterator(store *Store) *Iterator {
b := tx.Bucket([]byte(store.bucket))
cursor := b.Cursor()
return &Iterator{
store: store,
tx: tx,
ownTx: true,
cursor: cursor,
}
}
func newIteratorExistingTx(store *Store, tx *bolt.Tx) *Iterator {
b := tx.Bucket([]byte(store.bucket))
cursor := b.Cursor()
return &Iterator{
store: store,
tx: tx,
@ -69,5 +82,8 @@ func (i *Iterator) Valid() bool {
}
func (i *Iterator) Close() {
i.tx.Rollback()
// only close the transaction if we opened it
if i.ownTx {
i.tx.Rollback()
}
}

View File

@ -0,0 +1,43 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package boltdb
import (
"github.com/blevesearch/bleve/index/store"
"github.com/boltdb/bolt"
)
type Reader struct {
store *Store
tx *bolt.Tx
}
func newReader(store *Store) *Reader {
tx, _ := store.db.Begin(false)
return &Reader{
store: store,
tx: tx,
}
}
func (r *Reader) Get(key []byte) ([]byte, error) {
rv := r.tx.Bucket([]byte(r.store.bucket)).Get(key)
return rv, nil
}
func (r *Reader) Iterator(key []byte) store.KVIterator {
rv := newIteratorExistingTx(r.store, r.tx)
rv.Seek(key)
return rv
}
func (r *Reader) Close() error {
return r.tx.Rollback()
}

View File

@ -11,6 +11,7 @@ package boltdb
import (
"fmt"
"sync"
"github.com/blevesearch/bleve/index/store"
"github.com/blevesearch/bleve/registry"
@ -23,6 +24,7 @@ type Store struct {
path string
bucket string
db *bolt.DB
writer sync.Mutex
}
func Open(path string, bucket string) (*Store, error) {
@ -49,7 +51,7 @@ func Open(path string, bucket string) (*Store, error) {
return &rv, nil
}
func (bs *Store) Get(key []byte) ([]byte, error) {
func (bs *Store) get(key []byte) ([]byte, error) {
var rv []byte
err := bs.db.View(func(tx *bolt.Tx) error {
@ -61,33 +63,49 @@ func (bs *Store) Get(key []byte) ([]byte, error) {
return rv, err
}
func (bs *Store) Set(key, val []byte) error {
func (bs *Store) set(key, val []byte) error {
bs.writer.Lock()
defer bs.writer.Unlock()
return bs.setlocked(key, val)
}
func (bs *Store) setlocked(key, val []byte) error {
return bs.db.Update(func(tx *bolt.Tx) error {
return tx.Bucket([]byte(bs.bucket)).Put(key, val)
})
}
func (bs *Store) Delete(key []byte) error {
func (bs *Store) delete(key []byte) error {
bs.writer.Lock()
defer bs.writer.Unlock()
return bs.deletelocked(key)
}
func (bs *Store) deletelocked(key []byte) error {
return bs.db.Update(func(tx *bolt.Tx) error {
return tx.Bucket([]byte(bs.bucket)).Delete(key)
})
}
func (bs *Store) Commit() error {
return nil
}
func (bs *Store) Close() error {
return bs.db.Close()
}
func (bs *Store) Iterator(key []byte) store.KVIterator {
func (bs *Store) iterator(key []byte) store.KVIterator {
rv := newIterator(bs)
rv.Seek(key)
return rv
}
func (bs *Store) NewBatch() store.KVBatch {
func (bs *Store) Reader() store.KVReader {
return newReader(bs)
}
func (bs *Store) Writer() store.KVWriter {
return newWriter(bs)
}
func (bs *Store) newBatch() store.KVBatch {
return newBatch(bs)
}

View File

@ -25,3 +25,13 @@ func TestStore(t *testing.T) {
store_test.CommonTestKVStore(t, s)
}
func TestReaderIsolation(t *testing.T) {
s, err := Open("test", "bleve")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll("test")
store_test.CommonTestReaderIsolation(t, s)
}

View File

@ -0,0 +1,53 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package boltdb
import (
"github.com/blevesearch/bleve/index/store"
)
type Writer struct {
store *Store
}
func newWriter(store *Store) *Writer {
store.writer.Lock()
return &Writer{
store: store,
}
}
func (w *Writer) Set(key, val []byte) error {
return w.store.setlocked(key, val)
}
func (w *Writer) Delete(key []byte) error {
return w.store.deletelocked(key)
}
func (w *Writer) NewBatch() store.KVBatch {
return newBatchAlreadyLocked(w.store)
}
func (w *Writer) Close() error {
w.store.writer.Unlock()
return nil
}
// these two methods can safely read using the regular
// methods without a read transaction, because we know
// that no one else is writing but us
func (w *Writer) Get(key []byte) ([]byte, error) {
return w.store.get(key)
}
func (w *Writer) Iterator(key []byte) store.KVIterator {
return w.store.iterator(key)
}

View File

@ -9,17 +9,35 @@
package inmem
import (
indexStore "github.com/blevesearch/bleve/index/store"
)
type Batch struct {
store *Store
keys [][]byte
vals [][]byte
store *Store
keys [][]byte
vals [][]byte
alreadyLocked bool
merges map[string]indexStore.AssociativeMergeChain
}
func newBatch(store *Store) *Batch {
rv := Batch{
store: store,
keys: make([][]byte, 0),
vals: make([][]byte, 0),
store: store,
keys: make([][]byte, 0),
vals: make([][]byte, 0),
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
func newBatchAlreadyLocked(store *Store) *Batch {
rv := Batch{
store: store,
keys: make([][]byte, 0),
vals: make([][]byte, 0),
alreadyLocked: true,
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
@ -34,13 +52,56 @@ func (i *Batch) Delete(key []byte) {
i.vals = append(i.vals, nil)
}
func (i *Batch) Merge(key []byte, oper indexStore.AssociativeMerge) {
opers, ok := i.merges[string(key)]
if !ok {
opers = make(indexStore.AssociativeMergeChain, 0, 1)
}
opers = append(opers, oper)
i.merges[string(key)] = opers
}
func (i *Batch) Execute() error {
if !i.alreadyLocked {
i.store.writer.Lock()
defer i.store.writer.Unlock()
}
// first process the merges
for k, mc := range i.merges {
val, err := i.store.get([]byte(k))
if err != nil {
return err
}
val, err = mc.Merge([]byte(k), val)
if err != nil {
return err
}
if val == nil {
err := i.store.deletelocked([]byte(k))
if err != nil {
return err
}
} else {
err := i.store.setlocked([]byte(k), val)
if err != nil {
return err
}
}
}
for index, key := range i.keys {
val := i.vals[index]
if val == nil {
i.store.list.Delete(string(key))
err := i.store.deletelocked(key)
if err != nil {
return err
}
} else {
i.store.Set(key, val)
err := i.store.setlocked(key, val)
if err != nil {
return err
}
}
}
return nil

View File

@ -0,0 +1,36 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package inmem
import (
"github.com/blevesearch/bleve/index/store"
)
type Reader struct {
store *Store
}
func newReader(store *Store) *Reader {
return &Reader{
store: store,
}
}
func (r *Reader) Get(key []byte) ([]byte, error) {
return r.store.get(key)
}
func (r *Reader) Iterator(key []byte) store.KVIterator {
return r.store.iterator(key)
}
func (r *Reader) Close() error {
return nil
}

View File

@ -10,6 +10,8 @@
package inmem
import (
"sync"
"github.com/blevesearch/bleve/index/store"
"github.com/blevesearch/bleve/registry"
"github.com/ryszard/goskiplist/skiplist"
@ -18,7 +20,8 @@ import (
const Name = "mem"
type Store struct {
list *skiplist.SkipList
list *skiplist.SkipList
writer sync.Mutex
}
func Open() (*Store, error) {
@ -37,7 +40,7 @@ func MustOpen() *Store {
return &rv
}
func (i *Store) Get(key []byte) ([]byte, error) {
func (i *Store) get(key []byte) ([]byte, error) {
val, ok := i.list.Get(string(key))
if ok {
return []byte(val.(string)), nil
@ -45,17 +48,25 @@ func (i *Store) Get(key []byte) ([]byte, error) {
return nil, nil
}
func (i *Store) Set(key, val []byte) error {
func (i *Store) set(key, val []byte) error {
i.writer.Lock()
defer i.writer.Unlock()
return i.setlocked(key, val)
}
func (i *Store) setlocked(key, val []byte) error {
i.list.Set(string(key), string(val))
return nil
}
func (i *Store) Delete(key []byte) error {
i.list.Delete(string(key))
return nil
func (i *Store) delete(key []byte) error {
i.writer.Lock()
defer i.writer.Unlock()
return i.deletelocked(key)
}
func (i *Store) Commit() error {
func (i *Store) deletelocked(key []byte) error {
i.list.Delete(string(key))
return nil
}
@ -63,13 +74,21 @@ func (i *Store) Close() error {
return nil
}
func (i *Store) Iterator(key []byte) store.KVIterator {
func (i *Store) iterator(key []byte) store.KVIterator {
rv := newIterator(i)
rv.Seek(key)
return rv
}
func (i *Store) NewBatch() store.KVBatch {
func (i *Store) Reader() store.KVReader {
return newReader(i)
}
func (i *Store) Writer() store.KVWriter {
return newWriter(i)
}
func (i *Store) newBatch() store.KVBatch {
return newBatch(i)
}

View File

@ -0,0 +1,53 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package inmem
import (
"github.com/blevesearch/bleve/index/store"
)
type Writer struct {
store *Store
}
func newWriter(store *Store) *Writer {
store.writer.Lock()
return &Writer{
store: store,
}
}
func (w *Writer) Set(key, val []byte) error {
return w.store.setlocked(key, val)
}
func (w *Writer) Delete(key []byte) error {
return w.store.deletelocked(key)
}
func (w *Writer) NewBatch() store.KVBatch {
return newBatchAlreadyLocked(w.store)
}
func (w *Writer) Close() error {
w.store.writer.Unlock()
return nil
}
// these two methods can safely read using the regular
// methods without a read transaction, because we know
// that no one else is writing but us
func (w *Writer) Get(key []byte) ([]byte, error) {
return w.store.get(key)
}
func (w *Writer) Iterator(key []byte) store.KVIterator {
return w.store.iterator(key)
}

View File

@ -9,9 +9,12 @@
package store
import ()
type KVBatch interface {
Set(key, val []byte)
Delete(key []byte)
Merge(key []byte, oper AssociativeMerge)
Execute() error
Close() error
}
@ -30,12 +33,38 @@ type KVIterator interface {
}
type KVStore interface {
Get(key []byte) ([]byte, error)
Writer() KVWriter
Reader() KVReader
Close() error
}
type KVWriter interface {
KVReader
Set(key, val []byte) error
Delete(key []byte) error
Commit() error
Close() error
Iterator(key []byte) KVIterator
NewBatch() KVBatch
}
type KVReader interface {
Get(key []byte) ([]byte, error)
Iterator(key []byte) KVIterator
Close() error
}
type AssociativeMerge interface {
Merge(key, existing []byte) ([]byte, error)
}
type AssociativeMergeChain []AssociativeMerge
func (a AssociativeMergeChain) Merge(key, orig []byte) ([]byte, error) {
curr := orig
for _, m := range a {
var err error
curr, err = m.Merge(key, curr)
if err != nil {
return nil, err
}
}
return curr, nil
}

View File

@ -0,0 +1,56 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package store
import (
"encoding/binary"
"testing"
)
type addUint64Operator struct {
offset uint64
}
func newAddUint64Operator(offset uint64) *addUint64Operator {
return &addUint64Operator{offset: offset}
}
func (a *addUint64Operator) Merge(key, existing []byte) ([]byte, error) {
var existingUint64 uint64
if len(existing) > 0 {
existingUint64, _ = binary.Uvarint(existing)
}
existingUint64 += a.offset
result := make([]byte, 8)
binary.PutUvarint(result, existingUint64)
return result, nil
}
func TestAssociativeMerge(t *testing.T) {
// simulate original lookup of value
existingValue := make([]byte, 8)
binary.PutUvarint(existingValue, 27)
mergeChain := make(AssociativeMergeChain, 0)
mergeChain = append(mergeChain, newAddUint64Operator(6))
mergeChain = append(mergeChain, newAddUint64Operator(3))
mergeChain = append(mergeChain, newAddUint64Operator(25))
mergeChain = append(mergeChain, newAddUint64Operator(1))
newValueBytes, err := mergeChain.Merge([]byte("key"), existingValue)
if err != nil {
t.Fatal(err)
}
newValue, _ := binary.Uvarint(newValueBytes)
if newValue != 62 {
t.Errorf("expected 62, got %d", newValue)
}
}

View File

@ -12,35 +12,96 @@
package leveldb
import (
indexStore "github.com/blevesearch/bleve/index/store"
"github.com/jmhodges/levigo"
)
type LevelDBBatch struct {
store *LevelDBStore
batch *levigo.WriteBatch
type op struct {
k []byte
v []byte
}
func newLevelDBBatch(store *LevelDBStore) *LevelDBBatch {
rv := LevelDBBatch{
store: store,
batch: levigo.NewWriteBatch(),
type Batch struct {
store *Store
ops []op
alreadyLocked bool
merges map[string]indexStore.AssociativeMergeChain
}
func newBatch(store *Store) *Batch {
rv := Batch{
store: store,
ops: make([]op, 0),
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
func (ldb *LevelDBBatch) Set(key, val []byte) {
ldb.batch.Put(key, val)
func newBatchAlreadyLocked(store *Store) *Batch {
rv := Batch{
store: store,
ops: make([]op, 0),
alreadyLocked: true,
merges: make(map[string]indexStore.AssociativeMergeChain),
}
return &rv
}
func (ldb *LevelDBBatch) Delete(key []byte) {
ldb.batch.Delete(key)
func (ldb *Batch) Set(key, val []byte) {
ldb.ops = append(ldb.ops, op{key, val})
}
func (ldb *LevelDBBatch) Execute() error {
return ldb.store.db.Write(defaultWriteOptions(), ldb.batch)
func (ldb *Batch) Delete(key []byte) {
ldb.ops = append(ldb.ops, op{key, nil})
}
func (ldb *LevelDBBatch) Close() error {
ldb.batch.Close()
func (ldb *Batch) Merge(key []byte, oper indexStore.AssociativeMerge) {
opers, ok := ldb.merges[string(key)]
if !ok {
opers = make(indexStore.AssociativeMergeChain, 0, 1)
}
opers = append(opers, oper)
ldb.merges[string(key)] = opers
}
func (ldb *Batch) Execute() error {
if !ldb.alreadyLocked {
ldb.store.writer.Lock()
defer ldb.store.writer.Unlock()
}
batch := levigo.NewWriteBatch()
defer batch.Close()
// first process the merges
for k, mc := range ldb.merges {
val, err := ldb.store.get([]byte(k))
if err != nil {
return err
}
val, err = mc.Merge([]byte(k), val)
if err != nil {
return err
}
if val == nil {
batch.Delete([]byte(k))
} else {
batch.Put([]byte(k), val)
}
}
// now add all the other ops to the batch
for _, op := range ldb.ops {
if op.v == nil {
batch.Delete(op.k)
} else {
batch.Put(op.k, op.v)
}
}
return ldb.store.db.Write(defaultWriteOptions(), batch)
}
func (ldb *Batch) Close() error {
return nil
}

View File

@ -15,50 +15,60 @@ import (
"github.com/jmhodges/levigo"
)
type LevelDBIterator struct {
store *LevelDBStore
type Iterator struct {
store *Store
iterator *levigo.Iterator
}
func newLevelDBIterator(store *LevelDBStore) *LevelDBIterator {
rv := LevelDBIterator{
func newIterator(store *Store) *Iterator {
rv := Iterator{
store: store,
iterator: store.db.NewIterator(defaultReadOptions()),
}
return &rv
}
func (ldi *LevelDBIterator) SeekFirst() {
func newIteratorWithSnapshot(store *Store, snapshot *levigo.Snapshot) *Iterator {
options := defaultReadOptions()
options.SetSnapshot(snapshot)
rv := Iterator{
store: store,
iterator: store.db.NewIterator(options),
}
return &rv
}
func (ldi *Iterator) SeekFirst() {
ldi.iterator.SeekToFirst()
}
func (ldi *LevelDBIterator) Seek(key []byte) {
func (ldi *Iterator) Seek(key []byte) {
ldi.iterator.Seek(key)
}
func (ldi *LevelDBIterator) Next() {
func (ldi *Iterator) Next() {
ldi.iterator.Next()
}
func (ldi *LevelDBIterator) Current() ([]byte, []byte, bool) {
func (ldi *Iterator) Current() ([]byte, []byte, bool) {
if ldi.Valid() {
return ldi.Key(), ldi.Value(), true
}
return nil, nil, false
}
func (ldi *LevelDBIterator) Key() []byte {
func (ldi *Iterator) Key() []byte {
return ldi.iterator.Key()
}
func (ldi *LevelDBIterator) Value() []byte {
func (ldi *Iterator) Value() []byte {
return ldi.iterator.Value()
}
func (ldi *LevelDBIterator) Valid() bool {
func (ldi *Iterator) Valid() bool {
return ldi.iterator.Valid()
}
func (ldi *LevelDBIterator) Close() {
func (ldi *Iterator) Close() {
ldi.iterator.Close()
}

View File

@ -0,0 +1,44 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// +build leveldb full
package leveldb
import (
"github.com/blevesearch/bleve/index/store"
"github.com/jmhodges/levigo"
)
type Reader struct {
store *Store
snapshot *levigo.Snapshot
}
func newReader(store *Store) *Reader {
return &Reader{
store: store,
snapshot: store.db.NewSnapshot(),
}
}
func (r *Reader) Get(key []byte) ([]byte, error) {
return r.store.getWithSnapshot(key, r.snapshot)
}
func (r *Reader) Iterator(key []byte) store.KVIterator {
rv := newIteratorWithSnapshot(r.store, r.snapshot)
rv.Seek(key)
return rv
}
func (r *Reader) Close() error {
r.store.db.ReleaseSnapshot(r.snapshot)
return nil
}

View File

@ -13,6 +13,7 @@ package leveldb
import (
"fmt"
"sync"
"github.com/blevesearch/bleve/index/store"
"github.com/blevesearch/bleve/registry"
@ -21,14 +22,15 @@ import (
const Name = "leveldb"
type LevelDBStore struct {
path string
opts *levigo.Options
db *levigo.DB
type Store struct {
path string
opts *levigo.Options
db *levigo.DB
writer sync.Mutex
}
func Open(path string, createIfMissing bool, errorIfExists bool) (*LevelDBStore, error) {
rv := LevelDBStore{
func Open(path string, createIfMissing bool, errorIfExists bool) (*Store, error) {
rv := Store{
path: path,
}
@ -46,35 +48,57 @@ func Open(path string, createIfMissing bool, errorIfExists bool) (*LevelDBStore,
return &rv, nil
}
func (ldbs *LevelDBStore) Get(key []byte) ([]byte, error) {
func (ldbs *Store) get(key []byte) ([]byte, error) {
return ldbs.db.Get(defaultReadOptions(), key)
}
func (ldbs *LevelDBStore) Set(key, val []byte) error {
func (ldbs *Store) getWithSnapshot(key []byte, snapshot *levigo.Snapshot) ([]byte, error) {
options := defaultReadOptions()
options.SetSnapshot(snapshot)
return ldbs.db.Get(options, key)
}
func (ldbs *Store) set(key, val []byte) error {
ldbs.writer.Lock()
defer ldbs.writer.Unlock()
return ldbs.setlocked(key, val)
}
func (ldbs *Store) setlocked(key, val []byte) error {
return ldbs.db.Put(defaultWriteOptions(), key, val)
}
func (ldbs *LevelDBStore) Delete(key []byte) error {
func (ldbs *Store) delete(key []byte) error {
ldbs.writer.Lock()
defer ldbs.writer.Unlock()
return ldbs.deletelocked(key)
}
func (ldbs *Store) deletelocked(key []byte) error {
return ldbs.db.Delete(defaultWriteOptions(), key)
}
func (ldbs *LevelDBStore) Commit() error {
return nil
}
func (ldbs *LevelDBStore) Close() error {
func (ldbs *Store) Close() error {
ldbs.db.Close()
return nil
}
func (ldbs *LevelDBStore) Iterator(key []byte) store.KVIterator {
rv := newLevelDBIterator(ldbs)
func (ldbs *Store) iterator(key []byte) store.KVIterator {
rv := newIterator(ldbs)
rv.Seek(key)
return rv
}
func (ldbs *LevelDBStore) NewBatch() store.KVBatch {
return newLevelDBBatch(ldbs)
func (ldbs *Store) Reader() store.KVReader {
return newReader(ldbs)
}
func (ldbs *Store) Writer() store.KVWriter {
return newWriter(ldbs)
}
func (ldbs *Store) newBatch() store.KVBatch {
return newBatch(ldbs)
}
func StoreConstructor(config map[string]interface{}) (store.KVStore, error) {

View File

@ -19,11 +19,25 @@ import (
)
func TestLevelDBStore(t *testing.T) {
defer os.RemoveAll("test")
s, err := Open("test", true, true)
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll("test")
defer s.Close()
store_test.CommonTestKVStore(t, s)
}
func TestReaderIsolation(t *testing.T) {
defer os.RemoveAll("test")
s, err := Open("test", true, true)
if err != nil {
t.Fatal(err)
}
defer s.Close()
store_test.CommonTestReaderIsolation(t, s)
}

View File

@ -0,0 +1,55 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// +build leveldb full
package leveldb
import (
"github.com/blevesearch/bleve/index/store"
)
type Writer struct {
store *Store
}
func newWriter(store *Store) *Writer {
store.writer.Lock()
return &Writer{
store: store,
}
}
func (w *Writer) Set(key, val []byte) error {
return w.store.setlocked(key, val)
}
func (w *Writer) Delete(key []byte) error {
return w.store.deletelocked(key)
}
func (w *Writer) NewBatch() store.KVBatch {
return newBatchAlreadyLocked(w.store)
}
func (w *Writer) Close() error {
w.store.writer.Unlock()
return nil
}
// these two methods can safely read using the regular
// methods without a read transaction, because we know
// that no one else is writing but us
func (w *Writer) Get(key []byte) ([]byte, error) {
return w.store.get(key)
}
func (w *Writer) Iterator(key []byte) store.KVIterator {
return w.store.iterator(key)
}

View File

@ -10,6 +10,7 @@
package store_test
import (
"reflect"
"testing"
"github.com/blevesearch/bleve/index/store"
@ -17,20 +18,21 @@ import (
func CommonTestKVStore(t *testing.T, s store.KVStore) {
err := s.Set([]byte("a"), []byte("val-a"))
writer := s.Writer()
err := writer.Set([]byte("a"), []byte("val-a"))
if err != nil {
t.Fatal(err)
}
err = s.Set([]byte("z"), []byte("val-z"))
err = writer.Set([]byte("z"), []byte("val-z"))
if err != nil {
t.Fatal(err)
}
err = s.Delete([]byte("z"))
err = writer.Delete([]byte("z"))
if err != nil {
t.Fatal(err)
}
batch := s.NewBatch()
batch := writer.NewBatch()
batch.Set([]byte("b"), []byte("val-b"))
batch.Set([]byte("c"), []byte("val-c"))
batch.Set([]byte("d"), []byte("val-d"))
@ -45,8 +47,11 @@ func CommonTestKVStore(t *testing.T, s store.KVStore) {
if err != nil {
t.Fatal(err)
}
writer.Close()
it := s.Iterator([]byte("b"))
reader := s.Reader()
defer reader.Close()
it := reader.Iterator([]byte("b"))
key, val, valid := it.Current()
if !valid {
t.Fatalf("valid false, expected true")
@ -84,3 +89,91 @@ func CommonTestKVStore(t *testing.T, s store.KVStore) {
it.Close()
}
func CommonTestReaderIsolation(t *testing.T, s store.KVStore) {
// insert a kv pair
writer := s.Writer()
err := writer.Set([]byte("a"), []byte("val-a"))
if err != nil {
t.Fatal(err)
}
writer.Close()
// create an isolated reader
reader := s.Reader()
defer reader.Close()
// verify we see the value already inserted
val, err := reader.Get([]byte("a"))
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(val, []byte("val-a")) {
t.Errorf("expected val-a, got nil")
}
// verify that an iterator sees it
count := 0
it := reader.Iterator([]byte{0})
defer it.Close()
for it.Valid() {
it.Next()
count++
}
if count != 1 {
t.Errorf("expected iterator to see 1, saw %d", count)
}
// add something after the reader was created
writer = s.Writer()
err = writer.Set([]byte("b"), []byte("val-b"))
if err != nil {
t.Fatal(err)
}
writer.Close()
// ensure that a newer reader sees it
newReader := s.Reader()
defer newReader.Close()
val, err = newReader.Get([]byte("b"))
if err != nil {
t.Error(err)
}
if !reflect.DeepEqual(val, []byte("val-b")) {
t.Errorf("expected val-b, got nil")
}
// ensure the new reader's iterator also sees it
count = 0
it = newReader.Iterator([]byte{0})
defer it.Close()
for it.Valid() {
it.Next()
count++
}
if count != 2 {
t.Errorf("expected iterator to see 2, saw %d", count)
}
// but that the isolated reader does not
val, err = reader.Get([]byte("b"))
if err != nil {
t.Error(err)
}
if val != nil {
t.Errorf("expected nil, got %v", val)
}
// and ensure that the iterator on the isolated reader also does not
count = 0
it = reader.Iterator([]byte{0})
defer it.Close()
for it.Valid() {
it.Next()
count++
}
if count != 1 {
t.Errorf("expected iterator to see 1, saw %d", count)
}
}

View File

@ -12,6 +12,8 @@ package upside_down
import (
"bytes"
"sort"
"github.com/blevesearch/bleve/index/store"
)
// the functions in this file are only intended to be used by
@ -19,12 +21,12 @@ import (
// if your application relies on them, you're doing something wrong
// they may change or be removed at any time
func (udc *UpsideDownCouch) dumpPrefix(rv chan interface{}, prefix []byte) {
func (udc *UpsideDownCouch) dumpPrefix(kvreader store.KVReader, rv chan interface{}, prefix []byte) {
start := prefix
if start == nil {
start = []byte{0}
}
it := udc.store.Iterator(start)
it := kvreader.Iterator(start)
defer it.Close()
key, val, valid := it.Current()
for valid {
@ -49,7 +51,12 @@ func (udc *UpsideDownCouch) DumpAll() chan interface{} {
rv := make(chan interface{})
go func() {
defer close(rv)
udc.dumpPrefix(rv, nil)
// start an isolated reader for use during the dump
kvreader := udc.store.Reader()
defer kvreader.Close()
udc.dumpPrefix(kvreader, rv, nil)
}()
return rv
}
@ -58,7 +65,12 @@ func (udc *UpsideDownCouch) DumpFields() chan interface{} {
rv := make(chan interface{})
go func() {
defer close(rv)
udc.dumpPrefix(rv, []byte{'f'})
// start an isolated reader for use during the dump
kvreader := udc.store.Reader()
defer kvreader.Close()
udc.dumpPrefix(kvreader, rv, []byte{'f'})
}()
return rv
}
@ -76,7 +88,11 @@ func (udc *UpsideDownCouch) DumpDoc(id string) chan interface{} {
go func() {
defer close(rv)
back, err := udc.backIndexRowForDoc(id)
// start an isolated reader for use during the dump
kvreader := udc.store.Reader()
defer kvreader.Close()
back, err := udc.backIndexRowForDoc(kvreader, id)
if err != nil {
rv <- err
return
@ -97,11 +113,11 @@ func (udc *UpsideDownCouch) DumpDoc(id string) chan interface{} {
// first add all the stored rows
storedRowPrefix := NewStoredRow(id, 0, []uint64{}, 'x', []byte{}).ScanPrefixForDoc()
udc.dumpPrefix(rv, storedRowPrefix)
udc.dumpPrefix(kvreader, rv, storedRowPrefix)
// now walk term keys in order and add them as well
if len(keys) > 0 {
it := udc.store.Iterator(keys[0])
it := kvreader.Iterator(keys[0])
defer it.Close()
for _, key := range keys {

View File

@ -18,26 +18,26 @@ import (
)
type UpsideDownCouchFieldReader struct {
index *UpsideDownCouch
iterator store.KVIterator
endKey []byte
field uint16
indexReader *IndexReader
iterator store.KVIterator
endKey []byte
field uint16
}
func newUpsideDownCouchFieldReader(index *UpsideDownCouch, field uint16, startTerm, endTerm []byte) (*UpsideDownCouchFieldReader, error) {
func newUpsideDownCouchFieldReader(indexReader *IndexReader, field uint16, startTerm, endTerm []byte) (*UpsideDownCouchFieldReader, error) {
startRow := NewTermFrequencyRow(startTerm, field, "", 0, 0)
startKey := startRow.ScanPrefixForFieldTermPrefix()
endKey := NewTermFrequencyRow(endTerm, field, "", 0, 0).Key()
it := index.store.Iterator(startKey)
it := indexReader.kvreader.Iterator(startKey)
return &UpsideDownCouchFieldReader{
index: index,
iterator: it,
field: field,
endKey: endKey,
indexReader: indexReader,
iterator: it,
field: field,
endKey: endKey,
}, nil
}

View File

@ -48,7 +48,9 @@ func TestIndexFieldReader(t *testing.T) {
}
expectedCount++
reader, err := idx.FieldReader("name", nil, nil)
indexReader := idx.Reader()
defer indexReader.Close()
reader, err := indexReader.FieldReader("name", nil, nil)
if err != nil {
t.Errorf("error creating reader: %v", err)
}
@ -67,7 +69,7 @@ func TestIndexFieldReader(t *testing.T) {
t.Errorf("expected 1 term for this field, got %d", termCount)
}
reader, err = idx.FieldReader("desc", nil, nil)
reader, err = indexReader.FieldReader("desc", nil, nil)
if err != nil {
t.Errorf("error creating reader: %v", err)
}
@ -90,7 +92,7 @@ func TestIndexFieldReader(t *testing.T) {
}
// test use case for prefix
reader, err = idx.FieldReader("prefix", []byte("cat"), []byte("cat"))
reader, err = indexReader.FieldReader("prefix", []byte("cat"), []byte("cat"))
if err != nil {
t.Errorf("error creating reader: %v", err)
}

View File

@ -0,0 +1,138 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package upside_down
import (
"bytes"
"github.com/blevesearch/bleve/document"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/index/store"
)
type IndexReader struct {
index *UpsideDownCouch
kvreader store.KVReader
docCount uint64
}
func (i *IndexReader) TermFieldReader(term []byte, fieldName string) (index.TermFieldReader, error) {
fieldIndex, fieldExists := i.index.fieldIndexes[fieldName]
if fieldExists {
return newUpsideDownCouchTermFieldReader(i, term, uint16(fieldIndex))
}
return newUpsideDownCouchTermFieldReader(i, []byte{ByteSeparator}, ^uint16(0))
}
func (i *IndexReader) FieldReader(fieldName string, startTerm []byte, endTerm []byte) (index.FieldReader, error) {
fieldIndex, fieldExists := i.index.fieldIndexes[fieldName]
if fieldExists {
return newUpsideDownCouchFieldReader(i, uint16(fieldIndex), startTerm, endTerm)
}
return newUpsideDownCouchTermFieldReader(i, []byte{ByteSeparator}, ^uint16(0))
}
func (i *IndexReader) DocIDReader(start, end string) (index.DocIDReader, error) {
return newUpsideDownCouchDocIDReader(i, start, end)
}
func (i *IndexReader) Document(id string) (*document.Document, error) {
// first hit the back index to confirm doc exists
backIndexRow, err := i.index.backIndexRowForDoc(i.kvreader, id)
if err != nil {
return nil, err
}
if backIndexRow == nil {
return nil, nil
}
rv := document.NewDocument(id)
storedRow := NewStoredRow(id, 0, []uint64{}, 'x', nil)
storedRowScanPrefix := storedRow.ScanPrefixForDoc()
it := i.kvreader.Iterator(storedRowScanPrefix)
defer it.Close()
key, val, valid := it.Current()
for valid {
if !bytes.HasPrefix(key, storedRowScanPrefix) {
break
}
row, err := NewStoredRowKV(key, val)
if err != nil {
return nil, err
}
if row != nil {
fieldName := i.index.fieldIndexToName(row.field)
field := decodeFieldType(row.typ, fieldName, row.value)
if field != nil {
rv.AddField(field)
}
}
it.Next()
key, val, valid = it.Current()
}
return rv, nil
}
func (i *IndexReader) DocumentFieldTerms(id string) (index.FieldTerms, error) {
back, err := i.index.backIndexRowForDoc(i.kvreader, id)
if err != nil {
return nil, err
}
rv := make(index.FieldTerms, len(back.termEntries))
for _, entry := range back.termEntries {
fieldName := i.index.fieldIndexToName(uint16(*entry.Field))
terms, ok := rv[fieldName]
if !ok {
terms = make([]string, 0)
}
terms = append(terms, *entry.Term)
rv[fieldName] = terms
}
return rv, nil
}
func (i *IndexReader) Fields() ([]string, error) {
rv := make([]string, 0)
it := i.kvreader.Iterator([]byte{'f'})
defer it.Close()
key, val, valid := it.Current()
for valid {
if !bytes.HasPrefix(key, []byte{'f'}) {
break
}
row, err := ParseFromKeyValue(key, val)
if err != nil {
return nil, err
}
if row != nil {
fieldRow, ok := row.(*FieldRow)
if ok {
rv = append(rv, fieldRow.name)
}
}
it.Next()
key, val, valid = it.Current()
}
return rv, nil
}
func (i *IndexReader) GetInternal(key []byte) ([]byte, error) {
internalRow := NewInternalRow(key, nil)
return i.kvreader.Get(internalRow.Key())
}
func (i *IndexReader) DocCount() uint64 {
return i.docCount
}
func (i *IndexReader) Close() {
i.kvreader.Close()
}

View File

@ -17,16 +17,16 @@ import (
)
type UpsideDownCouchTermFieldReader struct {
index *UpsideDownCouch
iterator store.KVIterator
count uint64
term []byte
field uint16
indexReader *IndexReader
iterator store.KVIterator
count uint64
term []byte
field uint16
}
func newUpsideDownCouchTermFieldReader(index *UpsideDownCouch, term []byte, field uint16) (*UpsideDownCouchTermFieldReader, error) {
func newUpsideDownCouchTermFieldReader(indexReader *IndexReader, term []byte, field uint16) (*UpsideDownCouchTermFieldReader, error) {
tfr := NewTermFrequencyRow(term, field, "", 0, 0)
it := index.store.Iterator(tfr.Key())
it := indexReader.kvreader.Iterator(tfr.Key())
var count uint64
key, val, valid := it.Current()
@ -41,11 +41,11 @@ func newUpsideDownCouchTermFieldReader(index *UpsideDownCouch, term []byte, fiel
}
return &UpsideDownCouchTermFieldReader{
index: index,
iterator: it,
count: count,
term: term,
field: field,
indexReader: indexReader,
iterator: it,
count: count,
term: term,
field: field,
}, nil
}
@ -70,7 +70,7 @@ func (r *UpsideDownCouchTermFieldReader) Next() (*index.TermFieldDoc, error) {
ID: string(tfr.doc),
Freq: tfr.freq,
Norm: float64(tfr.norm),
Vectors: r.index.termFieldVectorsFromTermVectors(tfr.vectors),
Vectors: r.indexReader.index.termFieldVectorsFromTermVectors(tfr.vectors),
}, nil
}
return nil, nil
@ -94,7 +94,7 @@ func (r *UpsideDownCouchTermFieldReader) Advance(docID string) (*index.TermField
ID: string(tfr.doc),
Freq: tfr.freq,
Norm: float64(tfr.norm),
Vectors: r.index.termFieldVectorsFromTermVectors(tfr.vectors),
Vectors: r.indexReader.index.termFieldVectorsFromTermVectors(tfr.vectors),
}, nil
}
return nil, nil
@ -105,13 +105,13 @@ func (r *UpsideDownCouchTermFieldReader) Close() {
}
type UpsideDownCouchDocIDReader struct {
index *UpsideDownCouch
iterator store.KVIterator
start string
end string
indexReader *IndexReader
iterator store.KVIterator
start string
end string
}
func newUpsideDownCouchDocIDReader(index *UpsideDownCouch, start, end string) (*UpsideDownCouchDocIDReader, error) {
func newUpsideDownCouchDocIDReader(indexReader *IndexReader, start, end string) (*UpsideDownCouchDocIDReader, error) {
if start == "" {
start = string([]byte{0x0})
}
@ -119,13 +119,13 @@ func newUpsideDownCouchDocIDReader(index *UpsideDownCouch, start, end string) (*
end = string([]byte{0xff})
}
bisr := NewBackIndexRow(start, nil, nil)
it := index.store.Iterator(bisr.Key())
it := indexReader.kvreader.Iterator(bisr.Key())
return &UpsideDownCouchDocIDReader{
index: index,
iterator: it,
start: start,
end: end,
indexReader: indexReader,
iterator: it,
start: start,
end: end,
}, nil
}

View File

@ -48,8 +48,11 @@ func TestIndexReader(t *testing.T) {
}
expectedCount++
indexReader := idx.Reader()
defer indexReader.Close()
// first look for a term that doesn't exist
reader, err := idx.TermFieldReader([]byte("nope"), "name")
reader, err := indexReader.TermFieldReader([]byte("nope"), "name")
if err != nil {
t.Errorf("Error accessing term field reader: %v", err)
}
@ -59,7 +62,7 @@ func TestIndexReader(t *testing.T) {
}
reader.Close()
reader, err = idx.TermFieldReader([]byte("test"), "name")
reader, err = indexReader.TermFieldReader([]byte("test"), "name")
if err != nil {
t.Errorf("Error accessing term field reader: %v", err)
}
@ -97,7 +100,7 @@ func TestIndexReader(t *testing.T) {
},
},
}
tfr, err := idx.TermFieldReader([]byte("rice"), "desc")
tfr, err := indexReader.TermFieldReader([]byte("rice"), "desc")
if err != nil {
t.Errorf("unexpected error: %v", err)
}
@ -111,7 +114,7 @@ func TestIndexReader(t *testing.T) {
reader.Close()
// now test usage of advance
reader, err = idx.TermFieldReader([]byte("test"), "name")
reader, err = indexReader.TermFieldReader([]byte("test"), "name")
if err != nil {
t.Errorf("Error accessing term field reader: %v", err)
}
@ -136,7 +139,7 @@ func TestIndexReader(t *testing.T) {
reader.Close()
// now test creating a reader for a field that doesn't exist
reader, err = idx.TermFieldReader([]byte("water"), "doesnotexist")
reader, err = indexReader.TermFieldReader([]byte("water"), "doesnotexist")
if err != nil {
t.Errorf("Error accessing term field reader: %v", err)
}
@ -190,8 +193,11 @@ func TestIndexDocIdReader(t *testing.T) {
}
expectedCount++
indexReader := idx.Reader()
defer indexReader.Close()
// first get all doc ids
reader, err := idx.DocIDReader("", "")
reader, err := indexReader.DocIDReader("", "")
if err != nil {
t.Errorf("Error accessing doc id reader: %v", err)
}
@ -208,7 +214,7 @@ func TestIndexDocIdReader(t *testing.T) {
}
// try it again, but jump to the second doc this time
reader, err = idx.DocIDReader("", "")
reader, err = indexReader.DocIDReader("", "")
if err != nil {
t.Errorf("Error accessing doc id reader: %v", err)
}

View File

@ -224,6 +224,15 @@ func (tfr *TermFrequencyRow) Key() []byte {
return buf
}
func (tfr *TermFrequencyRow) SummaryKey() []byte {
buf := make([]byte, 3+len(tfr.term)+1)
buf[0] = 't'
binary.LittleEndian.PutUint16(buf[1:3], tfr.field)
termLen := copy(buf[3:], tfr.term)
buf[3+termLen] = ByteSeparator
return buf
}
func (tfr *TermFrequencyRow) Value() []byte {
buf := make([]byte, 8+4+(len(tfr.vectors)*(2+8+8+8)))
@ -298,52 +307,59 @@ func NewTermFrequencyRowK(key []byte) (*TermFrequencyRow, error) {
return &rv, nil
}
func (tfr *TermFrequencyRow) parseV(value []byte) error {
buf := bytes.NewBuffer((value))
err := binary.Read(buf, binary.LittleEndian, &tfr.freq)
if err != nil {
return err
}
err = binary.Read(buf, binary.LittleEndian, &tfr.norm)
if err != nil {
return err
}
var field uint16
err = binary.Read(buf, binary.LittleEndian, &field)
if err != nil && err != io.EOF {
return err
}
for err != io.EOF {
tv := TermVector{}
tv.field = field
// at this point we expect at least one term vector
if tfr.vectors == nil {
tfr.vectors = make([]*TermVector, 0)
}
err = binary.Read(buf, binary.LittleEndian, &tv.pos)
if err != nil {
return err
}
err = binary.Read(buf, binary.LittleEndian, &tv.start)
if err != nil {
return err
}
err = binary.Read(buf, binary.LittleEndian, &tv.end)
if err != nil {
return err
}
tfr.vectors = append(tfr.vectors, &tv)
// try to read next record (may not exist)
err = binary.Read(buf, binary.LittleEndian, &field)
}
return nil
}
func NewTermFrequencyRowKV(key, value []byte) (*TermFrequencyRow, error) {
rv, err := NewTermFrequencyRowK(key)
if err != nil {
return nil, err
}
buf := bytes.NewBuffer((value))
err = binary.Read(buf, binary.LittleEndian, &rv.freq)
err = rv.parseV(value)
if err != nil {
return nil, err
}
err = binary.Read(buf, binary.LittleEndian, &rv.norm)
if err != nil {
return nil, err
}
var field uint16
err = binary.Read(buf, binary.LittleEndian, &field)
if err != nil && err != io.EOF {
return nil, err
}
for err != io.EOF {
tv := TermVector{}
tv.field = field
// at this point we expect at least one term vector
if rv.vectors == nil {
rv.vectors = make([]*TermVector, 0)
}
err = binary.Read(buf, binary.LittleEndian, &tv.pos)
if err != nil {
return nil, err
}
err = binary.Read(buf, binary.LittleEndian, &tv.start)
if err != nil {
return nil, err
}
err = binary.Read(buf, binary.LittleEndian, &tv.end)
if err != nil {
return nil, err
}
rv.vectors = append(rv.vectors, &tv)
// try to read next record (may not exist)
err = binary.Read(buf, binary.LittleEndian, &field)
}
return rv, nil
}

View File

@ -0,0 +1,57 @@
// Copyright (c) 2014 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package upside_down
type termSummaryIncr struct{}
func newTermSummaryIncr() *termSummaryIncr {
return &termSummaryIncr{}
}
func (t *termSummaryIncr) Merge(key, existing []byte) ([]byte, error) {
if len(existing) > 0 {
tfr, err := NewTermFrequencyRowKV(key, existing)
if err != nil {
return nil, err
}
tfr.freq++
return tfr.Value(), nil
} else {
tfr, err := NewTermFrequencyRowK(key)
if err != nil {
return nil, err
}
tfr.freq = 1
return tfr.Value(), nil
}
}
type termSummaryDecr struct{}
func newTermSummaryDecr() *termSummaryDecr {
return &termSummaryDecr{}
}
func (t *termSummaryDecr) Merge(key, existing []byte) ([]byte, error) {
if len(existing) > 0 {
tfr, err := NewTermFrequencyRowKV(key, existing)
if err != nil {
return nil, err
}
tfr.freq--
if tfr.freq > 0 {
return tfr.Value(), nil
} else {
return nil, nil
}
} else {
return nil, nil
}
}

View File

@ -11,7 +11,6 @@ package upside_down
import (
"bytes"
"fmt"
"math"
"github.com/blevesearch/bleve/analysis"
@ -45,20 +44,20 @@ func NewUpsideDownCouch(s store.KVStore) *UpsideDownCouch {
}
}
func (udc *UpsideDownCouch) init() (err error) {
func (udc *UpsideDownCouch) init(kvwriter store.KVWriter) (err error) {
// prepare a list of rows
rows := make([]UpsideDownCouchRow, 0)
// version marker
rows = append(rows, NewVersionRow(udc.version))
return udc.batchRows(nil, rows, nil)
return udc.batchRows(kvwriter, nil, rows, nil)
}
func (udc *UpsideDownCouch) loadSchema() (err error) {
func (udc *UpsideDownCouch) loadSchema(kvreader store.KVReader) (err error) {
keyPrefix := []byte{'f'}
it := udc.store.Iterator(keyPrefix)
it := kvreader.Iterator(keyPrefix)
defer it.Close()
it.Seek(keyPrefix)
@ -85,33 +84,18 @@ func (udc *UpsideDownCouch) loadSchema() (err error) {
return
}
func (udc *UpsideDownCouch) batchRows(addRows []UpsideDownCouchRow, updateRows []UpsideDownCouchRow, deleteRows []UpsideDownCouchRow) (err error) {
func (udc *UpsideDownCouch) batchRows(writer store.KVWriter, addRows []UpsideDownCouchRow, updateRows []UpsideDownCouchRow, deleteRows []UpsideDownCouchRow) (err error) {
// prepare batch
wb := udc.store.NewBatch()
wb := writer.NewBatch()
// add
for _, row := range addRows {
tfr, ok := row.(*TermFrequencyRow)
if ok {
// need to increment counter
tr := NewTermFrequencyRow(tfr.term, tfr.field, "", 0, 0)
val, err := udc.store.Get(tr.Key())
if err != nil {
return err
}
if val != nil {
tr, err = NewTermFrequencyRowKV(tr.Key(), val)
if err != nil {
return err
}
tr.freq++ // incr
} else {
tr = NewTermFrequencyRow(tfr.term, tfr.field, "", 1, 0)
}
// now add this to the batch
wb.Set(tr.Key(), tr.Value())
summaryKey := tfr.SummaryKey()
wb.Merge(summaryKey, newTermSummaryIncr())
}
wb.Set(row.Key(), row.Value())
}
@ -126,28 +110,8 @@ func (udc *UpsideDownCouch) batchRows(addRows []UpsideDownCouchRow, updateRows [
tfr, ok := row.(*TermFrequencyRow)
if ok {
// need to decrement counter
tr := NewTermFrequencyRow(tfr.term, tfr.field, "", 0, 0)
val, err := udc.store.Get(tr.Key())
if err != nil {
return err
}
if val != nil {
tr, err = NewTermFrequencyRowKV(tr.Key(), val)
if err != nil {
return err
}
tr.freq-- // incr
} else {
return fmt.Errorf("unexpected missing row, deleting term, expected count row to exist: %v", tr.Key())
}
if tr.freq == 0 {
wb.Delete(tr.Key())
} else {
// now add this to the batch
wb.Set(tr.Key(), tr.Value())
}
summaryKey := tfr.SummaryKey()
wb.Merge(summaryKey, newTermSummaryDecr())
}
wb.Delete(row.Key())
}
@ -157,7 +121,6 @@ func (udc *UpsideDownCouch) batchRows(addRows []UpsideDownCouchRow, updateRows [
if err != nil {
return
}
err = udc.store.Commit()
return
}
@ -165,32 +128,35 @@ func (udc *UpsideDownCouch) DocCount() uint64 {
return udc.docCount
}
func (udc *UpsideDownCouch) Open() (err error) {
var value []byte
value, err = udc.store.Get(VersionKey)
func (udc *UpsideDownCouch) Open() error {
// start a writer for the open process
kvwriter := udc.store.Writer()
defer kvwriter.Close()
value, err := kvwriter.Get(VersionKey)
if err != nil {
return
return err
}
// init new index OR load schema
if value == nil {
err = udc.init()
err = udc.init(kvwriter)
if err != nil {
return
return err
}
} else {
err = udc.loadSchema()
err = udc.loadSchema(kvwriter)
if err != nil {
return
return err
}
}
// set doc count
udc.docCount = udc.countDocs()
return
udc.docCount = udc.countDocs(kvwriter)
return nil
}
func (udc *UpsideDownCouch) countDocs() uint64 {
it := udc.store.Iterator([]byte{'b'})
func (udc *UpsideDownCouch) countDocs(kvreader store.KVReader) uint64 {
it := kvreader.Iterator([]byte{'b'})
defer it.Close()
var rv uint64
@ -208,7 +174,10 @@ func (udc *UpsideDownCouch) countDocs() uint64 {
}
func (udc *UpsideDownCouch) rowCount() uint64 {
it := udc.store.Iterator([]byte{0})
// start an isolated reader for use during the rowcount
kvreader := udc.store.Reader()
defer kvreader.Close()
it := kvreader.Iterator([]byte{0})
defer it.Close()
var rv uint64
@ -227,9 +196,13 @@ func (udc *UpsideDownCouch) Close() {
}
func (udc *UpsideDownCouch) Update(doc *document.Document) error {
// start a writer for this update
kvwriter := udc.store.Writer()
defer kvwriter.Close()
// first we lookup the backindex row for the doc id if it exists
// lookup the back index row
backIndexRow, err := udc.backIndexRowForDoc(doc.ID)
backIndexRow, err := udc.backIndexRowForDoc(kvwriter, doc.ID)
if err != nil {
return err
}
@ -241,7 +214,7 @@ func (udc *UpsideDownCouch) Update(doc *document.Document) error {
addRows, updateRows, deleteRows = udc.updateSingle(doc, backIndexRow, addRows, updateRows, deleteRows)
err = udc.batchRows(addRows, updateRows, deleteRows)
err = udc.batchRows(kvwriter, addRows, updateRows, deleteRows)
if err == nil && backIndexRow == nil {
udc.docCount++
}
@ -426,8 +399,12 @@ func (udc *UpsideDownCouch) fieldNameToFieldIndex(fieldName string) (uint16, *Fi
}
func (udc *UpsideDownCouch) Delete(id string) error {
// start a writer for this delete
kvwriter := udc.store.Writer()
defer kvwriter.Close()
// lookup the back index row
backIndexRow, err := udc.backIndexRowForDoc(id)
backIndexRow, err := udc.backIndexRowForDoc(kvwriter, id)
if err != nil {
return err
}
@ -438,7 +415,7 @@ func (udc *UpsideDownCouch) Delete(id string) error {
deleteRows := make([]UpsideDownCouchRow, 0)
deleteRows = udc.deleteSingle(id, backIndexRow, deleteRows)
err = udc.batchRows(nil, nil, deleteRows)
err = udc.batchRows(kvwriter, nil, nil, deleteRows)
if err == nil {
udc.docCount--
}
@ -461,13 +438,13 @@ func (udc *UpsideDownCouch) deleteSingle(id string, backIndexRow *BackIndexRow,
return deleteRows
}
func (udc *UpsideDownCouch) backIndexRowForDoc(docID string) (*BackIndexRow, error) {
func (udc *UpsideDownCouch) backIndexRowForDoc(kvreader store.KVReader, docID string) (*BackIndexRow, error) {
// use a temporary row structure to build key
tempRow := &BackIndexRow{
doc: []byte(docID),
}
key := tempRow.Key()
value, err := udc.store.Get(key)
value, err := kvreader.Get(key)
if err != nil {
return nil, err
}
@ -481,12 +458,12 @@ func (udc *UpsideDownCouch) backIndexRowForDoc(docID string) (*BackIndexRow, err
return backIndexRow, nil
}
func (udc *UpsideDownCouch) backIndexRowsForBatch(batch index.Batch) (map[string]*BackIndexRow, error) {
func (udc *UpsideDownCouch) backIndexRowsForBatch(kvreader store.KVReader, batch index.Batch) (map[string]*BackIndexRow, error) {
// FIXME faster to order the ids and scan sequentially
// for now just get it working
rv := make(map[string]*BackIndexRow, 0)
for docID := range batch {
backIndexRow, err := udc.backIndexRowForDoc(docID)
backIndexRow, err := udc.backIndexRowForDoc(kvreader, docID)
if err != nil {
return nil, err
}
@ -495,107 +472,6 @@ func (udc *UpsideDownCouch) backIndexRowsForBatch(batch index.Batch) (map[string
return rv, nil
}
func (udc *UpsideDownCouch) Fields() ([]string, error) {
rv := make([]string, 0)
it := udc.store.Iterator([]byte{'f'})
defer it.Close()
key, val, valid := it.Current()
for valid {
if !bytes.HasPrefix(key, []byte{'f'}) {
break
}
row, err := ParseFromKeyValue(key, val)
if err != nil {
return nil, err
}
if row != nil {
fieldRow, ok := row.(*FieldRow)
if ok {
rv = append(rv, fieldRow.name)
}
}
it.Next()
key, val, valid = it.Current()
}
return rv, nil
}
func (udc *UpsideDownCouch) TermFieldReader(term []byte, fieldName string) (index.TermFieldReader, error) {
fieldIndex, fieldExists := udc.fieldIndexes[fieldName]
if fieldExists {
return newUpsideDownCouchTermFieldReader(udc, term, uint16(fieldIndex))
}
return newUpsideDownCouchTermFieldReader(udc, []byte{ByteSeparator}, ^uint16(0))
}
func (udc *UpsideDownCouch) FieldReader(fieldName string, startTerm []byte, endTerm []byte) (index.FieldReader, error) {
fieldIndex, fieldExists := udc.fieldIndexes[fieldName]
if fieldExists {
return newUpsideDownCouchFieldReader(udc, uint16(fieldIndex), startTerm, endTerm)
}
return newUpsideDownCouchTermFieldReader(udc, []byte{ByteSeparator}, ^uint16(0))
}
func (udc *UpsideDownCouch) DocIDReader(start, end string) (index.DocIDReader, error) {
return newUpsideDownCouchDocIDReader(udc, start, end)
}
func (udc *UpsideDownCouch) Document(id string) (*document.Document, error) {
// first hit the back index to confirm doc exists
backIndexRow, err := udc.backIndexRowForDoc(id)
if err != nil {
return nil, err
}
if backIndexRow == nil {
return nil, nil
}
rv := document.NewDocument(id)
storedRow := NewStoredRow(id, 0, []uint64{}, 'x', nil)
storedRowScanPrefix := storedRow.ScanPrefixForDoc()
it := udc.store.Iterator(storedRowScanPrefix)
defer it.Close()
key, val, valid := it.Current()
for valid {
if !bytes.HasPrefix(key, storedRowScanPrefix) {
break
}
row, err := NewStoredRowKV(key, val)
if err != nil {
return nil, err
}
if row != nil {
fieldName := udc.fieldIndexToName(row.field)
field := decodeFieldType(row.typ, fieldName, row.value)
if field != nil {
rv.AddField(field)
}
}
it.Next()
key, val, valid = it.Current()
}
return rv, nil
}
func (udc *UpsideDownCouch) DocumentFieldTerms(id string) (index.FieldTerms, error) {
back, err := udc.backIndexRowForDoc(id)
if err != nil {
return nil, err
}
rv := make(index.FieldTerms, len(back.termEntries))
for _, entry := range back.termEntries {
fieldName := udc.fieldIndexToName(uint16(*entry.Field))
terms, ok := rv[fieldName]
if !ok {
terms = make([]string, 0)
}
terms = append(terms, *entry.Term)
rv[fieldName] = terms
}
return rv, nil
}
func decodeFieldType(typ byte, name string, value []byte) document.Field {
switch typ {
case 't':
@ -664,8 +540,12 @@ func (udc *UpsideDownCouch) fieldIndexToName(i uint16) string {
}
func (udc *UpsideDownCouch) Batch(batch index.Batch) error {
// start a writer for this batch
kvwriter := udc.store.Writer()
defer kvwriter.Close()
// first lookup all the back index rows
backIndexRows, err := udc.backIndexRowsForBatch(batch)
backIndexRows, err := udc.backIndexRowsForBatch(kvwriter, batch)
if err != nil {
return err
}
@ -691,7 +571,7 @@ func (udc *UpsideDownCouch) Batch(batch index.Batch) error {
}
}
err = udc.batchRows(addRows, updateRows, deleteRows)
err = udc.batchRows(kvwriter, addRows, updateRows, deleteRows)
if err == nil {
udc.docCount += docsAdded
udc.docCount -= docsDeleted
@ -701,15 +581,22 @@ func (udc *UpsideDownCouch) Batch(batch index.Batch) error {
func (udc *UpsideDownCouch) SetInternal(key, val []byte) error {
internalRow := NewInternalRow(key, val)
return udc.store.Set(internalRow.Key(), internalRow.Value())
}
func (udc *UpsideDownCouch) GetInternal(key []byte) ([]byte, error) {
internalRow := NewInternalRow(key, nil)
return udc.store.Get(internalRow.Key())
writer := udc.store.Writer()
defer writer.Close()
return writer.Set(internalRow.Key(), internalRow.Value())
}
func (udc *UpsideDownCouch) DeleteInternal(key []byte) error {
internalRow := NewInternalRow(key, nil)
return udc.store.Delete(internalRow.Key())
writer := udc.store.Writer()
defer writer.Close()
return writer.Delete(internalRow.Key())
}
func (udc *UpsideDownCouch) Reader() index.IndexReader {
return &IndexReader{
index: udc,
kvreader: udc.store.Reader(),
docCount: udc.docCount,
}
}
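
Taken together, the read paths above all flow through a single snapshot: acquire a reader, perform every read against it, and close it to release the underlying KV snapshot. A minimal caller-side sketch of that lifecycle, assuming an already-opened index.Index named idx (illustrative, not part of this commit):

    // illustrative sketch of the reader lifecycle introduced by Reader()
    reader := idx.Reader()
    defer reader.Close() // releases the underlying KV snapshot

    // both reads observe the same consistent snapshot
    count := reader.DocCount()
    doc, err := reader.Document("1") // doc is nil (with a nil error) when the id is absent
    if err != nil {
        return err
    }
    _ = count
    _ = doc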

View File

@ -321,7 +321,10 @@ func TestIndexInsertWithStore(t *testing.T) {
t.Errorf("expected %d rows, got: %d", expectedLength, rowCount)
}
storedDoc, err := idx.Document("1")
indexReader := idx.Reader()
defer indexReader.Close()
storedDoc, err := indexReader.Document("1")
if err != nil {
t.Error(err)
}
@ -349,8 +352,11 @@ func TestIndexInternalCRUD(t *testing.T) {
}
defer idx.Close()
indexReader := idx.Reader()
defer indexReader.Close()
// get something that doesn't exist yet
val, err := idx.GetInternal([]byte("key"))
val, err := indexReader.GetInternal([]byte("key"))
if err != nil {
t.Error(err)
}
@ -364,8 +370,11 @@ func TestIndexInternalCRUD(t *testing.T) {
t.Error(err)
}
indexReader = idx.Reader()
defer indexReader.Close()
// get
val, err = idx.GetInternal([]byte("key"))
val, err = indexReader.GetInternal([]byte("key"))
if err != nil {
t.Error(err)
}
@ -379,8 +388,11 @@ func TestIndexInternalCRUD(t *testing.T) {
t.Error(err)
}
indexReader = idx.Reader()
defer indexReader.Close()
// get again
val, err = idx.GetInternal([]byte("key"))
val, err = indexReader.GetInternal([]byte("key"))
if err != nil {
t.Error(err)
}
@ -439,12 +451,15 @@ func TestIndexBatch(t *testing.T) {
t.Error(err)
}
docCount := idx.DocCount()
indexReader := idx.Reader()
defer indexReader.Close()
docCount := indexReader.DocCount()
if docCount != expectedCount {
t.Errorf("Expected document count to be %d got %d", expectedCount, docCount)
}
docIDReader, err := idx.DocIDReader("", "")
docIDReader, err := indexReader.DocIDReader("", "")
if err != nil {
t.Error(err)
}
@ -519,7 +534,10 @@ func TestIndexInsertUpdateDeleteWithMultipleTypesStored(t *testing.T) {
t.Errorf("expected %d rows, got: %d", expectedLength, rowCount)
}
storedDoc, err := idx.Document("1")
indexReader := idx.Reader()
defer indexReader.Close()
storedDoc, err := indexReader.Document("1")
if err != nil {
t.Error(err)
}
@ -568,14 +586,17 @@ func TestIndexInsertUpdateDeleteWithMultipleTypesStored(t *testing.T) {
t.Errorf("Error updating index: %v", err)
}
indexReader = idx.Reader()
defer indexReader.Close()
// expected doc count shouldn't have changed
docCount = idx.DocCount()
docCount = indexReader.DocCount()
if docCount != expectedCount {
t.Errorf("Expected document count to be %d got %d", expectedCount, docCount)
}
// should only get 2 fields back now though
storedDoc, err = idx.Document("1")
storedDoc, err = indexReader.Document("1")
if err != nil {
t.Error(err)
}
@ -641,7 +662,10 @@ func TestIndexInsertFields(t *testing.T) {
t.Errorf("Error updating index: %v", err)
}
fields, err := idx.Fields()
indexReader := idx.Reader()
defer indexReader.Close()
fields, err := indexReader.Fields()
if err != nil {
t.Error(err)
} else {
@ -699,8 +723,11 @@ func TestIndexUpdateComposites(t *testing.T) {
t.Errorf("Error updating index: %v", err)
}
indexReader := idx.Reader()
defer indexReader.Close()
// make sure new values are in index
storedDoc, err := idx.Document("1")
storedDoc, err := indexReader.Document("1")
if err != nil {
t.Error(err)
}
@ -782,7 +809,10 @@ func TestIndexTermReaderCompositeFields(t *testing.T) {
t.Errorf("Error updating index: %v", err)
}
termFieldReader, err := idx.TermFieldReader([]byte("mister"), "_all")
indexReader := idx.Reader()
defer indexReader.Close()
termFieldReader, err := indexReader.TermFieldReader([]byte("mister"), "_all")
if err != nil {
t.Error(err)
}
@ -821,7 +851,10 @@ func TestIndexDocumentFieldTerms(t *testing.T) {
t.Errorf("Error updating index: %v", err)
}
fieldTerms, err := idx.DocumentFieldTerms("1")
indexReader := idx.Reader()
defer indexReader.Close()
fieldTerms, err := indexReader.DocumentFieldTerms("1")
if err != nil {
t.Error(err)
}

View File

@ -183,7 +183,9 @@ func openIndex(path string) (*indexImpl, error) {
}
// now load the mapping
mappingBytes, err := rv.i.GetInternal(mappingInternalKey)
indexReader := rv.i.Reader()
defer indexReader.Close()
mappingBytes, err := indexReader.GetInternal(mappingInternalKey)
if err != nil {
return nil, err
}
@ -296,7 +298,9 @@ func (i *indexImpl) Document(id string) (*document.Document, error) {
if !i.open {
return nil, ErrorIndexClosed
}
return i.i.Document(id)
indexReader := i.i.Reader()
defer indexReader.Close()
return indexReader.Document(id)
}
// DocCount returns the number of documents in the
@ -323,14 +327,19 @@ func (i *indexImpl) Search(req *SearchRequest) (*SearchResult, error) {
}
collector := collectors.NewTopScorerSkipCollector(req.Size, req.From)
searcher, err := req.Query.Searcher(i, req.Explain)
// open a reader for this search
indexReader := i.i.Reader()
defer indexReader.Close()
searcher, err := req.Query.Searcher(indexReader, i.m, req.Explain)
if err != nil {
return nil, err
}
defer searcher.Close()
if req.Facets != nil {
facetsBuilder := search.NewFacetsBuilder(i.i)
facetsBuilder := search.NewFacetsBuilder(indexReader)
for facetName, facetRequest := range req.Facets {
if facetRequest.NumericRanges != nil {
// build numeric range facet
@ -381,7 +390,7 @@ func (i *indexImpl) Search(req *SearchRequest) (*SearchResult, error) {
}
for _, hit := range hits {
doc, err := i.i.Document(hit.ID)
doc, err := indexReader.Document(hit.ID)
if err == nil {
highlightFields := req.Highlight.Fields
if highlightFields == nil {
@ -403,7 +412,7 @@ func (i *indexImpl) Search(req *SearchRequest) (*SearchResult, error) {
for _, hit := range hits {
// FIXME avoid loading doc second time
// if we already loaded it for highlighting
doc, err := i.i.Document(hit.ID)
doc, err := indexReader.Document(hit.ID)
if err == nil {
for _, f := range req.Fields {
for _, docF := range doc.Fields {
@ -452,7 +461,10 @@ func (i *indexImpl) Fields() ([]string, error) {
if !i.open {
return nil, ErrorIndexClosed
}
return i.i.Fields()
indexReader := i.i.Reader()
defer indexReader.Close()
return indexReader.Fields()
}
// DumpAll writes all index rows to a channel.

View File

@ -12,6 +12,7 @@ package bleve
import (
"encoding/json"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
)
@ -22,7 +23,7 @@ type Query interface {
SetBoost(b float64) Query
Field() string
SetField(f string) Query
Searcher(i *indexImpl, explain bool) (search.Searcher, error)
Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error)
Validate() error
}
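
For query implementors, the practical effect of this change is that Searcher no longer receives the private *indexImpl; it gets an index.IndexReader snapshot plus the *IndexMapping explicitly. A hedged sketch of a hypothetical custom query against the new signature (only the Searcher method is shown; myTermQuery and its fields are illustrative, not part of the commit):

    package bleve

    import (
        "github.com/blevesearch/bleve/index"
        "github.com/blevesearch/bleve/search"
        "github.com/blevesearch/bleve/search/searchers"
    )

    // myTermQuery is a hypothetical example type
    type myTermQuery struct {
        term     string
        FieldVal string
        BoostVal float64
    }

    func (q *myTermQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
        field := q.FieldVal
        if field == "" {
            field = m.DefaultField // defaults now come from the mapping argument
        }
        // the reader supplies the consistent snapshot to the downstream searcher
        return searchers.NewTermSearcher(i, q.term, field, q.BoostVal, explain)
    }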

View File

@ -13,6 +13,7 @@ import (
"encoding/json"
"fmt"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -71,12 +72,12 @@ func (q *booleanQuery) SetBoost(b float64) Query {
return q
}
func (q *booleanQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *booleanQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
var err error
var mustSearcher search.Searcher
if q.Must != nil {
mustSearcher, err = q.Must.Searcher(i, explain)
mustSearcher, err = q.Must.Searcher(i, m, explain)
if err != nil {
return nil, err
}
@ -84,7 +85,7 @@ func (q *booleanQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, er
var shouldSearcher search.Searcher
if q.Should != nil {
shouldSearcher, err = q.Should.Searcher(i, explain)
shouldSearcher, err = q.Should.Searcher(i, m, explain)
if err != nil {
return nil, err
}
@ -92,13 +93,13 @@ func (q *booleanQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, er
var mustNotSearcher search.Searcher
if q.MustNot != nil {
mustNotSearcher, err = q.MustNot.Searcher(i, explain)
mustNotSearcher, err = q.MustNot.Searcher(i, m, explain)
if err != nil {
return nil, err
}
}
return searchers.NewBooleanSearcher(i.i, mustSearcher, shouldSearcher, mustNotSearcher, explain)
return searchers.NewBooleanSearcher(i, mustSearcher, shouldSearcher, mustNotSearcher, explain)
}
func (q *booleanQuery) Validate() error {

View File

@ -12,6 +12,7 @@ package bleve
import (
"encoding/json"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -44,16 +45,16 @@ func (q *conjunctionQuery) AddQuery(aq Query) *conjunctionQuery {
return q
}
func (q *conjunctionQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *conjunctionQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
ss := make([]search.Searcher, len(q.Conjuncts))
for in, conjunct := range q.Conjuncts {
var err error
ss[in], err = conjunct.Searcher(i, explain)
ss[in], err = conjunct.Searcher(i, m, explain)
if err != nil {
return nil, err
}
}
return searchers.NewConjunctionSearcher(i.i, ss, explain)
return searchers.NewConjunctionSearcher(i, ss, explain)
}
func (q *conjunctionQuery) Validate() error {

View File

@ -13,6 +13,7 @@ import (
"fmt"
"math"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/numeric_util"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
@ -69,22 +70,22 @@ func (q *dateRangeQuery) SetField(f string) Query {
return q
}
func (q *dateRangeQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *dateRangeQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
dateTimeParserName := ""
if q.DateTimeParser != nil {
dateTimeParserName = *q.DateTimeParser
} else {
dateTimeParserName = i.m.datetimeParserNameForPath(q.FieldVal)
dateTimeParserName = m.datetimeParserNameForPath(q.FieldVal)
}
dateTimeParser := i.m.dateTimeParserNamed(dateTimeParserName)
dateTimeParser := m.dateTimeParserNamed(dateTimeParserName)
if dateTimeParser == nil {
return nil, fmt.Errorf("no datetime parser named '%s' registered", *q.DateTimeParser)
}
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
// now parse the endpoints
@ -105,7 +106,7 @@ func (q *dateRangeQuery) Searcher(i *indexImpl, explain bool) (search.Searcher,
max = numeric_util.Int64ToFloat64(endTime.UnixNano())
}
return searchers.NewNumericRangeSearcher(i.i, &min, &max, q.InclusiveStart, q.InclusiveEnd, field, q.BoostVal, explain)
return searchers.NewNumericRangeSearcher(i, &min, &max, q.InclusiveStart, q.InclusiveEnd, field, q.BoostVal, explain)
}
func (q *dateRangeQuery) Validate() error {

View File

@ -12,6 +12,7 @@ package bleve
import (
"encoding/json"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -64,16 +65,16 @@ func (q *disjunctionQuery) SetMin(m float64) Query {
return q
}
func (q *disjunctionQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *disjunctionQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
ss := make([]search.Searcher, len(q.Disjuncts))
for in, disjunct := range q.Disjuncts {
var err error
ss[in], err = disjunct.Searcher(i, explain)
ss[in], err = disjunct.Searcher(i, m, explain)
if err != nil {
return nil, err
}
}
return searchers.NewDisjunctionSearcher(i.i, ss, q.MinVal, explain)
return searchers.NewDisjunctionSearcher(i, ss, q.MinVal, explain)
}
func (q *disjunctionQuery) Validate() error {

View File

@ -12,6 +12,7 @@ package bleve
import (
"fmt"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
)
@ -53,20 +54,20 @@ func (q *matchQuery) SetField(f string) Query {
return q
}
func (q *matchQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *matchQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
analyzerName := ""
if q.Analyzer != "" {
analyzerName = q.Analyzer
} else {
analyzerName = i.m.analyzerNameForPath(field)
analyzerName = m.analyzerNameForPath(field)
}
analyzer := i.m.analyzerNamed(analyzerName)
analyzer := m.analyzerNamed(analyzerName)
if analyzer == nil {
return nil, fmt.Errorf("no analyzer named '%s' registered", q.Analyzer)
@ -85,10 +86,10 @@ func (q *matchQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, erro
shouldQuery := NewDisjunctionQueryMin(tqs, 1).
SetBoost(q.BoostVal)
return shouldQuery.Searcher(i, explain)
return shouldQuery.Searcher(i, m, explain)
}
noneQuery := NewMatchNoneQuery()
return noneQuery.Searcher(i, explain)
return noneQuery.Searcher(i, m, explain)
}
func (q *matchQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -35,8 +36,8 @@ func (q *matchAllQuery) SetBoost(b float64) Query {
return q
}
func (q *matchAllQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
return searchers.NewMatchAllSearcher(i.i, q.BoostVal, explain)
func (q *matchAllQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
return searchers.NewMatchAllSearcher(i, q.BoostVal, explain)
}
func (q *matchAllQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -35,8 +36,8 @@ func (q *matchNoneQuery) SetBoost(b float64) Query {
return q
}
func (q *matchNoneQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
return searchers.NewMatchNoneSearcher(i.i)
func (q *matchNoneQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
return searchers.NewMatchNoneSearcher(i)
}
func (q *matchNoneQuery) Validate() error {

View File

@ -12,6 +12,7 @@ package bleve
import (
"fmt"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
)
@ -54,20 +55,20 @@ func (q *matchPhraseQuery) SetField(f string) Query {
return q
}
func (q *matchPhraseQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *matchPhraseQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
analyzerName := ""
if q.Analyzer != "" {
analyzerName = q.Analyzer
} else {
analyzerName = i.m.analyzerNameForPath(field)
analyzerName = m.analyzerNameForPath(field)
}
analyzer := i.m.analyzerNamed(analyzerName)
analyzer := m.analyzerNamed(analyzerName)
if analyzer == nil {
return nil, fmt.Errorf("no analyzer named '%s' registered", q.Analyzer)
}
@ -80,10 +81,10 @@ func (q *matchPhraseQuery) Searcher(i *indexImpl, explain bool) (search.Searcher
}
phraseQuery := NewPhraseQuery(ts, field).SetBoost(q.BoostVal)
return phraseQuery.Searcher(i, explain)
return phraseQuery.Searcher(i, m, explain)
}
noneQuery := NewMatchNoneQuery()
return noneQuery.Searcher(i, explain)
return noneQuery.Searcher(i, m, explain)
}
func (q *matchPhraseQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -64,12 +65,12 @@ func (q *numericRangeQuery) SetField(f string) Query {
return q
}
func (q *numericRangeQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *numericRangeQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
return searchers.NewNumericRangeSearcher(i.i, q.Min, q.Max, q.InclusiveMin, q.InclusiveMax, field, q.BoostVal, explain)
return searchers.NewNumericRangeSearcher(i, q.Min, q.Max, q.InclusiveMin, q.InclusiveMax, field, q.BoostVal, explain)
}
func (q *numericRangeQuery) Validate() error {

View File

@ -13,6 +13,7 @@ import (
"encoding/json"
"fmt"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -47,7 +48,7 @@ func (q *phraseQuery) SetBoost(b float64) Query {
return q
}
func (q *phraseQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *phraseQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
terms := make([]string, len(q.Terms))
for i, term := range q.Terms {
@ -55,11 +56,11 @@ func (q *phraseQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, err
}
conjunctionQuery := NewConjunctionQuery(q.Terms)
conjunctionSearcher, err := conjunctionQuery.Searcher(i, explain)
conjunctionSearcher, err := conjunctionQuery.Searcher(i, m, explain)
if err != nil {
return nil, err
}
return searchers.NewPhraseSearcher(i.i, conjunctionSearcher.(*searchers.ConjunctionSearcher), terms)
return searchers.NewPhraseSearcher(i, conjunctionSearcher.(*searchers.ConjunctionSearcher), terms)
}
func (q *phraseQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -48,12 +49,12 @@ func (q *prefixQuery) SetField(f string) Query {
return q
}
func (q *prefixQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *prefixQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
return searchers.NewTermPrefixSearcher(i.i, q.Prefix, field, q.BoostVal, explain)
return searchers.NewTermPrefixSearcher(i, q.Prefix, field, q.BoostVal, explain)
}
func (q *prefixQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
)
@ -37,12 +38,12 @@ func (q *queryStringQuery) SetBoost(b float64) Query {
return q
}
func (q *queryStringQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
newQuery, err := parseQuerySyntax(q.Query, i.m)
func (q *queryStringQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
newQuery, err := parseQuerySyntax(q.Query, m)
if err != nil {
return nil, err
}
return newQuery.Searcher(i, explain)
return newQuery.Searcher(i, m, explain)
}
func (q *queryStringQuery) Validate() error {

View File

@ -10,6 +10,7 @@
package bleve
import (
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
"github.com/blevesearch/bleve/search/searchers"
)
@ -47,12 +48,12 @@ func (q *termQuery) SetField(f string) Query {
return q
}
func (q *termQuery) Searcher(i *indexImpl, explain bool) (search.Searcher, error) {
func (q *termQuery) Searcher(i index.IndexReader, m *IndexMapping, explain bool) (search.Searcher, error) {
field := q.FieldVal
if q.FieldVal == "" {
field = i.m.DefaultField
field = m.DefaultField
}
return searchers.NewTermSearcher(i.i, q.Term, field, q.BoostVal, explain)
return searchers.NewTermSearcher(i, q.Term, field, q.BoostVal, explain)
}
func (q *termQuery) Validate() error {

View File

@ -19,14 +19,14 @@ type FacetBuilder interface {
}
type FacetsBuilder struct {
index index.Index
facets map[string]FacetBuilder
indexReader index.IndexReader
facets map[string]FacetBuilder
}
func NewFacetsBuilder(index index.Index) *FacetsBuilder {
func NewFacetsBuilder(indexReader index.IndexReader) *FacetsBuilder {
return &FacetsBuilder{
index: index,
facets: make(map[string]FacetBuilder, 0),
indexReader: indexReader,
facets: make(map[string]FacetBuilder, 0),
}
}
@ -35,7 +35,7 @@ func (fb *FacetsBuilder) Add(name string, facetBuilder FacetBuilder) {
}
func (fb *FacetsBuilder) Update(docMatch *DocumentMatch) error {
fieldTerms, err := fb.index.DocumentFieldTerms(docMatch.ID)
fieldTerms, err := fb.indexReader.DocumentFieldTerms(docMatch.ID)
if err != nil {
return err
}
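
FacetsBuilder now works from the same per-search IndexReader as the searcher, so facet counts are derived from the identical snapshot as the hits. A rough usage sketch (indexReader, hits, and someFacetBuilder are placeholders for any reader, result set, and FacetBuilder implementation):

    // illustrative only
    fb := search.NewFacetsBuilder(indexReader)
    fb.Add("type", someFacetBuilder)
    for _, hit := range hits {
        if err := fb.Update(hit); err != nil { // resolves field terms through the reader
            return err
        }
    }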

View File

@ -19,7 +19,7 @@ import (
type BooleanSearcher struct {
initialized bool
index index.Index
indexReader index.IndexReader
mustSearcher search.Searcher
shouldSearcher search.Searcher
mustNotSearcher search.Searcher
@ -32,10 +32,10 @@ type BooleanSearcher struct {
scorer *scorers.ConjunctionQueryScorer
}
func NewBooleanSearcher(index index.Index, mustSearcher search.Searcher, shouldSearcher search.Searcher, mustNotSearcher search.Searcher, explain bool) (*BooleanSearcher, error) {
func NewBooleanSearcher(indexReader index.IndexReader, mustSearcher search.Searcher, shouldSearcher search.Searcher, mustNotSearcher search.Searcher, explain bool) (*BooleanSearcher, error) {
// build our searcher
rv := BooleanSearcher{
index: index,
indexReader: indexReader,
mustSearcher: mustSearcher,
shouldSearcher: shouldSearcher,
mustNotSearcher: mustNotSearcher,

View File

@ -17,211 +17,214 @@ import (
func TestBooleanSearch(t *testing.T) {
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
// test 0
beerTermSearcher, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher}, true)
mustSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher}, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
shouldSearcher, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
shouldSearcher, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
if err != nil {
t.Fatal(err)
}
steveTermSearcher, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher}, 0, true)
mustNotSearcher, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher, err := NewBooleanSearcher(twoDocIndex, mustSearcher, shouldSearcher, mustNotSearcher, true)
booleanSearcher, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher, shouldSearcher, mustNotSearcher, true)
if err != nil {
t.Fatal(err)
}
// test 1
martyTermSearcher2, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher2, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
shouldSearcher2, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher2, dustinTermSearcher2}, 0, true)
shouldSearcher2, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher2, dustinTermSearcher2}, 0, true)
if err != nil {
t.Fatal(err)
}
steveTermSearcher2, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher2, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher2}, 0, true)
mustNotSearcher2, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher2}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher2, err := NewBooleanSearcher(twoDocIndex, nil, shouldSearcher2, mustNotSearcher2, true)
booleanSearcher2, err := NewBooleanSearcher(twoDocIndexReader, nil, shouldSearcher2, mustNotSearcher2, true)
if err != nil {
t.Fatal(err)
}
// test 2
steveTermSearcher3, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher3, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher3, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher3}, 0, true)
mustNotSearcher3, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher3}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher3, err := NewBooleanSearcher(twoDocIndex, nil, nil, mustNotSearcher3, true)
booleanSearcher3, err := NewBooleanSearcher(twoDocIndexReader, nil, nil, mustNotSearcher3, true)
if err != nil {
t.Fatal(err)
}
// test 3
beerTermSearcher4, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher4, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher4, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher4}, true)
mustSearcher4, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher4}, true)
if err != nil {
t.Fatal(err)
}
steveTermSearcher4, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher4, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher4, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher4}, 0, true)
mustNotSearcher4, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher4}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher4, err := NewBooleanSearcher(twoDocIndex, mustSearcher4, nil, mustNotSearcher4, true)
booleanSearcher4, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher4, nil, mustNotSearcher4, true)
if err != nil {
t.Fatal(err)
}
// test 4
beerTermSearcher5, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher5, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher5, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher5}, true)
mustSearcher5, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher5}, true)
if err != nil {
t.Fatal(err)
}
steveTermSearcher5, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher5, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher5, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher5, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher5, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher5, martyTermSearcher5}, 0, true)
mustNotSearcher5, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher5, martyTermSearcher5}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher5, err := NewBooleanSearcher(twoDocIndex, mustSearcher5, nil, mustNotSearcher5, true)
booleanSearcher5, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher5, nil, mustNotSearcher5, true)
if err != nil {
t.Fatal(err)
}
// test 5
beerTermSearcher6, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher6, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher6, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher6}, true)
mustSearcher6, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher6}, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher6, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher6, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher6, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher6, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
shouldSearcher6, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher6, dustinTermSearcher6}, 2, true)
shouldSearcher6, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher6, dustinTermSearcher6}, 2, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher6, err := NewBooleanSearcher(twoDocIndex, mustSearcher6, shouldSearcher6, nil, true)
booleanSearcher6, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher6, shouldSearcher6, nil, true)
if err != nil {
t.Fatal(err)
}
// test 6
beerTermSearcher7, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher7, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher7, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher7}, true)
mustSearcher7, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher7}, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher7, err := NewBooleanSearcher(twoDocIndex, mustSearcher7, nil, nil, true)
booleanSearcher7, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher7, nil, nil, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher7, err := NewTermSearcher(twoDocIndex, "marty", "name", 5.0, true)
martyTermSearcher7, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 5.0, true)
if err != nil {
t.Fatal(err)
}
conjunctionSearcher7, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher7, booleanSearcher7}, true)
conjunctionSearcher7, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher7, booleanSearcher7}, true)
// test 7
beerTermSearcher8, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher8, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher8, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher8}, true)
mustSearcher8, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher8}, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher8, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher8, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher8, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher8, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
shouldSearcher8, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher8, dustinTermSearcher8}, 0, true)
shouldSearcher8, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher8, dustinTermSearcher8}, 0, true)
if err != nil {
t.Fatal(err)
}
steveTermSearcher8, err := NewTermSearcher(twoDocIndex, "steve", "name", 1.0, true)
steveTermSearcher8, err := NewTermSearcher(twoDocIndexReader, "steve", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustNotSearcher8, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{steveTermSearcher8}, 0, true)
mustNotSearcher8, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{steveTermSearcher8}, 0, true)
if err != nil {
t.Fatal(err)
}
booleanSearcher8, err := NewBooleanSearcher(twoDocIndex, mustSearcher8, shouldSearcher8, mustNotSearcher8, true)
booleanSearcher8, err := NewBooleanSearcher(twoDocIndexReader, mustSearcher8, shouldSearcher8, mustNotSearcher8, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher8a, err := NewTermSearcher(twoDocIndex, "dustin", "name", 5.0, true)
dustinTermSearcher8a, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 5.0, true)
if err != nil {
t.Fatal(err)
}
conjunctionSearcher8, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{booleanSearcher8, dustinTermSearcher8a}, true)
conjunctionSearcher8, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{booleanSearcher8, dustinTermSearcher8a}, true)
if err != nil {
t.Fatal(err)
}

View File

@ -20,7 +20,7 @@ import (
type ConjunctionSearcher struct {
initialized bool
index index.Index
indexReader index.IndexReader
searchers OrderedSearcherList
explain bool
queryNorm float64
@ -29,7 +29,7 @@ type ConjunctionSearcher struct {
scorer *scorers.ConjunctionQueryScorer
}
func NewConjunctionSearcher(index index.Index, qsearchers []search.Searcher, explain bool) (*ConjunctionSearcher, error) {
func NewConjunctionSearcher(indexReader index.IndexReader, qsearchers []search.Searcher, explain bool) (*ConjunctionSearcher, error) {
// build the downstream searchers
searchers := make(OrderedSearcherList, len(qsearchers))
for i, searcher := range qsearchers {
@ -39,11 +39,11 @@ func NewConjunctionSearcher(index index.Index, qsearchers []search.Searcher, exp
sort.Sort(searchers)
// build our searcher
rv := ConjunctionSearcher{
index: index,
explain: explain,
searchers: searchers,
currs: make([]*search.DocumentMatch, len(searchers)),
scorer: scorers.NewConjunctionQueryScorer(explain),
indexReader: indexReader,
explain: explain,
searchers: searchers,
currs: make([]*search.DocumentMatch, len(searchers)),
scorer: scorers.NewConjunctionQueryScorer(explain),
}
rv.computeQueryNorm()
return &rv, nil

View File

@ -17,94 +17,97 @@ import (
func TestConjunctionSearch(t *testing.T) {
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
// test 0
beerTermSearcher, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher, err := NewTermSearcher(twoDocIndex, "marty", "name", 5.0, true)
martyTermSearcher, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 5.0, true)
if err != nil {
t.Fatal(err)
}
beerAndMartySearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher, martyTermSearcher}, true)
beerAndMartySearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher, martyTermSearcher}, true)
if err != nil {
t.Fatal(err)
}
// test 1
angstTermSearcher, err := NewTermSearcher(twoDocIndex, "angst", "desc", 1.0, true)
angstTermSearcher, err := NewTermSearcher(twoDocIndexReader, "angst", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
beerTermSearcher2, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
angstAndBeerSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{angstTermSearcher, beerTermSearcher2}, true)
angstAndBeerSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{angstTermSearcher, beerTermSearcher2}, true)
if err != nil {
t.Fatal(err)
}
// test 2
beerTermSearcher3, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher3, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
jackTermSearcher, err := NewTermSearcher(twoDocIndex, "jack", "name", 5.0, true)
jackTermSearcher, err := NewTermSearcher(twoDocIndexReader, "jack", "name", 5.0, true)
if err != nil {
t.Fatal(err)
}
beerAndJackSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher3, jackTermSearcher}, true)
beerAndJackSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher3, jackTermSearcher}, true)
if err != nil {
t.Fatal(err)
}
// test 3
beerTermSearcher4, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher4, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
misterTermSearcher, err := NewTermSearcher(twoDocIndex, "mister", "title", 5.0, true)
misterTermSearcher, err := NewTermSearcher(twoDocIndexReader, "mister", "title", 5.0, true)
if err != nil {
t.Fatal(err)
}
beerAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher4, misterTermSearcher}, true)
beerAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher4, misterTermSearcher}, true)
if err != nil {
t.Fatal(err)
}
// test 4
couchbaseTermSearcher, err := NewTermSearcher(twoDocIndex, "couchbase", "street", 1.0, true)
couchbaseTermSearcher, err := NewTermSearcher(twoDocIndexReader, "couchbase", "street", 1.0, true)
if err != nil {
t.Fatal(err)
}
misterTermSearcher2, err := NewTermSearcher(twoDocIndex, "mister", "title", 5.0, true)
misterTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "mister", "title", 5.0, true)
if err != nil {
t.Fatal(err)
}
couchbaseAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{couchbaseTermSearcher, misterTermSearcher2}, true)
couchbaseAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{couchbaseTermSearcher, misterTermSearcher2}, true)
if err != nil {
t.Fatal(err)
}
// test 5
beerTermSearcher5, err := NewTermSearcher(twoDocIndex, "beer", "desc", 5.0, true)
beerTermSearcher5, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 5.0, true)
if err != nil {
t.Fatal(err)
}
couchbaseTermSearcher2, err := NewTermSearcher(twoDocIndex, "couchbase", "street", 1.0, true)
couchbaseTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "couchbase", "street", 1.0, true)
if err != nil {
t.Fatal(err)
}
misterTermSearcher3, err := NewTermSearcher(twoDocIndex, "mister", "title", 5.0, true)
misterTermSearcher3, err := NewTermSearcher(twoDocIndexReader, "mister", "title", 5.0, true)
if err != nil {
t.Fatal(err)
}
couchbaseAndMisterSearcher2, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{couchbaseTermSearcher2, misterTermSearcher3}, true)
couchbaseAndMisterSearcher2, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{couchbaseTermSearcher2, misterTermSearcher3}, true)
if err != nil {
t.Fatal(err)
}
beerAndCouchbaseAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{beerTermSearcher5, couchbaseAndMisterSearcher2}, true)
beerAndCouchbaseAndMisterSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{beerTermSearcher5, couchbaseAndMisterSearcher2}, true)
if err != nil {
t.Fatal(err)
}

View File

@ -20,7 +20,7 @@ import (
type DisjunctionSearcher struct {
initialized bool
index index.Index
indexReader index.IndexReader
searchers OrderedSearcherList
queryNorm float64
currs []*search.DocumentMatch
@ -29,7 +29,7 @@ type DisjunctionSearcher struct {
min float64
}
func NewDisjunctionSearcher(index index.Index, qsearchers []search.Searcher, min float64, explain bool) (*DisjunctionSearcher, error) {
func NewDisjunctionSearcher(indexReader index.IndexReader, qsearchers []search.Searcher, min float64, explain bool) (*DisjunctionSearcher, error) {
// build the downstream searchers
searchers := make(OrderedSearcherList, len(qsearchers))
for i, searcher := range qsearchers {
@ -39,11 +39,11 @@ func NewDisjunctionSearcher(index index.Index, qsearchers []search.Searcher, min
sort.Sort(sort.Reverse(searchers))
// build our searcher
rv := DisjunctionSearcher{
index: index,
searchers: searchers,
currs: make([]*search.DocumentMatch, len(searchers)),
scorer: scorers.NewDisjunctionQueryScorer(explain),
min: min,
indexReader: indexReader,
searchers: searchers,
currs: make([]*search.DocumentMatch, len(searchers)),
scorer: scorers.NewDisjunctionQueryScorer(explain),
min: min,
}
rv.computeQueryNorm()
return &rv, nil

View File

@ -17,37 +17,40 @@ import (
func TestDisjunctionSearch(t *testing.T) {
martyTermSearcher, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
martyTermSearcher, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
martyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
martyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
if err != nil {
t.Fatal(err)
}
martyTermSearcher2, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
martyTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher2, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher2, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
martyOrDustinSearcher2, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher2, dustinTermSearcher2}, 0, true)
martyOrDustinSearcher2, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher2, dustinTermSearcher2}, 0, true)
if err != nil {
t.Fatal(err)
}
raviTermSearcher, err := NewTermSearcher(twoDocIndex, "ravi", "name", 1.0, true)
raviTermSearcher, err := NewTermSearcher(twoDocIndexReader, "ravi", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
nestedRaviOrMartyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{raviTermSearcher, martyOrDustinSearcher2}, 0, true)
nestedRaviOrMartyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{raviTermSearcher, martyOrDustinSearcher2}, 0, true)
if err != nil {
t.Fatal(err)
}
@ -119,15 +122,18 @@ func TestDisjunctionSearch(t *testing.T) {
func TestDisjunctionAdvance(t *testing.T) {
martyTermSearcher, err := NewTermSearcher(twoDocIndex, "marty", "name", 1.0, true)
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
martyTermSearcher, err := NewTermSearcher(twoDocIndexReader, "marty", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
dustinTermSearcher, err := NewTermSearcher(twoDocIndex, "dustin", "name", 1.0, true)
dustinTermSearcher, err := NewTermSearcher(twoDocIndexReader, "dustin", "name", 1.0, true)
if err != nil {
t.Fatal(err)
}
martyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndex, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
martyOrDustinSearcher, err := NewDisjunctionSearcher(twoDocIndexReader, []search.Searcher{martyTermSearcher, dustinTermSearcher}, 0, true)
if err != nil {
t.Fatal(err)
}

View File

@ -16,26 +16,26 @@ import (
)
type MatchAllSearcher struct {
index index.Index
reader index.DocIDReader
scorer *scorers.ConstantScorer
indexReader index.IndexReader
reader index.DocIDReader
scorer *scorers.ConstantScorer
}
func NewMatchAllSearcher(index index.Index, boost float64, explain bool) (*MatchAllSearcher, error) {
reader, err := index.DocIDReader("", "")
func NewMatchAllSearcher(indexReader index.IndexReader, boost float64, explain bool) (*MatchAllSearcher, error) {
reader, err := indexReader.DocIDReader("", "")
if err != nil {
return nil, err
}
scorer := scorers.NewConstantScorer(1.0, boost, explain)
return &MatchAllSearcher{
index: index,
reader: reader,
scorer: scorer,
indexReader: indexReader,
reader: reader,
scorer: scorer,
}, nil
}
func (s *MatchAllSearcher) Count() uint64 {
return s.index.DocCount()
return s.indexReader.DocCount()
}
func (s *MatchAllSearcher) Weight() float64 {

View File

@ -17,12 +17,15 @@ import (
func TestMatchAllSearch(t *testing.T) {
allSearcher, err := NewMatchAllSearcher(twoDocIndex, 1.0, true)
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
allSearcher, err := NewMatchAllSearcher(twoDocIndexReader, 1.0, true)
if err != nil {
t.Fatal(err)
}
allSearcher2, err := NewMatchAllSearcher(twoDocIndex, 1.2, true)
allSearcher2, err := NewMatchAllSearcher(twoDocIndexReader, 1.2, true)
if err != nil {
t.Fatal(err)
}

View File

@ -15,12 +15,12 @@ import (
)
type MatchNoneSearcher struct {
index index.Index
indexReader index.IndexReader
}
func NewMatchNoneSearcher(index index.Index) (*MatchNoneSearcher, error) {
func NewMatchNoneSearcher(indexReader index.IndexReader) (*MatchNoneSearcher, error) {
return &MatchNoneSearcher{
index: index,
indexReader: indexReader,
}, nil
}

View File

@ -17,7 +17,10 @@ import (
func TestMatchNoneSearch(t *testing.T) {
noneSearcher, err := NewMatchNoneSearcher(twoDocIndex)
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
noneSearcher, err := NewMatchNoneSearcher(twoDocIndexReader)
if err != nil {
t.Fatal(err)
}

View File

@ -19,15 +19,15 @@ import (
)
type NumericRangeSearcher struct {
index index.Index
min *float64
max *float64
field string
explain bool
searcher *DisjunctionSearcher
indexReader index.IndexReader
min *float64
max *float64
field string
explain bool
searcher *DisjunctionSearcher
}
func NewNumericRangeSearcher(index index.Index, min *float64, max *float64, inclusiveMin, inclusiveMax *bool, field string, boost float64, explain bool) (*NumericRangeSearcher, error) {
func NewNumericRangeSearcher(indexReader index.IndexReader, min *float64, max *float64, inclusiveMin, inclusiveMax *bool, field string, boost float64, explain bool) (*NumericRangeSearcher, error) {
// account for unbounded edges
if min == nil {
negInf := math.Inf(-1)
@ -61,23 +61,23 @@ func NewNumericRangeSearcher(index index.Index, min *float64, max *float64, incl
qsearchers := make([]search.Searcher, len(terms))
for i, term := range terms {
var err error
qsearchers[i], err = NewTermSearcher(index, string(term), field, 1.0, explain)
qsearchers[i], err = NewTermSearcher(indexReader, string(term), field, 1.0, explain)
if err != nil {
return nil, err
}
}
// build disjunction searcher of these ranges
searcher, err := NewDisjunctionSearcher(index, qsearchers, 0, explain)
searcher, err := NewDisjunctionSearcher(indexReader, qsearchers, 0, explain)
if err != nil {
return nil, err
}
return &NumericRangeSearcher{
index: index,
min: min,
max: max,
field: field,
explain: explain,
searcher: searcher,
indexReader: indexReader,
min: min,
max: max,
field: field,
explain: explain,
searcher: searcher,
}, nil
}

View File

@ -18,7 +18,7 @@ import (
type PhraseSearcher struct {
initialized bool
index index.Index
indexReader index.IndexReader
mustSearcher *ConjunctionSearcher
queryNorm float64
currMust *search.DocumentMatch
@ -26,11 +26,11 @@ type PhraseSearcher struct {
terms []string
}
func NewPhraseSearcher(index index.Index, mustSearcher *ConjunctionSearcher, terms []string) (*PhraseSearcher, error) {
func NewPhraseSearcher(indexReader index.IndexReader, mustSearcher *ConjunctionSearcher, terms []string) (*PhraseSearcher, error) {
// build our searcher
rv := PhraseSearcher{
index: index,
indexReader: indexReader,
mustSearcher: mustSearcher,
terms: terms,
}

View File

@ -17,19 +17,22 @@ import (
func TestPhraseSearch(t *testing.T) {
angstTermSearcher, err := NewTermSearcher(twoDocIndex, "angst", "desc", 1.0, true)
twoDocIndexReader := twoDocIndex.Reader()
defer twoDocIndexReader.Close()
angstTermSearcher, err := NewTermSearcher(twoDocIndexReader, "angst", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
beerTermSearcher, err := NewTermSearcher(twoDocIndex, "beer", "desc", 1.0, true)
beerTermSearcher, err := NewTermSearcher(twoDocIndexReader, "beer", "desc", 1.0, true)
if err != nil {
t.Fatal(err)
}
mustSearcher, err := NewConjunctionSearcher(twoDocIndex, []search.Searcher{angstTermSearcher, beerTermSearcher}, true)
mustSearcher, err := NewConjunctionSearcher(twoDocIndexReader, []search.Searcher{angstTermSearcher, beerTermSearcher}, true)
if err != nil {
t.Fatal(err)
}
phraseSearcher, err := NewPhraseSearcher(twoDocIndex, mustSearcher, []string{"angst", "beer"})
phraseSearcher, err := NewPhraseSearcher(twoDocIndexReader, mustSearcher, []string{"angst", "beer"})
if err != nil {
t.Fatal(err)
}

View File

@ -16,27 +16,27 @@ import (
)
type TermSearcher struct {
index index.Index
term string
field string
explain bool
reader index.TermFieldReader
scorer *scorers.TermQueryScorer
indexReader index.IndexReader
term string
field string
explain bool
reader index.TermFieldReader
scorer *scorers.TermQueryScorer
}
func NewTermSearcher(index index.Index, term string, field string, boost float64, explain bool) (*TermSearcher, error) {
reader, err := index.TermFieldReader([]byte(term), field)
func NewTermSearcher(indexReader index.IndexReader, term string, field string, boost float64, explain bool) (*TermSearcher, error) {
reader, err := indexReader.TermFieldReader([]byte(term), field)
if err != nil {
return nil, err
}
scorer := scorers.NewTermQueryScorer(term, field, boost, index.DocCount(), reader.Count(), explain)
scorer := scorers.NewTermQueryScorer(term, field, boost, indexReader.DocCount(), reader.Count(), explain)
return &TermSearcher{
index: index,
term: term,
field: field,
explain: explain,
reader: reader,
scorer: scorer,
indexReader: indexReader,
term: term,
field: field,
explain: explain,
reader: reader,
scorer: scorer,
}, nil
}

View File

@ -15,22 +15,22 @@ import (
)
type TermPrefixSearcher struct {
index index.Index
prefix string
field string
explain bool
searcher *DisjunctionSearcher
indexReader index.IndexReader
prefix string
field string
explain bool
searcher *DisjunctionSearcher
}
func NewTermPrefixSearcher(index index.Index, prefix string, field string, boost float64, explain bool) (*TermPrefixSearcher, error) {
func NewTermPrefixSearcher(indexReader index.IndexReader, prefix string, field string, boost float64, explain bool) (*TermPrefixSearcher, error) {
// find the terms with this prefix
fieldReader, err := index.FieldReader(field, []byte(prefix), []byte(prefix))
fieldReader, err := indexReader.FieldReader(field, []byte(prefix), []byte(prefix))
// enumerate all the terms in the range
qsearchers := make([]search.Searcher, 0, 25)
tfd, err := fieldReader.Next()
for err == nil && tfd != nil {
qsearcher, err := NewTermSearcher(index, string(tfd.Term), field, 1.0, explain)
qsearcher, err := NewTermSearcher(indexReader, string(tfd.Term), field, 1.0, explain)
if err != nil {
return nil, err
}
@ -38,17 +38,17 @@ func NewTermPrefixSearcher(index index.Index, prefix string, field string, boost
tfd, err = fieldReader.Next()
}
// build a disjunction searcher over the enumerated term searchers
searcher, err := NewDisjunctionSearcher(index, qsearchers, 0, explain)
searcher, err := NewDisjunctionSearcher(indexReader, qsearchers, 0, explain)
if err != nil {
return nil, err
}
return &TermPrefixSearcher{
index: index,
prefix: prefix,
field: field,
explain: explain,
searcher: searcher,
indexReader: indexReader,
prefix: prefix,
field: field,
explain: explain,
searcher: searcher,
}, nil
}
func (s *TermPrefixSearcher) Count() uint64 {

View File

@ -88,7 +88,10 @@ func TestTermSearcher(t *testing.T) {
},
})
searcher, err := NewTermSearcher(i, queryTerm, queryField, queryBoost, queryExplain)
indexReader := i.Reader()
defer indexReader.Close()
searcher, err := NewTermSearcher(indexReader, queryTerm, queryField, queryBoost, queryExplain)
if err != nil {
t.Fatal(err)
}