Merge pull request #446 from steveyen/perf-locations-alloc

perf avoid locations alloc
Marty Schoch 2016-09-23 12:19:39 -04:00 committed by GitHub
commit 1d81d34a5a
7 changed files with 89 additions and 81 deletions

@@ -12,8 +12,6 @@
package moss

import (
	"bytes"
	"github.com/couchbase/moss"
)
@@ -29,29 +27,9 @@ type Iterator struct {
}

func (x *Iterator) Seek(seekToKey []byte) {
	x.k = nil
	x.v = nil
	x.err = moss.ErrIteratorDone
	_ = x.iter.SeekTo(seekToKey)
	if bytes.Compare(seekToKey, x.start) < 0 {
		seekToKey = x.start
	}
	iter, err := x.ss.StartIterator(seekToKey, x.end, moss.IteratorOptions{})
	if err != nil {
		x.store.Logf("bleve moss StartIterator err: %v", err)
		return
	}
	err = x.iter.Close()
	if err != nil {
		x.store.Logf("bleve moss iterator.Seek err: %v", err)
		return
	}
	x.iter = iter
	x.current()
	x.k, x.v, x.err = x.iter.Current()
}

func (x *Iterator) Next() {

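Note: the hunk above removes the close-and-reopen path from Seek and instead repositions the existing moss iterator with SeekTo, so repeated seeks no longer build (and allocate) a fresh iterator. A minimal sketch of the same seek-in-place idea over a toy sorted-slice iterator; sliceIter and its methods are illustrative names, not moss or bleve APIs.

package main

import (
	"fmt"
	"sort"
)

// sliceIter is a stand-in for a store iterator over sorted keys.
type sliceIter struct {
	keys []string
	pos  int
}

// SeekTo repositions the existing cursor to the first key >= k,
// instead of constructing a replacement iterator on every seek.
func (it *sliceIter) SeekTo(k string) {
	it.pos = sort.SearchStrings(it.keys, k)
}

// Current returns the key under the cursor, or false when exhausted.
func (it *sliceIter) Current() (string, bool) {
	if it.pos >= len(it.keys) {
		return "", false
	}
	return it.keys[it.pos], true
}

func main() {
	it := &sliceIter{keys: []string{"a", "cat", "dog", "zebra"}}
	for _, seek := range []string{"b", "dog", "zzz"} {
		it.SeekTo(seek) // reuse the iterator; no per-seek allocation
		if k, ok := it.Current(); ok {
			fmt.Println(seek, "->", k)
		} else {
			fmt.Println(seek, "-> exhausted")
		}
	}
}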
@@ -393,6 +393,12 @@ func (lli *llIterator) Next() error {
	return nil
}

func (lli *llIterator) SeekTo(k []byte) error {
	lli.kvIterator.Seek(k)
	return nil
}

func (lli *llIterator) Current() (key, val []byte, err error) {
	key, val, ok := lli.kvIterator.Current()
	if !ok {

@@ -24,6 +24,7 @@ type UpsideDownCouchTermFieldReader struct {
	iterator store.KVIterator
	term []byte
	tfrNext *TermFrequencyRow
	keyBuf []byte
	field uint16
}
@@ -104,11 +105,23 @@ func (r *UpsideDownCouchTermFieldReader) Next(preAlloced *index.TermFieldDoc) (*
func (r *UpsideDownCouchTermFieldReader) Advance(docID index.IndexInternalID, preAlloced *index.TermFieldDoc) (*index.TermFieldDoc, error) {
	if r.iterator != nil {
		tfr := NewTermFrequencyRow(r.term, r.field, docID, 0, 0)
		r.iterator.Seek(tfr.Key())
		if r.tfrNext == nil {
			r.tfrNext = &TermFrequencyRow{}
		}
		tfr := InitTermFrequencyRow(r.tfrNext, r.term, r.field, docID, 0, 0)
		keySize := tfr.KeySize()
		if cap(r.keyBuf) < keySize {
			r.keyBuf = make([]byte, keySize)
		}
		keySize, _ = tfr.KeyTo(r.keyBuf[0:keySize])
		r.iterator.Seek(r.keyBuf[0:keySize])
		key, val, valid := r.iterator.Current()
		if valid {
			tfr, err := NewTermFrequencyRowKV(key, val)
			err := tfr.parseKDoc(key, r.term)
			if err != nil {
				return nil, err
			}
			err = tfr.parseV(val)
			if err != nil {
				return nil, err
			}

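Note: the Advance change above removes two per-call allocations: the TermFrequencyRow is now reused through r.tfrNext, and the seek key is encoded into a grow-only r.keyBuf via KeyTo rather than materialized with tfr.Key(). A hedged sketch of that buffer-reuse pattern follows; reader, encodeKey, and the 9-byte key layout are made up for illustration and are not bleve's row encoding.

package main

import (
	"encoding/binary"
	"fmt"
)

// reader keeps a scratch key buffer that is grown on demand and reused on
// every later call, so steady-state seeks do not allocate.
type reader struct {
	keyBuf []byte
}

// encodeKey writes an example key (prefix byte + big-endian uint64 id) into
// buf, which must hold at least 9 bytes, and reports the bytes written.
func encodeKey(buf []byte, id uint64) int {
	buf[0] = 't'
	binary.BigEndian.PutUint64(buf[1:9], id)
	return 9
}

// seekKey returns the encoded key for id, reusing r.keyBuf when it is large enough.
func (r *reader) seekKey(id uint64) []byte {
	const keySize = 9
	if cap(r.keyBuf) < keySize {
		r.keyBuf = make([]byte, keySize) // grow once, reuse afterwards
	}
	n := encodeKey(r.keyBuf[0:keySize], id)
	return r.keyBuf[0:n]
}

func main() {
	r := &reader{}
	for id := uint64(1); id <= 3; id++ {
		fmt.Printf("% x\n", r.seekKey(id)) // same backing array on every call
	}
}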
@@ -461,6 +461,15 @@ func (tfr *TermFrequencyRow) String() string {
	return fmt.Sprintf("Term: `%s` Field: %d DocId: `%s` Frequency: %d Norm: %f Vectors: %v", string(tfr.term), tfr.field, string(tfr.doc), tfr.freq, tfr.norm, tfr.vectors)
}

func InitTermFrequencyRow(tfr *TermFrequencyRow, term []byte, field uint16, docID []byte, freq uint64, norm float32) *TermFrequencyRow {
	tfr.term = term
	tfr.field = field
	tfr.doc = docID
	tfr.freq = freq
	tfr.norm = norm
	return tfr
}

func NewTermFrequencyRow(term []byte, field uint16, docID []byte, freq uint64, norm float32) *TermFrequencyRow {
	return &TermFrequencyRow{
		term: term,

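Note: InitTermFrequencyRow fills in a caller-owned struct, so hot paths such as Advance above can recycle one value instead of allocating through NewTermFrequencyRow on every call. The general Init-versus-New shape, sketched with a hypothetical row type (not bleve's TermFrequencyRow):

package main

import "fmt"

// row stands in for a reusable record type.
type row struct {
	term []byte
	doc  []byte
	freq uint64
}

// initRow resets an existing row in place and returns it; no allocation
// happens when the caller already has a value to reuse.
func initRow(r *row, term, doc []byte, freq uint64) *row {
	r.term = term
	r.doc = doc
	r.freq = freq
	return r
}

// newRow is the allocating constructor, kept for callers without a spare value.
func newRow(term, doc []byte, freq uint64) *row {
	return &row{term: term, doc: doc, freq: freq}
}

func main() {
	var scratch row // one value, reused across the loop
	for i := uint64(1); i <= 3; i++ {
		r := initRow(&scratch, []byte("beer"), []byte(fmt.Sprintf("doc-%d", i)), i)
		fmt.Println(string(r.doc), r.freq)
	}
	_ = newRow // allocating path shown only for contrast
}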
@@ -32,7 +32,7 @@ func (s *DisjunctionQueryScorer) Score(ctx *search.SearchContext, constituents [
		childrenExplanations = make([]*search.Explanation, len(constituents))
	}
	locations := []search.FieldTermLocationMap{}
	var locations []search.FieldTermLocationMap
	for i, docMatch := range constituents {
		sum += docMatch.Score
		if s.explain {

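Note: the one-line scorer change swaps an empty composite literal for a nil slice declaration. Declared with var, locations stays nil until the loop actually appends to it, whereas the literal constructs a non-nil empty slice on every Score call; the PR title suggests that construction showed up as an avoidable allocation in profiles. A tiny demonstration that append works fine on a nil slice:

package main

import "fmt"

func main() {
	// Declared nil: nothing is built until the first append.
	var locations []map[string]int
	fmt.Println(locations == nil, len(locations)) // true 0

	// append accepts a nil slice and allocates only when an element arrives.
	locations = append(locations, map[string]int{"beer": 1})
	fmt.Println(locations == nil, len(locations)) // false 1
}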
@@ -23,7 +23,7 @@ type ConjunctionSearcher struct {
	searchers OrderedSearcherList
	queryNorm float64
	currs []*search.DocumentMatch
	currentID index.IndexInternalID
	maxIDIdx int
	scorer *scorers.ConjunctionQueryScorer
	initialized bool
	explain bool
@@ -75,15 +75,6 @@ func (s *ConjunctionSearcher) initSearchers(ctx *search.SearchContext) error {
			return err
		}
	}
	if len(s.currs) > 0 {
		if s.currs[0] != nil {
			s.currentID = s.currs[0].IndexInternalID
		} else {
			s.currentID = nil
		}
	}
	s.initialized = true
	return nil
}
@@ -112,44 +103,58 @@ func (s *ConjunctionSearcher) Next(ctx *search.SearchContext) (*search.DocumentM
	var rv *search.DocumentMatch
	var err error
OUTER:
	for s.currentID != nil {
		for i, termSearcher := range s.searchers {
	for s.currs[s.maxIDIdx] != nil {
		maxID := s.currs[s.maxIDIdx].IndexInternalID
		i := 0
		for i < len(s.currs) {
			if s.currs[i] == nil {
				s.currentID = nil
				return nil, nil
			}
			if i == s.maxIDIdx {
				i++
				continue
			}
			cmp := maxID.Compare(s.currs[i].IndexInternalID)
			if cmp == 0 {
				i++
				continue
			}
			if cmp < 0 {
				// maxID < currs[i], so we found a new maxIDIdx
				s.maxIDIdx = i
				// advance the positions where [0 <= x < i], since we
				// know they were equal to the former max entry
				maxID = s.currs[s.maxIDIdx].IndexInternalID
				for x := 0; x < i; x++ {
					err = s.advanceChild(ctx, x, maxID)
					if err != nil {
						return nil, err
					}
				}
				continue OUTER
			}
			cmp := s.currentID.Compare(s.currs[i].IndexInternalID)
			if cmp != 0 {
				if cmp < 0 {
					s.currentID = s.currs[i].IndexInternalID
					continue OUTER
				}
				// this reader is less than the currentID, try to advance
				if s.currs[i] != nil {
					ctx.DocumentMatchPool.Put(s.currs[i])
				}
				s.currs[i], err = termSearcher.Advance(ctx, s.currentID)
				if err != nil {
					return nil, err
				}
				if s.currs[i] == nil {
					s.currentID = nil
					continue OUTER
				}
				if !s.currs[i].IndexInternalID.Equals(s.currentID) {
					// we just advanced, so it doesn't match, it must be greater
					// no need to call next
					s.currentID = s.currs[i].IndexInternalID
					continue OUTER
				}
			// maxID > currs[i], so need to advance searchers[i]
			err = s.advanceChild(ctx, i, maxID)
			if err != nil {
				return nil, err
			}
			// don't bump i, so that we'll examine the just-advanced
			// currs[i] again
		}
		// if we get here, a doc matched all readers, sum the score and add it
		// if we get here, a doc matched all readers, so score and add it
		rv = s.scorer.Score(ctx, s.currs)
		// we know all the searchers are pointing at the same thing
		// so they all need to be advanced
		// so they all need to be bumped
		for i, termSearcher := range s.searchers {
			if s.currs[i] != rv {
				ctx.DocumentMatchPool.Put(s.currs[i])
@@ -160,12 +165,6 @@ OUTER:
			}
		}
		if s.currs[0] == nil {
			s.currentID = nil
		} else {
			s.currentID = s.currs[0].IndexInternalID
		}
		// don't continue now, wait for the next call to Next()
		break
	}
@@ -179,20 +178,23 @@ func (s *ConjunctionSearcher) Advance(ctx *search.SearchContext, ID index.IndexI
			return nil, err
		}
	}
	var err error
	for i, searcher := range s.searchers {
		if s.currs[i] != nil {
			ctx.DocumentMatchPool.Put(s.currs[i])
		}
		s.currs[i], err = searcher.Advance(ctx, ID)
	for i := range s.searchers {
		err := s.advanceChild(ctx, i, ID)
		if err != nil {
			return nil, err
		}
	}
	s.currentID = ID
	return s.Next(ctx)
}

func (s *ConjunctionSearcher) advanceChild(ctx *search.SearchContext, i int, ID index.IndexInternalID) (err error) {
	if s.currs[i] != nil {
		ctx.DocumentMatchPool.Put(s.currs[i])
	}
	s.currs[i], err = s.searchers[i].Advance(ctx, ID)
	return err
}

func (s *ConjunctionSearcher) Count() uint64 {
	// for now return a worst case
	var sum uint64

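Note: the ConjunctionSearcher rewrite above drops the shared currentID field and instead tracks maxIDIdx, the child currently holding the largest ID; every other child is advanced up to that ID, and a document is emitted only once all children agree. advanceChild centralizes the advance-and-recycle step. A hedged sketch of the same leapfrog intersection over plain sorted int slices; cursor and intersect are illustrative names, not bleve code.

package main

import "fmt"

// cursor walks one sorted slice; advance(to) moves it to the first value >= to.
type cursor struct {
	vals []int
	pos  int
}

func (c *cursor) current() (int, bool) {
	if c.pos >= len(c.vals) {
		return 0, false
	}
	return c.vals[c.pos], true
}

func (c *cursor) advance(to int) {
	for c.pos < len(c.vals) && c.vals[c.pos] < to {
		c.pos++
	}
}

// intersect repeatedly takes the value under the "max" cursor and catches the
// other cursors up to it; if one of them overshoots, it becomes the new max
// cursor and the pass restarts. When every cursor sits on the same value,
// that value is emitted and all cursors move past it.
func intersect(cursors []*cursor) []int {
	var out []int
	maxIdx := 0
	for {
		max, ok := cursors[maxIdx].current()
		if !ok {
			return out // the max cursor is exhausted: no more matches
		}
		matched := true
		for i, c := range cursors {
			if i == maxIdx {
				continue
			}
			c.advance(max)
			v, ok := c.current()
			if !ok {
				return out
			}
			if v > max {
				maxIdx = i // found a larger candidate; restart from it
				matched = false
				break
			}
		}
		if matched {
			out = append(out, max)
			for _, c := range cursors {
				c.advance(max + 1)
			}
		}
	}
}

func main() {
	a := &cursor{vals: []int{1, 3, 5, 7, 9, 11}}
	b := &cursor{vals: []int{3, 4, 7, 10, 11}}
	c := &cursor{vals: []int{2, 3, 7, 11, 12}}
	fmt.Println(intersect([]*cursor{a, b, c})) // [3 7 11]
}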
vendor/manifest
@@ -29,7 +29,7 @@
			"importpath": "github.com/couchbase/moss",
			"repository": "https://github.com/couchbase/moss",
			"vcs": "git",
			"revision": "564bdbc09ecc32cb398b56b855a5a6dc9fd7cce5",
			"revision": "564b451e917875e4c580cd4a14a6bbb44a1faf7e",
			"branch": "master",
			"notests": true
		},