Dataset schema (column, type, value range):

  id                 int32            0 to 167k
  repo               stringlengths    5 to 54
  path               stringlengths    4 to 155
  func_name          stringlengths    1 to 118
  original_string    stringlengths    52 to 85.5k
  language           stringclasses    1 value
  code               stringlengths    52 to 85.5k
  code_tokens        sequencelengths  21 to 1.41k
  docstring          stringlengths    6 to 2.61k
  docstring_tokens   sequencelengths  3 to 215
  sha                stringlengths    40 to 40
  url                stringlengths    85 to 252
162,100
blevesearch/bleve
mapping/document.go
AddFieldMapping
func (dm *DocumentMapping) AddFieldMapping(fm *FieldMapping) { if dm.Fields == nil { dm.Fields = make([]*FieldMapping, 0) } dm.Fields = append(dm.Fields, fm) }
go
func (dm *DocumentMapping) AddFieldMapping(fm *FieldMapping) { if dm.Fields == nil { dm.Fields = make([]*FieldMapping, 0) } dm.Fields = append(dm.Fields, fm) }
[ "func", "(", "dm", "*", "DocumentMapping", ")", "AddFieldMapping", "(", "fm", "*", "FieldMapping", ")", "{", "if", "dm", ".", "Fields", "==", "nil", "{", "dm", ".", "Fields", "=", "make", "(", "[", "]", "*", "FieldMapping", ",", "0", ")", "\n", "}", "\n", "dm", ".", "Fields", "=", "append", "(", "dm", ".", "Fields", ",", "fm", ")", "\n", "}" ]
// AddFieldMapping adds the provided FieldMapping for this section // of the document.
[ "AddFieldMapping", "adds", "the", "provided", "FieldMapping", "for", "this", "section", "of", "the", "document", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/document.go#L245-L250
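A minimal usage sketch for the AddFieldMapping entry above, assuming the v1 import path github.com/blevesearch/bleve implied by the URLs; the "person" type name and the "en" analyzer are illustrative, not from the dataset.

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

func main() {
	// AddFieldMapping appends to the DocumentMapping's Fields slice,
	// creating the slice lazily on first use.
	nameField := bleve.NewTextFieldMapping()
	nameField.Analyzer = "en"

	personMapping := bleve.NewDocumentMapping()
	personMapping.AddFieldMapping(nameField)

	indexMapping := bleve.NewIndexMapping()
	indexMapping.AddDocumentMapping("person", personMapping)

	fmt.Println("field mappings on person:", len(personMapping.Fields))
}
```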
162,101
blevesearch/bleve
index/upsidedown/reader.go
nextOnly
func (r *UpsideDownCouchDocIDReader) nextOnly() bool { // advance 1 position, until we see a different key // it's already sorted, so this skips duplicates start := r.onlyPos r.onlyPos++ for r.onlyPos < len(r.only) && r.only[r.onlyPos] == r.only[start] { start = r.onlyPos r.onlyPos++ } // inidicate if we got to the end of the list return r.onlyPos < len(r.only) }
go
func (r *UpsideDownCouchDocIDReader) nextOnly() bool { // advance 1 position, until we see a different key // it's already sorted, so this skips duplicates start := r.onlyPos r.onlyPos++ for r.onlyPos < len(r.only) && r.only[r.onlyPos] == r.only[start] { start = r.onlyPos r.onlyPos++ } // inidicate if we got to the end of the list return r.onlyPos < len(r.only) }
[ "func", "(", "r", "*", "UpsideDownCouchDocIDReader", ")", "nextOnly", "(", ")", "bool", "{", "// advance 1 position, until we see a different key", "// it's already sorted, so this skips duplicates", "start", ":=", "r", ".", "onlyPos", "\n", "r", ".", "onlyPos", "++", "\n", "for", "r", ".", "onlyPos", "<", "len", "(", "r", ".", "only", ")", "&&", "r", ".", "only", "[", "r", ".", "onlyPos", "]", "==", "r", ".", "only", "[", "start", "]", "{", "start", "=", "r", ".", "onlyPos", "\n", "r", ".", "onlyPos", "++", "\n", "}", "\n", "// inidicate if we got to the end of the list", "return", "r", ".", "onlyPos", "<", "len", "(", "r", ".", "only", ")", "\n", "}" ]
// move the r.only pos forward one, skipping duplicates // return true if there is more data, or false if we got to the end of the list
[ "move", "the", "r", ".", "only", "pos", "forward", "one", "skipping", "duplicates", "return", "true", "if", "there", "is", "more", "data", "or", "false", "if", "we", "got", "to", "the", "end", "of", "the", "list" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/upsidedown/reader.go#L364-L376
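The nextOnly entry above advances past duplicate keys in an already-sorted slice. A standalone sketch of the same skip-duplicates loop (not the bleve reader type, just the pattern):

```go
package main

import "fmt"

// nextUnique advances pos past any duplicates of the current element in a
// sorted slice and reports whether more data remains.
func nextUnique(sorted []string, pos int) (int, bool) {
	start := pos
	pos++
	for pos < len(sorted) && sorted[pos] == sorted[start] {
		start = pos
		pos++
	}
	return pos, pos < len(sorted)
}

func main() {
	only := []string{"a", "a", "b", "b", "b", "c"}
	pos := 0
	for {
		fmt.Println("at:", only[pos]) // prints a, then b, then c
		var more bool
		pos, more = nextUnique(only, pos)
		if !more {
			break
		}
	}
}
```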
162,102
blevesearch/bleve
index/scorch/segment/zap/enumerator.go
newEnumerator
func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) { rv := &enumerator{ itrs: itrs, currKs: make([][]byte, len(itrs)), currVs: make([]uint64, len(itrs)), lowIdxs: make([]int, 0, len(itrs)), } for i, itr := range rv.itrs { rv.currKs[i], rv.currVs[i] = itr.Current() } rv.updateMatches(false) if rv.lowK == nil && len(rv.lowIdxs) == 0 { return rv, vellum.ErrIteratorDone } return rv, nil }
go
func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) { rv := &enumerator{ itrs: itrs, currKs: make([][]byte, len(itrs)), currVs: make([]uint64, len(itrs)), lowIdxs: make([]int, 0, len(itrs)), } for i, itr := range rv.itrs { rv.currKs[i], rv.currVs[i] = itr.Current() } rv.updateMatches(false) if rv.lowK == nil && len(rv.lowIdxs) == 0 { return rv, vellum.ErrIteratorDone } return rv, nil }
[ "func", "newEnumerator", "(", "itrs", "[", "]", "vellum", ".", "Iterator", ")", "(", "*", "enumerator", ",", "error", ")", "{", "rv", ":=", "&", "enumerator", "{", "itrs", ":", "itrs", ",", "currKs", ":", "make", "(", "[", "]", "[", "]", "byte", ",", "len", "(", "itrs", ")", ")", ",", "currVs", ":", "make", "(", "[", "]", "uint64", ",", "len", "(", "itrs", ")", ")", ",", "lowIdxs", ":", "make", "(", "[", "]", "int", ",", "0", ",", "len", "(", "itrs", ")", ")", ",", "}", "\n", "for", "i", ",", "itr", ":=", "range", "rv", ".", "itrs", "{", "rv", ".", "currKs", "[", "i", "]", ",", "rv", ".", "currVs", "[", "i", "]", "=", "itr", ".", "Current", "(", ")", "\n", "}", "\n", "rv", ".", "updateMatches", "(", "false", ")", "\n", "if", "rv", ".", "lowK", "==", "nil", "&&", "len", "(", "rv", ".", "lowIdxs", ")", "==", "0", "{", "return", "rv", ",", "vellum", ".", "ErrIteratorDone", "\n", "}", "\n", "return", "rv", ",", "nil", "\n", "}" ]
// newEnumerator returns a new enumerator over the vellum Iterators
[ "newEnumerator", "returns", "a", "new", "enumerator", "over", "the", "vellum", "Iterators" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/enumerator.go#L39-L54
162,103
blevesearch/bleve
index/scorch/segment/zap/enumerator.go
updateMatches
func (m *enumerator) updateMatches(skipEmptyKey bool) { m.lowK = nil m.lowIdxs = m.lowIdxs[:0] m.lowCurr = 0 for i, key := range m.currKs { if (key == nil && m.currVs[i] == 0) || // in case of empty iterator (len(key) == 0 && skipEmptyKey) { // skip empty keys continue } cmp := bytes.Compare(key, m.lowK) if cmp < 0 || len(m.lowIdxs) == 0 { // reached a new low m.lowK = key m.lowIdxs = m.lowIdxs[:0] m.lowIdxs = append(m.lowIdxs, i) } else if cmp == 0 { m.lowIdxs = append(m.lowIdxs, i) } } }
go
func (m *enumerator) updateMatches(skipEmptyKey bool) { m.lowK = nil m.lowIdxs = m.lowIdxs[:0] m.lowCurr = 0 for i, key := range m.currKs { if (key == nil && m.currVs[i] == 0) || // in case of empty iterator (len(key) == 0 && skipEmptyKey) { // skip empty keys continue } cmp := bytes.Compare(key, m.lowK) if cmp < 0 || len(m.lowIdxs) == 0 { // reached a new low m.lowK = key m.lowIdxs = m.lowIdxs[:0] m.lowIdxs = append(m.lowIdxs, i) } else if cmp == 0 { m.lowIdxs = append(m.lowIdxs, i) } } }
[ "func", "(", "m", "*", "enumerator", ")", "updateMatches", "(", "skipEmptyKey", "bool", ")", "{", "m", ".", "lowK", "=", "nil", "\n", "m", ".", "lowIdxs", "=", "m", ".", "lowIdxs", "[", ":", "0", "]", "\n", "m", ".", "lowCurr", "=", "0", "\n\n", "for", "i", ",", "key", ":=", "range", "m", ".", "currKs", "{", "if", "(", "key", "==", "nil", "&&", "m", ".", "currVs", "[", "i", "]", "==", "0", ")", "||", "// in case of empty iterator", "(", "len", "(", "key", ")", "==", "0", "&&", "skipEmptyKey", ")", "{", "// skip empty keys", "continue", "\n", "}", "\n\n", "cmp", ":=", "bytes", ".", "Compare", "(", "key", ",", "m", ".", "lowK", ")", "\n", "if", "cmp", "<", "0", "||", "len", "(", "m", ".", "lowIdxs", ")", "==", "0", "{", "// reached a new low", "m", ".", "lowK", "=", "key", "\n", "m", ".", "lowIdxs", "=", "m", ".", "lowIdxs", "[", ":", "0", "]", "\n", "m", ".", "lowIdxs", "=", "append", "(", "m", ".", "lowIdxs", ",", "i", ")", "\n", "}", "else", "if", "cmp", "==", "0", "{", "m", ".", "lowIdxs", "=", "append", "(", "m", ".", "lowIdxs", ",", "i", ")", "\n", "}", "\n", "}", "\n", "}" ]
// updateMatches maintains the low key matches based on the currKs
[ "updateMatches", "maintains", "the", "low", "key", "matches", "based", "on", "the", "currKs" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/enumerator.go#L57-L78
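updateMatches above tracks the lowest current key across several iterators and the indexes positioned on it, which is the core of a k-way merge. A standalone sketch of that "track the low key and its matching indexes" step:

```go
package main

import (
	"bytes"
	"fmt"
)

// lowestKey returns the smallest non-nil key among currKs together with the
// indexes of every slot currently holding that key (sketch only, not the
// bleve enumerator).
func lowestKey(currKs [][]byte) (low []byte, lowIdxs []int) {
	for i, key := range currKs {
		if key == nil { // exhausted iterator
			continue
		}
		cmp := bytes.Compare(key, low)
		if cmp < 0 || len(lowIdxs) == 0 {
			low = key
			lowIdxs = lowIdxs[:0]
			lowIdxs = append(lowIdxs, i)
		} else if cmp == 0 {
			lowIdxs = append(lowIdxs, i)
		}
	}
	return low, lowIdxs
}

func main() {
	keys := [][]byte{[]byte("cat"), []byte("ant"), nil, []byte("ant")}
	low, idxs := lowestKey(keys)
	fmt.Printf("low=%s idxs=%v\n", low, idxs) // low=ant idxs=[1 3]
}
```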
162,104
blevesearch/bleve
index/scorch/segment/zap/enumerator.go
Close
func (m *enumerator) Close() error { var rv error for _, itr := range m.itrs { err := itr.Close() if rv == nil { rv = err } } return rv }
go
func (m *enumerator) Close() error { var rv error for _, itr := range m.itrs { err := itr.Close() if rv == nil { rv = err } } return rv }
[ "func", "(", "m", "*", "enumerator", ")", "Close", "(", ")", "error", "{", "var", "rv", "error", "\n", "for", "_", ",", "itr", ":=", "range", "m", ".", "itrs", "{", "err", ":=", "itr", ".", "Close", "(", ")", "\n", "if", "rv", "==", "nil", "{", "rv", "=", "err", "\n", "}", "\n", "}", "\n", "return", "rv", "\n", "}" ]
// Close all the underlying Iterators. The first error, if any, will // be returned.
[ "Close", "all", "the", "underlying", "Iterators", ".", "The", "first", "error", "if", "any", "will", "be", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/enumerator.go#L117-L126
162,105
blevesearch/bleve
index/scorch/optimize.go
Finish
func (o *OptimizeTFRDisjunctionUnadorned) Finish() (rv index.Optimized, err error) { if len(o.tfrs) <= 1 { return nil, nil } for i := range o.snapshot.segment { var cMax uint64 for _, tfr := range o.tfrs { itr, ok := tfr.iterators[i].(*zap.PostingsIterator) if !ok { return nil, nil } if itr.ActualBM != nil { c := itr.ActualBM.GetCardinality() if cMax < c { cMax = c } } } // Heuristic to skip the optimization if all the constituent // bitmaps are too small, where the processing & resource // overhead to create the OR'ed bitmap outweighs the benefit. if cMax < OptimizeDisjunctionUnadornedMinChildCardinality { return nil, nil } } // We use an artificial term and field because the optimized // termFieldReader can represent multiple terms and fields. oTFR := &IndexSnapshotTermFieldReader{ term: OptimizeTFRDisjunctionUnadornedTerm, field: OptimizeTFRDisjunctionUnadornedField, snapshot: o.snapshot, iterators: make([]segment.PostingsIterator, len(o.snapshot.segment)), segmentOffset: 0, includeFreq: false, includeNorm: false, includeTermVectors: false, } var docNums []uint32 // Collected docNum's from 1-hit posting lists. var actualBMs []*roaring.Bitmap // Collected from regular posting lists. for i := range o.snapshot.segment { docNums = docNums[:0] actualBMs = actualBMs[:0] for _, tfr := range o.tfrs { itr, ok := tfr.iterators[i].(*zap.PostingsIterator) if !ok { return nil, nil } docNum, ok := itr.DocNum1Hit() if ok { docNums = append(docNums, uint32(docNum)) continue } if itr.ActualBM != nil { actualBMs = append(actualBMs, itr.ActualBM) } } var bm *roaring.Bitmap if len(actualBMs) > 2 { bm = roaring.HeapOr(actualBMs...) } else if len(actualBMs) == 2 { bm = roaring.Or(actualBMs[0], actualBMs[1]) } else if len(actualBMs) == 1 { bm = actualBMs[0].Clone() } if bm == nil { bm = roaring.New() } bm.AddMany(docNums) oTFR.iterators[i], err = zap.PostingsIteratorFromBitmap(bm, false, false) if err != nil { return nil, nil } } return oTFR, nil }
go
func (o *OptimizeTFRDisjunctionUnadorned) Finish() (rv index.Optimized, err error) { if len(o.tfrs) <= 1 { return nil, nil } for i := range o.snapshot.segment { var cMax uint64 for _, tfr := range o.tfrs { itr, ok := tfr.iterators[i].(*zap.PostingsIterator) if !ok { return nil, nil } if itr.ActualBM != nil { c := itr.ActualBM.GetCardinality() if cMax < c { cMax = c } } } // Heuristic to skip the optimization if all the constituent // bitmaps are too small, where the processing & resource // overhead to create the OR'ed bitmap outweighs the benefit. if cMax < OptimizeDisjunctionUnadornedMinChildCardinality { return nil, nil } } // We use an artificial term and field because the optimized // termFieldReader can represent multiple terms and fields. oTFR := &IndexSnapshotTermFieldReader{ term: OptimizeTFRDisjunctionUnadornedTerm, field: OptimizeTFRDisjunctionUnadornedField, snapshot: o.snapshot, iterators: make([]segment.PostingsIterator, len(o.snapshot.segment)), segmentOffset: 0, includeFreq: false, includeNorm: false, includeTermVectors: false, } var docNums []uint32 // Collected docNum's from 1-hit posting lists. var actualBMs []*roaring.Bitmap // Collected from regular posting lists. for i := range o.snapshot.segment { docNums = docNums[:0] actualBMs = actualBMs[:0] for _, tfr := range o.tfrs { itr, ok := tfr.iterators[i].(*zap.PostingsIterator) if !ok { return nil, nil } docNum, ok := itr.DocNum1Hit() if ok { docNums = append(docNums, uint32(docNum)) continue } if itr.ActualBM != nil { actualBMs = append(actualBMs, itr.ActualBM) } } var bm *roaring.Bitmap if len(actualBMs) > 2 { bm = roaring.HeapOr(actualBMs...) } else if len(actualBMs) == 2 { bm = roaring.Or(actualBMs[0], actualBMs[1]) } else if len(actualBMs) == 1 { bm = actualBMs[0].Clone() } if bm == nil { bm = roaring.New() } bm.AddMany(docNums) oTFR.iterators[i], err = zap.PostingsIteratorFromBitmap(bm, false, false) if err != nil { return nil, nil } } return oTFR, nil }
[ "func", "(", "o", "*", "OptimizeTFRDisjunctionUnadorned", ")", "Finish", "(", ")", "(", "rv", "index", ".", "Optimized", ",", "err", "error", ")", "{", "if", "len", "(", "o", ".", "tfrs", ")", "<=", "1", "{", "return", "nil", ",", "nil", "\n", "}", "\n\n", "for", "i", ":=", "range", "o", ".", "snapshot", ".", "segment", "{", "var", "cMax", "uint64", "\n\n", "for", "_", ",", "tfr", ":=", "range", "o", ".", "tfrs", "{", "itr", ",", "ok", ":=", "tfr", ".", "iterators", "[", "i", "]", ".", "(", "*", "zap", ".", "PostingsIterator", ")", "\n", "if", "!", "ok", "{", "return", "nil", ",", "nil", "\n", "}", "\n\n", "if", "itr", ".", "ActualBM", "!=", "nil", "{", "c", ":=", "itr", ".", "ActualBM", ".", "GetCardinality", "(", ")", "\n", "if", "cMax", "<", "c", "{", "cMax", "=", "c", "\n", "}", "\n", "}", "\n", "}", "\n\n", "// Heuristic to skip the optimization if all the constituent", "// bitmaps are too small, where the processing & resource", "// overhead to create the OR'ed bitmap outweighs the benefit.", "if", "cMax", "<", "OptimizeDisjunctionUnadornedMinChildCardinality", "{", "return", "nil", ",", "nil", "\n", "}", "\n", "}", "\n\n", "// We use an artificial term and field because the optimized", "// termFieldReader can represent multiple terms and fields.", "oTFR", ":=", "&", "IndexSnapshotTermFieldReader", "{", "term", ":", "OptimizeTFRDisjunctionUnadornedTerm", ",", "field", ":", "OptimizeTFRDisjunctionUnadornedField", ",", "snapshot", ":", "o", ".", "snapshot", ",", "iterators", ":", "make", "(", "[", "]", "segment", ".", "PostingsIterator", ",", "len", "(", "o", ".", "snapshot", ".", "segment", ")", ")", ",", "segmentOffset", ":", "0", ",", "includeFreq", ":", "false", ",", "includeNorm", ":", "false", ",", "includeTermVectors", ":", "false", ",", "}", "\n\n", "var", "docNums", "[", "]", "uint32", "// Collected docNum's from 1-hit posting lists.", "\n", "var", "actualBMs", "[", "]", "*", "roaring", ".", "Bitmap", "// Collected from regular posting lists.", "\n\n", "for", "i", ":=", "range", "o", ".", "snapshot", ".", "segment", "{", "docNums", "=", "docNums", "[", ":", "0", "]", "\n", "actualBMs", "=", "actualBMs", "[", ":", "0", "]", "\n\n", "for", "_", ",", "tfr", ":=", "range", "o", ".", "tfrs", "{", "itr", ",", "ok", ":=", "tfr", ".", "iterators", "[", "i", "]", ".", "(", "*", "zap", ".", "PostingsIterator", ")", "\n", "if", "!", "ok", "{", "return", "nil", ",", "nil", "\n", "}", "\n\n", "docNum", ",", "ok", ":=", "itr", ".", "DocNum1Hit", "(", ")", "\n", "if", "ok", "{", "docNums", "=", "append", "(", "docNums", ",", "uint32", "(", "docNum", ")", ")", "\n", "continue", "\n", "}", "\n\n", "if", "itr", ".", "ActualBM", "!=", "nil", "{", "actualBMs", "=", "append", "(", "actualBMs", ",", "itr", ".", "ActualBM", ")", "\n", "}", "\n", "}", "\n\n", "var", "bm", "*", "roaring", ".", "Bitmap", "\n", "if", "len", "(", "actualBMs", ")", ">", "2", "{", "bm", "=", "roaring", ".", "HeapOr", "(", "actualBMs", "...", ")", "\n", "}", "else", "if", "len", "(", "actualBMs", ")", "==", "2", "{", "bm", "=", "roaring", ".", "Or", "(", "actualBMs", "[", "0", "]", ",", "actualBMs", "[", "1", "]", ")", "\n", "}", "else", "if", "len", "(", "actualBMs", ")", "==", "1", "{", "bm", "=", "actualBMs", "[", "0", "]", ".", "Clone", "(", ")", "\n", "}", "\n\n", "if", "bm", "==", "nil", "{", "bm", "=", "roaring", ".", "New", "(", ")", "\n", "}", "\n\n", "bm", ".", "AddMany", "(", "docNums", ")", "\n\n", "oTFR", ".", "iterators", "[", "i", "]", ",", "err", "=", "zap", ".", "PostingsIteratorFromBitmap", "(", "bm", 
",", "false", ",", "false", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "nil", "\n", "}", "\n", "}", "\n\n", "return", "oTFR", ",", "nil", "\n", "}" ]
// Finish of an unadorned disjunction optimization will compute a // termFieldReader with an "actual" bitmap that represents the // constituent bitmaps OR'ed together. This termFieldReader cannot // provide any freq-norm or termVector associated information.
[ "Finish", "of", "an", "unadorned", "disjunction", "optimization", "will", "compute", "a", "termFieldReader", "with", "an", "actual", "bitmap", "that", "represents", "the", "constituent", "bitmaps", "OR", "ed", "together", ".", "This", "termFieldReader", "cannot", "provide", "any", "freq", "-", "norm", "or", "termVector", "associated", "information", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/optimize.go#L331-L420
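The Finish entry above ORs the constituent posting bitmaps with the roaring library. A minimal sketch of the same roaring calls (roaring.HeapOr, roaring.Or, AddMany), assuming the github.com/RoaringBitmap/roaring import path; the values are made up.

```go
package main

import (
	"fmt"

	"github.com/RoaringBitmap/roaring"
)

func main() {
	a := roaring.BitmapOf(1, 3, 5)
	b := roaring.BitmapOf(2, 3, 8)
	c := roaring.BitmapOf(5, 9)

	// HeapOr is used for three or more bitmaps, Or for exactly two.
	union := roaring.HeapOr(a, b, c)

	// Doc numbers collected from 1-hit posting lists are added afterwards.
	union.AddMany([]uint32{42})

	fmt.Println("cardinality:", union.GetCardinality()) // 7
}
```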
162,106
blevesearch/bleve
geo/geo.go
MortonHash
func MortonHash(lon, lat float64) uint64 { return numeric.Interleave(scaleLon(lon), scaleLat(lat)) }
go
func MortonHash(lon, lat float64) uint64 { return numeric.Interleave(scaleLon(lon), scaleLat(lat)) }
[ "func", "MortonHash", "(", "lon", ",", "lat", "float64", ")", "uint64", "{", "return", "numeric", ".", "Interleave", "(", "scaleLon", "(", "lon", ")", ",", "scaleLat", "(", "lat", ")", ")", "\n", "}" ]
// MortonHash computes the morton hash value for the provided geo point // This point is ordered as lon, lat.
[ "MortonHash", "computes", "the", "morton", "hash", "value", "for", "the", "provided", "geo", "point", "This", "point", "is", "ordered", "as", "lon", "lat", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo.go#L42-L44
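MortonHash above interleaves the scaled longitude and latitude bits into one 64-bit key. A self-contained illustration of the bit-interleaving idea; this is not bleve's numeric.Interleave, and which coordinate lands on the even versus odd bit positions is an assumption here.

```go
package main

import "fmt"

// spread inserts a zero bit between each bit of a 32-bit value (the classic
// morton bit-spread).
func spread(x uint64) uint64 {
	x &= 0xFFFFFFFF
	x = (x | x<<16) & 0x0000FFFF0000FFFF
	x = (x | x<<8) & 0x00FF00FF00FF00FF
	x = (x | x<<4) & 0x0F0F0F0F0F0F0F0F
	x = (x | x<<2) & 0x3333333333333333
	x = (x | x<<1) & 0x5555555555555555
	return x
}

// mortonHash interleaves two scaled 32-bit coordinates into one 64-bit key,
// the same shape of operation MortonHash performs on scaled lon/lat.
func mortonHash(scaledLon, scaledLat uint64) uint64 {
	return spread(scaledLon) | spread(scaledLat)<<1
}

func main() {
	fmt.Printf("%#x\n", mortonHash(0b1010, 0b0110)) // bits interleaved
}
```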
162,107
blevesearch/bleve
geo/geo.go
compareGeo
func compareGeo(a, b float64) float64 { compare := a - b if math.Abs(compare) <= geoTolerance { return 0 } return compare }
go
func compareGeo(a, b float64) float64 { compare := a - b if math.Abs(compare) <= geoTolerance { return 0 } return compare }
[ "func", "compareGeo", "(", "a", ",", "b", "float64", ")", "float64", "{", "compare", ":=", "a", "-", "b", "\n", "if", "math", ".", "Abs", "(", "compare", ")", "<=", "geoTolerance", "{", "return", "0", "\n", "}", "\n", "return", "compare", "\n", "}" ]
// compareGeo will compare two float values and see if they are the same // taking into consideration a known geo tolerance.
[ "compareGeo", "will", "compare", "two", "float", "values", "and", "see", "if", "they", "are", "the", "same", "taking", "into", "consideration", "a", "known", "geo", "tolerance", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo.go#L76-L82
162,108
blevesearch/bleve
geo/geo.go
RectIntersects
func RectIntersects(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { return !(aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) }
go
func RectIntersects(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { return !(aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) }
[ "func", "RectIntersects", "(", "aMinX", ",", "aMinY", ",", "aMaxX", ",", "aMaxY", ",", "bMinX", ",", "bMinY", ",", "bMaxX", ",", "bMaxY", "float64", ")", "bool", "{", "return", "!", "(", "aMaxX", "<", "bMinX", "||", "aMinX", ">", "bMaxX", "||", "aMaxY", "<", "bMinY", "||", "aMinY", ">", "bMaxY", ")", "\n", "}" ]
// RectIntersects checks whether rectangles a and b intersect
[ "RectIntersects", "checks", "whether", "rectangles", "a", "and", "b", "intersect" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo.go#L85-L87
162,109
blevesearch/bleve
geo/geo.go
RectWithin
func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { rv := !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY) return rv }
go
func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { rv := !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY) return rv }
[ "func", "RectWithin", "(", "aMinX", ",", "aMinY", ",", "aMaxX", ",", "aMaxY", ",", "bMinX", ",", "bMinY", ",", "bMaxX", ",", "bMaxY", "float64", ")", "bool", "{", "rv", ":=", "!", "(", "aMinX", "<", "bMinX", "||", "aMinY", "<", "bMinY", "||", "aMaxX", ">", "bMaxX", "||", "aMaxY", ">", "bMaxY", ")", "\n", "return", "rv", "\n", "}" ]
// RectWithin checks whether box a is within box b
[ "RectWithin", "checks", "whether", "box", "a", "is", "within", "box", "b" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo.go#L90-L93
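A short usage sketch covering the RectIntersects and RectWithin entries above, assuming the github.com/blevesearch/bleve/geo import path implied by the file paths; the rectangle coordinates are illustrative.

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve/geo"
)

func main() {
	// Rectangle a: x in [0,10], y in [0,10]; rectangle b: x in [5,20], y in [5,20].
	fmt.Println(geo.RectIntersects(0, 0, 10, 10, 5, 5, 20, 20)) // true: they overlap
	fmt.Println(geo.RectWithin(0, 0, 10, 10, 5, 5, 20, 20))     // false: a is not inside b
	fmt.Println(geo.RectWithin(6, 6, 9, 9, 5, 5, 20, 20))       // true: fully contained
}
```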
162,110
blevesearch/bleve
index/field_cache.go
FieldNamed
func (f *FieldCache) FieldNamed(field string, createIfMissing bool) (uint16, bool) { f.mutex.RLock() if index, ok := f.fieldIndexes[field]; ok { f.mutex.RUnlock() return index, true } else if !createIfMissing { f.mutex.RUnlock() return 0, false } // trade read lock for write lock f.mutex.RUnlock() f.mutex.Lock() // need to check again with write lock if index, ok := f.fieldIndexes[field]; ok { f.mutex.Unlock() return index, true } // assign next field id index := f.addLOCKED(field, uint16(f.lastFieldIndex+1)) f.mutex.Unlock() return index, false }
go
func (f *FieldCache) FieldNamed(field string, createIfMissing bool) (uint16, bool) { f.mutex.RLock() if index, ok := f.fieldIndexes[field]; ok { f.mutex.RUnlock() return index, true } else if !createIfMissing { f.mutex.RUnlock() return 0, false } // trade read lock for write lock f.mutex.RUnlock() f.mutex.Lock() // need to check again with write lock if index, ok := f.fieldIndexes[field]; ok { f.mutex.Unlock() return index, true } // assign next field id index := f.addLOCKED(field, uint16(f.lastFieldIndex+1)) f.mutex.Unlock() return index, false }
[ "func", "(", "f", "*", "FieldCache", ")", "FieldNamed", "(", "field", "string", ",", "createIfMissing", "bool", ")", "(", "uint16", ",", "bool", ")", "{", "f", ".", "mutex", ".", "RLock", "(", ")", "\n", "if", "index", ",", "ok", ":=", "f", ".", "fieldIndexes", "[", "field", "]", ";", "ok", "{", "f", ".", "mutex", ".", "RUnlock", "(", ")", "\n", "return", "index", ",", "true", "\n", "}", "else", "if", "!", "createIfMissing", "{", "f", ".", "mutex", ".", "RUnlock", "(", ")", "\n", "return", "0", ",", "false", "\n", "}", "\n", "// trade read lock for write lock", "f", ".", "mutex", ".", "RUnlock", "(", ")", "\n", "f", ".", "mutex", ".", "Lock", "(", ")", "\n", "// need to check again with write lock", "if", "index", ",", "ok", ":=", "f", ".", "fieldIndexes", "[", "field", "]", ";", "ok", "{", "f", ".", "mutex", ".", "Unlock", "(", ")", "\n", "return", "index", ",", "true", "\n", "}", "\n", "// assign next field id", "index", ":=", "f", ".", "addLOCKED", "(", "field", ",", "uint16", "(", "f", ".", "lastFieldIndex", "+", "1", ")", ")", "\n", "f", ".", "mutex", ".", "Unlock", "(", ")", "\n", "return", "index", ",", "false", "\n", "}" ]
// FieldNamed returns the index of the field, and whether or not it existed // before this call. if createIfMissing is true, and new field index is assigned // but the second return value will still be false
[ "FieldNamed", "returns", "the", "index", "of", "the", "field", "and", "whether", "or", "not", "it", "existed", "before", "this", "call", ".", "if", "createIfMissing", "is", "true", "and", "new", "field", "index", "is", "assigned", "but", "the", "second", "return", "value", "will", "still", "be", "false" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/field_cache.go#L58-L79
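FieldNamed above uses the read-lock, then upgrade-to-write-lock-and-recheck pattern so the common lookup path stays cheap. A generic sketch of that pattern (not bleve's FieldCache; names and the starting index are made up):

```go
package main

import (
	"fmt"
	"sync"
)

type cache struct {
	mu      sync.RWMutex
	indexes map[string]uint16
	last    int
}

func (c *cache) fieldNamed(field string, create bool) (uint16, bool) {
	c.mu.RLock()
	if idx, ok := c.indexes[field]; ok {
		c.mu.RUnlock()
		return idx, true
	} else if !create {
		c.mu.RUnlock()
		return 0, false
	}
	// trade read lock for write lock
	c.mu.RUnlock()
	c.mu.Lock()
	defer c.mu.Unlock()
	// another goroutine may have inserted while we held no lock
	if idx, ok := c.indexes[field]; ok {
		return idx, true
	}
	c.last++
	idx := uint16(c.last)
	c.indexes[field] = idx
	return idx, false
}

func main() {
	c := &cache{indexes: map[string]uint16{}}
	idx, existed := c.fieldNamed("title", true)
	fmt.Println(idx, existed) // 1 false
	idx, existed = c.fieldNamed("title", true)
	fmt.Println(idx, existed) // 1 true
}
```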
162,111
blevesearch/bleve
index/scorch/segment/regexp.go
LiteralPrefix
func LiteralPrefix(s *syntax.Regexp) string { // traverse the left-most branch in the parse tree as long as the // node represents a concatenation for s != nil && s.Op == syntax.OpConcat { if len(s.Sub) < 1 { return "" } s = s.Sub[0] } if s.Op == syntax.OpLiteral { return string(s.Rune) } return "" // no literal prefix }
go
func LiteralPrefix(s *syntax.Regexp) string { // traverse the left-most branch in the parse tree as long as the // node represents a concatenation for s != nil && s.Op == syntax.OpConcat { if len(s.Sub) < 1 { return "" } s = s.Sub[0] } if s.Op == syntax.OpLiteral { return string(s.Rune) } return "" // no literal prefix }
[ "func", "LiteralPrefix", "(", "s", "*", "syntax", ".", "Regexp", ")", "string", "{", "// traverse the left-most branch in the parse tree as long as the", "// node represents a concatenation", "for", "s", "!=", "nil", "&&", "s", ".", "Op", "==", "syntax", ".", "OpConcat", "{", "if", "len", "(", "s", ".", "Sub", ")", "<", "1", "{", "return", "\"", "\"", "\n", "}", "\n\n", "s", "=", "s", ".", "Sub", "[", "0", "]", "\n", "}", "\n\n", "if", "s", ".", "Op", "==", "syntax", ".", "OpLiteral", "{", "return", "string", "(", "s", ".", "Rune", ")", "\n", "}", "\n\n", "return", "\"", "\"", "// no literal prefix", "\n", "}" ]
// Returns the literal prefix given the parse tree for a regexp
[ "Returns", "the", "literal", "prefix", "given", "the", "parse", "tree", "for", "a", "regexp" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/regexp.go#L47-L63
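LiteralPrefix above walks the left-most branch of a regexp parse tree. A standalone sketch of the same walk over the standard library's regexp/syntax tree; the pattern is illustrative.

```go
package main

import (
	"fmt"
	"regexp/syntax"
)

// literalPrefix returns the leading literal of a parsed regexp by following
// the left-most child of each concatenation node.
func literalPrefix(re *syntax.Regexp) string {
	for re != nil && re.Op == syntax.OpConcat {
		if len(re.Sub) < 1 {
			return ""
		}
		re = re.Sub[0]
	}
	if re != nil && re.Op == syntax.OpLiteral {
		return string(re.Rune)
	}
	return "" // no literal prefix
}

func main() {
	re, err := syntax.Parse(`hello.*world`, syntax.Perl)
	if err != nil {
		panic(err)
	}
	fmt.Println(literalPrefix(re)) // "hello"
}
```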
162,112
blevesearch/bleve
geo/geo_dist.go
ParseDistanceUnit
func ParseDistanceUnit(u string) (float64, error) { for _, unit := range distanceUnits { for _, unitSuffix := range unit.suffixes { if u == unitSuffix { return unit.conv, nil } } } return 0, fmt.Errorf("unknown distance unit: %s", u) }
go
func ParseDistanceUnit(u string) (float64, error) { for _, unit := range distanceUnits { for _, unitSuffix := range unit.suffixes { if u == unitSuffix { return unit.conv, nil } } } return 0, fmt.Errorf("unknown distance unit: %s", u) }
[ "func", "ParseDistanceUnit", "(", "u", "string", ")", "(", "float64", ",", "error", ")", "{", "for", "_", ",", "unit", ":=", "range", "distanceUnits", "{", "for", "_", ",", "unitSuffix", ":=", "range", "unit", ".", "suffixes", "{", "if", "u", "==", "unitSuffix", "{", "return", "unit", ".", "conv", ",", "nil", "\n", "}", "\n", "}", "\n", "}", "\n", "return", "0", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "u", ")", "\n", "}" ]
// ParseDistanceUnit attempts to parse a distance unit and return the // multiplier for converting this to meters. If the unit cannot be parsed // then 0 and the error message is returned.
[ "ParseDistanceUnit", "attempts", "to", "parse", "a", "distance", "unit", "and", "return", "the", "multiplier", "for", "converting", "this", "to", "meters", ".", "If", "the", "unit", "cannot", "be", "parsed", "then", "0", "and", "the", "error", "message", "is", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo_dist.go#L74-L83
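A small usage sketch for ParseDistanceUnit above, assuming the github.com/blevesearch/bleve/geo import path and that "km" is among the recognized suffixes (with a meters-per-kilometer multiplier).

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve/geo"
)

func main() {
	// Look up the multiplier for converting a unit to meters.
	conv, err := geo.ParseDistanceUnit("km")
	if err != nil {
		panic(err)
	}
	fmt.Printf("5km = %.0f meters\n", 5*conv)
}
```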
162,113
blevesearch/bleve
geo/geo_dist.go
Haversin
func Haversin(lon1, lat1, lon2, lat2 float64) float64 { x1 := lat1 * degreesToRadian x2 := lat2 * degreesToRadian h1 := 1 - cos(x1-x2) h2 := 1 - cos((lon1-lon2)*degreesToRadian) h := (h1 + cos(x1)*cos(x2)*h2) / 2 avgLat := (x1 + x2) / 2 diameter := earthDiameter(avgLat) return diameter * asin(math.Min(1, math.Sqrt(h))) }
go
func Haversin(lon1, lat1, lon2, lat2 float64) float64 { x1 := lat1 * degreesToRadian x2 := lat2 * degreesToRadian h1 := 1 - cos(x1-x2) h2 := 1 - cos((lon1-lon2)*degreesToRadian) h := (h1 + cos(x1)*cos(x2)*h2) / 2 avgLat := (x1 + x2) / 2 diameter := earthDiameter(avgLat) return diameter * asin(math.Min(1, math.Sqrt(h))) }
[ "func", "Haversin", "(", "lon1", ",", "lat1", ",", "lon2", ",", "lat2", "float64", ")", "float64", "{", "x1", ":=", "lat1", "*", "degreesToRadian", "\n", "x2", ":=", "lat2", "*", "degreesToRadian", "\n", "h1", ":=", "1", "-", "cos", "(", "x1", "-", "x2", ")", "\n", "h2", ":=", "1", "-", "cos", "(", "(", "lon1", "-", "lon2", ")", "*", "degreesToRadian", ")", "\n", "h", ":=", "(", "h1", "+", "cos", "(", "x1", ")", "*", "cos", "(", "x2", ")", "*", "h2", ")", "/", "2", "\n", "avgLat", ":=", "(", "x1", "+", "x2", ")", "/", "2", "\n", "diameter", ":=", "earthDiameter", "(", "avgLat", ")", "\n\n", "return", "diameter", "*", "asin", "(", "math", ".", "Min", "(", "1", ",", "math", ".", "Sqrt", "(", "h", ")", ")", ")", "\n", "}" ]
// Haversin computes the distance between two points. // This implemenation uses the sloppy math implemenations which trade off // accuracy for performance. The distance returned is in kilometers.
[ "Haversin", "computes", "the", "distance", "between", "two", "points", ".", "This", "implemenation", "uses", "the", "sloppy", "math", "implemenations", "which", "trade", "off", "accuracy", "for", "performance", ".", "The", "distance", "returned", "is", "in", "kilometers", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/geo_dist.go#L88-L98
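Haversin above uses approximate ("sloppy") trig and a latitude-dependent earth diameter for speed. A plain stdlib-math haversine with the same formula shape, returning kilometers, as a point of comparison; the fixed earth radius and the sample coordinates are assumptions of this sketch.

```go
package main

import (
	"fmt"
	"math"
)

// haversinKM computes great-circle distance in kilometers using the standard
// haversine formula: (1 - cos x) / 2 == sin^2(x/2).
func haversinKM(lon1, lat1, lon2, lat2 float64) float64 {
	const earthRadiusKM = 6371.0
	toRad := math.Pi / 180.0
	x1, x2 := lat1*toRad, lat2*toRad
	h1 := 1 - math.Cos(x1-x2)
	h2 := 1 - math.Cos((lon1-lon2)*toRad)
	h := (h1 + math.Cos(x1)*math.Cos(x2)*h2) / 2
	return 2 * earthRadiusKM * math.Asin(math.Min(1, math.Sqrt(h)))
}

func main() {
	// Roughly the distance from London to Paris.
	fmt.Printf("%.0f km\n", haversinKM(-0.1278, 51.5074, 2.3522, 48.8566))
}
```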
162,114
blevesearch/bleve
index_impl.go
Advanced
func (i *indexImpl) Advanced() (index.Index, store.KVStore, error) { s, err := i.i.Advanced() if err != nil { return nil, nil, err } return i.i, s, nil }
go
func (i *indexImpl) Advanced() (index.Index, store.KVStore, error) { s, err := i.i.Advanced() if err != nil { return nil, nil, err } return i.i, s, nil }
[ "func", "(", "i", "*", "indexImpl", ")", "Advanced", "(", ")", "(", "index", ".", "Index", ",", "store", ".", "KVStore", ",", "error", ")", "{", "s", ",", "err", ":=", "i", ".", "i", ".", "Advanced", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "nil", ",", "err", "\n", "}", "\n", "return", "i", ".", "i", ",", "s", ",", "nil", "\n", "}" ]
// Advanced returns implementation internals // necessary ONLY for advanced usage.
[ "Advanced", "returns", "implementation", "internals", "necessary", "ONLY", "for", "advanced", "usage", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L223-L229
162,115
blevesearch/bleve
index_impl.go
Index
func (i *indexImpl) Index(id string, data interface{}) (err error) { if id == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } doc := document.NewDocument(id) err = i.m.MapDocument(doc, data) if err != nil { return } err = i.i.Update(doc) return }
go
func (i *indexImpl) Index(id string, data interface{}) (err error) { if id == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } doc := document.NewDocument(id) err = i.m.MapDocument(doc, data) if err != nil { return } err = i.i.Update(doc) return }
[ "func", "(", "i", "*", "indexImpl", ")", "Index", "(", "id", "string", ",", "data", "interface", "{", "}", ")", "(", "err", "error", ")", "{", "if", "id", "==", "\"", "\"", "{", "return", "ErrorEmptyID", "\n", "}", "\n\n", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "ErrorIndexClosed", "\n", "}", "\n\n", "doc", ":=", "document", ".", "NewDocument", "(", "id", ")", "\n", "err", "=", "i", ".", "m", ".", "MapDocument", "(", "doc", ",", "data", ")", "\n", "if", "err", "!=", "nil", "{", "return", "\n", "}", "\n", "err", "=", "i", ".", "i", ".", "Update", "(", "doc", ")", "\n", "return", "\n", "}" ]
// Index the object with the specified identifier. // The IndexMapping for this index will determine // how the object is indexed.
[ "Index", "the", "object", "with", "the", "specified", "identifier", ".", "The", "IndexMapping", "for", "this", "index", "will", "determine", "how", "the", "object", "is", "indexed", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L240-L259
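A minimal caller-side sketch for the Index entry above, assuming the v1 import path; the index path "people.bleve" and the Person type are hypothetical.

```go
package main

import "github.com/blevesearch/bleve"

type Person struct {
	Name string `json:"name"`
}

func main() {
	// Index maps the value through the IndexMapping and then updates the
	// underlying index; an empty id is rejected with ErrorEmptyID.
	idx, err := bleve.New("people.bleve", bleve.NewIndexMapping())
	if err != nil {
		panic(err)
	}
	defer idx.Close()

	if err := idx.Index("person-1", Person{Name: "Ada Lovelace"}); err != nil {
		panic(err)
	}
}
```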
162,116
blevesearch/bleve
index_impl.go
IndexAdvanced
func (i *indexImpl) IndexAdvanced(doc *document.Document) (err error) { if doc.ID == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } err = i.i.Update(doc) return }
go
func (i *indexImpl) IndexAdvanced(doc *document.Document) (err error) { if doc.ID == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } err = i.i.Update(doc) return }
[ "func", "(", "i", "*", "indexImpl", ")", "IndexAdvanced", "(", "doc", "*", "document", ".", "Document", ")", "(", "err", "error", ")", "{", "if", "doc", ".", "ID", "==", "\"", "\"", "{", "return", "ErrorEmptyID", "\n", "}", "\n\n", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "ErrorIndexClosed", "\n", "}", "\n\n", "err", "=", "i", ".", "i", ".", "Update", "(", "doc", ")", "\n", "return", "\n", "}" ]
// IndexAdvanced takes a document.Document object // skips the mapping and indexes it.
[ "IndexAdvanced", "takes", "a", "document", ".", "Document", "object", "skips", "the", "mapping", "and", "indexes", "it", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L263-L277
162,117
blevesearch/bleve
index_impl.go
Delete
func (i *indexImpl) Delete(id string) (err error) { if id == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } err = i.i.Delete(id) return }
go
func (i *indexImpl) Delete(id string) (err error) { if id == "" { return ErrorEmptyID } i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } err = i.i.Delete(id) return }
[ "func", "(", "i", "*", "indexImpl", ")", "Delete", "(", "id", "string", ")", "(", "err", "error", ")", "{", "if", "id", "==", "\"", "\"", "{", "return", "ErrorEmptyID", "\n", "}", "\n\n", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "ErrorIndexClosed", "\n", "}", "\n\n", "err", "=", "i", ".", "i", ".", "Delete", "(", "id", ")", "\n", "return", "\n", "}" ]
// Delete entries for the specified identifier from // the index.
[ "Delete", "entries", "for", "the", "specified", "identifier", "from", "the", "index", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L281-L295
162,118
blevesearch/bleve
index_impl.go
Batch
func (i *indexImpl) Batch(b *Batch) error { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } return i.i.Batch(b.internal) }
go
func (i *indexImpl) Batch(b *Batch) error { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return ErrorIndexClosed } return i.i.Batch(b.internal) }
[ "func", "(", "i", "*", "indexImpl", ")", "Batch", "(", "b", "*", "Batch", ")", "error", "{", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "ErrorIndexClosed", "\n", "}", "\n\n", "return", "i", ".", "i", ".", "Batch", "(", "b", ".", "internal", ")", "\n", "}" ]
// Batch executes multiple Index and Delete // operations at the same time. There are often // significant performance benefits when performing // operations in a batch.
[ "Batch", "executes", "multiple", "Index", "and", "Delete", "operations", "at", "the", "same", "time", ".", "There", "are", "often", "significant", "performance", "benefits", "when", "performing", "operations", "in", "a", "batch", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L301-L310
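A usage sketch for the Batch entry above, grouping several mutations into one call; the index path and document ids are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

func main() {
	idx, err := bleve.New("example.bleve", bleve.NewIndexMapping())
	if err != nil {
		panic(err)
	}
	defer idx.Close()

	// Queue several operations, then apply them in a single Batch call.
	batch := idx.NewBatch()
	if err := batch.Index("doc1", map[string]string{"body": "hello world"}); err != nil {
		panic(err)
	}
	batch.Delete("stale-doc")

	if err := idx.Batch(batch); err != nil {
		panic(err)
	}
	count, _ := idx.DocCount()
	fmt.Println("docs:", count)
}
```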
162,119
blevesearch/bleve
index_impl.go
Document
func (i *indexImpl) Document(id string) (doc *document.Document, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return nil, ErrorIndexClosed } indexReader, err := i.i.Reader() if err != nil { return nil, err } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() doc, err = indexReader.Document(id) if err != nil { return nil, err } return doc, nil }
go
func (i *indexImpl) Document(id string) (doc *document.Document, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return nil, ErrorIndexClosed } indexReader, err := i.i.Reader() if err != nil { return nil, err } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() doc, err = indexReader.Document(id) if err != nil { return nil, err } return doc, nil }
[ "func", "(", "i", "*", "indexImpl", ")", "Document", "(", "id", "string", ")", "(", "doc", "*", "document", ".", "Document", ",", "err", "error", ")", "{", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "nil", ",", "ErrorIndexClosed", "\n", "}", "\n", "indexReader", ",", "err", ":=", "i", ".", "i", ".", "Reader", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "defer", "func", "(", ")", "{", "if", "cerr", ":=", "indexReader", ".", "Close", "(", ")", ";", "err", "==", "nil", "&&", "cerr", "!=", "nil", "{", "err", "=", "cerr", "\n", "}", "\n", "}", "(", ")", "\n\n", "doc", ",", "err", "=", "indexReader", ".", "Document", "(", "id", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "return", "doc", ",", "nil", "\n", "}" ]
// Document is used to find the values of all the // stored fields for a document in the index. These // stored fields are put back into a Document object // and returned.
[ "Document", "is", "used", "to", "find", "the", "values", "of", "all", "the", "stored", "fields", "for", "a", "document", "in", "the", "index", ".", "These", "stored", "fields", "are", "put", "back", "into", "a", "Document", "object", "and", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L316-L338
162,120
blevesearch/bleve
index_impl.go
DocCount
func (i *indexImpl) DocCount() (count uint64, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return 0, ErrorIndexClosed } // open a reader for this search indexReader, err := i.i.Reader() if err != nil { return 0, fmt.Errorf("error opening index reader %v", err) } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() count, err = indexReader.DocCount() return }
go
func (i *indexImpl) DocCount() (count uint64, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return 0, ErrorIndexClosed } // open a reader for this search indexReader, err := i.i.Reader() if err != nil { return 0, fmt.Errorf("error opening index reader %v", err) } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() count, err = indexReader.DocCount() return }
[ "func", "(", "i", "*", "indexImpl", ")", "DocCount", "(", ")", "(", "count", "uint64", ",", "err", "error", ")", "{", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "0", ",", "ErrorIndexClosed", "\n", "}", "\n\n", "// open a reader for this search", "indexReader", ",", "err", ":=", "i", ".", "i", ".", "Reader", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "0", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n", "defer", "func", "(", ")", "{", "if", "cerr", ":=", "indexReader", ".", "Close", "(", ")", ";", "err", "==", "nil", "&&", "cerr", "!=", "nil", "{", "err", "=", "cerr", "\n", "}", "\n", "}", "(", ")", "\n\n", "count", ",", "err", "=", "indexReader", ".", "DocCount", "(", ")", "\n", "return", "\n", "}" ]
// DocCount returns the number of documents in the // index.
[ "DocCount", "returns", "the", "number", "of", "documents", "in", "the", "index", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L342-L363
162,121
blevesearch/bleve
index_impl.go
Search
func (i *indexImpl) Search(req *SearchRequest) (sr *SearchResult, err error) { return i.SearchInContext(context.Background(), req) }
go
func (i *indexImpl) Search(req *SearchRequest) (sr *SearchResult, err error) { return i.SearchInContext(context.Background(), req) }
[ "func", "(", "i", "*", "indexImpl", ")", "Search", "(", "req", "*", "SearchRequest", ")", "(", "sr", "*", "SearchResult", ",", "err", "error", ")", "{", "return", "i", ".", "SearchInContext", "(", "context", ".", "Background", "(", ")", ",", "req", ")", "\n", "}" ]
// Search executes a search request operation. // Returns a SearchResult object or an error.
[ "Search", "executes", "a", "search", "request", "operation", ".", "Returns", "a", "SearchResult", "object", "or", "an", "error", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L367-L369
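A usage sketch for the Search entry above, which simply delegates to SearchInContext with context.Background(); the index path and query text are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

func main() {
	idx, err := bleve.Open("example.bleve")
	if err != nil {
		panic(err)
	}
	defer idx.Close()

	// Build a match query and run it through the high-level Search API.
	query := bleve.NewMatchQuery("hello")
	req := bleve.NewSearchRequest(query)
	req.Size = 10

	res, err := idx.Search(req)
	if err != nil {
		panic(err)
	}
	fmt.Println("total hits:", res.Total)
}
```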
162,122
blevesearch/bleve
index_impl.go
memNeededForSearch
func memNeededForSearch(req *SearchRequest, searcher search.Searcher, topnCollector *collector.TopNCollector) uint64 { backingSize := req.Size + req.From + 1 if req.Size+req.From > collector.PreAllocSizeSkipCap { backingSize = collector.PreAllocSizeSkipCap + 1 } numDocMatches := backingSize + searcher.DocumentMatchPoolSize() estimate := 0 // overhead, size in bytes from collector estimate += topnCollector.Size() // pre-allocing DocumentMatchPool estimate += searchContextEmptySize + numDocMatches*documentMatchEmptySize // searcher overhead estimate += searcher.Size() // overhead from results, lowestMatchOutsideResults estimate += (numDocMatches + 1) * documentMatchEmptySize // additional overhead from SearchResult estimate += reflectStaticSizeSearchResult + reflectStaticSizeSearchStatus // overhead from facet results if req.Facets != nil { estimate += len(req.Facets) * facetResultEmptySize } // highlighting, store if len(req.Fields) > 0 || req.Highlight != nil { // Size + From => number of hits estimate += (req.Size + req.From) * documentEmptySize } return uint64(estimate) }
go
func memNeededForSearch(req *SearchRequest, searcher search.Searcher, topnCollector *collector.TopNCollector) uint64 { backingSize := req.Size + req.From + 1 if req.Size+req.From > collector.PreAllocSizeSkipCap { backingSize = collector.PreAllocSizeSkipCap + 1 } numDocMatches := backingSize + searcher.DocumentMatchPoolSize() estimate := 0 // overhead, size in bytes from collector estimate += topnCollector.Size() // pre-allocing DocumentMatchPool estimate += searchContextEmptySize + numDocMatches*documentMatchEmptySize // searcher overhead estimate += searcher.Size() // overhead from results, lowestMatchOutsideResults estimate += (numDocMatches + 1) * documentMatchEmptySize // additional overhead from SearchResult estimate += reflectStaticSizeSearchResult + reflectStaticSizeSearchStatus // overhead from facet results if req.Facets != nil { estimate += len(req.Facets) * facetResultEmptySize } // highlighting, store if len(req.Fields) > 0 || req.Highlight != nil { // Size + From => number of hits estimate += (req.Size + req.From) * documentEmptySize } return uint64(estimate) }
[ "func", "memNeededForSearch", "(", "req", "*", "SearchRequest", ",", "searcher", "search", ".", "Searcher", ",", "topnCollector", "*", "collector", ".", "TopNCollector", ")", "uint64", "{", "backingSize", ":=", "req", ".", "Size", "+", "req", ".", "From", "+", "1", "\n", "if", "req", ".", "Size", "+", "req", ".", "From", ">", "collector", ".", "PreAllocSizeSkipCap", "{", "backingSize", "=", "collector", ".", "PreAllocSizeSkipCap", "+", "1", "\n", "}", "\n", "numDocMatches", ":=", "backingSize", "+", "searcher", ".", "DocumentMatchPoolSize", "(", ")", "\n\n", "estimate", ":=", "0", "\n\n", "// overhead, size in bytes from collector", "estimate", "+=", "topnCollector", ".", "Size", "(", ")", "\n\n", "// pre-allocing DocumentMatchPool", "estimate", "+=", "searchContextEmptySize", "+", "numDocMatches", "*", "documentMatchEmptySize", "\n\n", "// searcher overhead", "estimate", "+=", "searcher", ".", "Size", "(", ")", "\n\n", "// overhead from results, lowestMatchOutsideResults", "estimate", "+=", "(", "numDocMatches", "+", "1", ")", "*", "documentMatchEmptySize", "\n\n", "// additional overhead from SearchResult", "estimate", "+=", "reflectStaticSizeSearchResult", "+", "reflectStaticSizeSearchStatus", "\n\n", "// overhead from facet results", "if", "req", ".", "Facets", "!=", "nil", "{", "estimate", "+=", "len", "(", "req", ".", "Facets", ")", "*", "facetResultEmptySize", "\n", "}", "\n\n", "// highlighting, store", "if", "len", "(", "req", ".", "Fields", ")", ">", "0", "||", "req", ".", "Highlight", "!=", "nil", "{", "// Size + From => number of hits", "estimate", "+=", "(", "req", ".", "Size", "+", "req", ".", "From", ")", "*", "documentEmptySize", "\n", "}", "\n\n", "return", "uint64", "(", "estimate", ")", "\n", "}" ]
// memNeededForSearch is a helper function that returns an estimate of RAM // needed to execute a search request.
[ "memNeededForSearch", "is", "a", "helper", "function", "that", "returns", "an", "estimate", "of", "RAM", "needed", "to", "execute", "a", "search", "request", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L392-L431
162,123
blevesearch/bleve
index_impl.go
Fields
func (i *indexImpl) Fields() (fields []string, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return nil, ErrorIndexClosed } indexReader, err := i.i.Reader() if err != nil { return nil, err } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() fields, err = indexReader.Fields() if err != nil { return nil, err } return fields, nil }
go
func (i *indexImpl) Fields() (fields []string, err error) { i.mutex.RLock() defer i.mutex.RUnlock() if !i.open { return nil, ErrorIndexClosed } indexReader, err := i.i.Reader() if err != nil { return nil, err } defer func() { if cerr := indexReader.Close(); err == nil && cerr != nil { err = cerr } }() fields, err = indexReader.Fields() if err != nil { return nil, err } return fields, nil }
[ "func", "(", "i", "*", "indexImpl", ")", "Fields", "(", ")", "(", "fields", "[", "]", "string", ",", "err", "error", ")", "{", "i", ".", "mutex", ".", "RLock", "(", ")", "\n", "defer", "i", ".", "mutex", ".", "RUnlock", "(", ")", "\n\n", "if", "!", "i", ".", "open", "{", "return", "nil", ",", "ErrorIndexClosed", "\n", "}", "\n\n", "indexReader", ",", "err", ":=", "i", ".", "i", ".", "Reader", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "defer", "func", "(", ")", "{", "if", "cerr", ":=", "indexReader", ".", "Close", "(", ")", ";", "err", "==", "nil", "&&", "cerr", "!=", "nil", "{", "err", "=", "cerr", "\n", "}", "\n", "}", "(", ")", "\n\n", "fields", ",", "err", "=", "indexReader", ".", "Fields", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "return", "fields", ",", "nil", "\n", "}" ]
// Fields returns the name of all the fields this // Index has operated on.
[ "Fields", "returns", "the", "name", "of", "all", "the", "fields", "this", "Index", "has", "operated", "on", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L648-L671
162,124
blevesearch/bleve
index_impl.go
NewBatch
func (i *indexImpl) NewBatch() *Batch { return &Batch{ index: i, internal: index.NewBatch(), } }
go
func (i *indexImpl) NewBatch() *Batch { return &Batch{ index: i, internal: index.NewBatch(), } }
[ "func", "(", "i", "*", "indexImpl", ")", "NewBatch", "(", ")", "*", "Batch", "{", "return", "&", "Batch", "{", "index", ":", "i", ",", "internal", ":", "index", ".", "NewBatch", "(", ")", ",", "}", "\n", "}" ]
// NewBatch creates a new empty batch.
[ "NewBatch", "creates", "a", "new", "empty", "batch", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L820-L825
162,125
blevesearch/bleve
index_impl.go
deDuplicate
func deDuplicate(fields []string) []string { entries := make(map[string]struct{}) ret := []string{} for _, entry := range fields { if _, exists := entries[entry]; !exists { entries[entry] = struct{}{} ret = append(ret, entry) } } return ret }
go
func deDuplicate(fields []string) []string { entries := make(map[string]struct{}) ret := []string{} for _, entry := range fields { if _, exists := entries[entry]; !exists { entries[entry] = struct{}{} ret = append(ret, entry) } } return ret }
[ "func", "deDuplicate", "(", "fields", "[", "]", "string", ")", "[", "]", "string", "{", "entries", ":=", "make", "(", "map", "[", "string", "]", "struct", "{", "}", ")", "\n", "ret", ":=", "[", "]", "string", "{", "}", "\n", "for", "_", ",", "entry", ":=", "range", "fields", "{", "if", "_", ",", "exists", ":=", "entries", "[", "entry", "]", ";", "!", "exists", "{", "entries", "[", "entry", "]", "=", "struct", "{", "}", "{", "}", "\n", "ret", "=", "append", "(", "ret", ",", "entry", ")", "\n", "}", "\n", "}", "\n", "return", "ret", "\n", "}" ]
// helper function to remove duplicate entries from slice of strings
[ "helper", "function", "to", "remove", "duplicate", "entries", "from", "slice", "of", "strings" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_impl.go#L857-L867
162,126
blevesearch/bleve
search/searcher/search_geopointdistance.go
boxSearcher
func boxSearcher(indexReader index.IndexReader, topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64, field string, boost float64, options search.SearcherOptions) ( search.Searcher, error) { if bottomRightLon < topLeftLon { // cross date line, rewrite as two parts leftSearcher, err := NewGeoBoundingBoxSearcher(indexReader, -180, bottomRightLat, bottomRightLon, topLeftLat, field, boost, options, false) if err != nil { return nil, err } rightSearcher, err := NewGeoBoundingBoxSearcher(indexReader, topLeftLon, bottomRightLat, 180, topLeftLat, field, boost, options, false) if err != nil { _ = leftSearcher.Close() return nil, err } boxSearcher, err := NewDisjunctionSearcher(indexReader, []search.Searcher{leftSearcher, rightSearcher}, 0, options) if err != nil { _ = leftSearcher.Close() _ = rightSearcher.Close() return nil, err } return boxSearcher, nil } // build geoboundinggox searcher for that bounding box boxSearcher, err := NewGeoBoundingBoxSearcher(indexReader, topLeftLon, bottomRightLat, bottomRightLon, topLeftLat, field, boost, options, false) if err != nil { return nil, err } return boxSearcher, nil }
go
func boxSearcher(indexReader index.IndexReader, topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64, field string, boost float64, options search.SearcherOptions) ( search.Searcher, error) { if bottomRightLon < topLeftLon { // cross date line, rewrite as two parts leftSearcher, err := NewGeoBoundingBoxSearcher(indexReader, -180, bottomRightLat, bottomRightLon, topLeftLat, field, boost, options, false) if err != nil { return nil, err } rightSearcher, err := NewGeoBoundingBoxSearcher(indexReader, topLeftLon, bottomRightLat, 180, topLeftLat, field, boost, options, false) if err != nil { _ = leftSearcher.Close() return nil, err } boxSearcher, err := NewDisjunctionSearcher(indexReader, []search.Searcher{leftSearcher, rightSearcher}, 0, options) if err != nil { _ = leftSearcher.Close() _ = rightSearcher.Close() return nil, err } return boxSearcher, nil } // build geoboundinggox searcher for that bounding box boxSearcher, err := NewGeoBoundingBoxSearcher(indexReader, topLeftLon, bottomRightLat, bottomRightLon, topLeftLat, field, boost, options, false) if err != nil { return nil, err } return boxSearcher, nil }
[ "func", "boxSearcher", "(", "indexReader", "index", ".", "IndexReader", ",", "topLeftLon", ",", "topLeftLat", ",", "bottomRightLon", ",", "bottomRightLat", "float64", ",", "field", "string", ",", "boost", "float64", ",", "options", "search", ".", "SearcherOptions", ")", "(", "search", ".", "Searcher", ",", "error", ")", "{", "if", "bottomRightLon", "<", "topLeftLon", "{", "// cross date line, rewrite as two parts", "leftSearcher", ",", "err", ":=", "NewGeoBoundingBoxSearcher", "(", "indexReader", ",", "-", "180", ",", "bottomRightLat", ",", "bottomRightLon", ",", "topLeftLat", ",", "field", ",", "boost", ",", "options", ",", "false", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "rightSearcher", ",", "err", ":=", "NewGeoBoundingBoxSearcher", "(", "indexReader", ",", "topLeftLon", ",", "bottomRightLat", ",", "180", ",", "topLeftLat", ",", "field", ",", "boost", ",", "options", ",", "false", ")", "\n", "if", "err", "!=", "nil", "{", "_", "=", "leftSearcher", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n\n", "boxSearcher", ",", "err", ":=", "NewDisjunctionSearcher", "(", "indexReader", ",", "[", "]", "search", ".", "Searcher", "{", "leftSearcher", ",", "rightSearcher", "}", ",", "0", ",", "options", ")", "\n", "if", "err", "!=", "nil", "{", "_", "=", "leftSearcher", ".", "Close", "(", ")", "\n", "_", "=", "rightSearcher", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n", "return", "boxSearcher", ",", "nil", "\n", "}", "\n\n", "// build geoboundinggox searcher for that bounding box", "boxSearcher", ",", "err", ":=", "NewGeoBoundingBoxSearcher", "(", "indexReader", ",", "topLeftLon", ",", "bottomRightLat", ",", "bottomRightLon", ",", "topLeftLat", ",", "field", ",", "boost", ",", "options", ",", "false", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "return", "boxSearcher", ",", "nil", "\n", "}" ]
// boxSearcher builds a searcher for the described bounding box // if the desired box crosses the dateline, it is automatically split into // two boxes joined through a disjunction searcher
[ "boxSearcher", "builds", "a", "searcher", "for", "the", "described", "bounding", "box", "if", "the", "desired", "box", "crosses", "the", "dateline", "it", "is", "automatically", "split", "into", "two", "boxes", "joined", "through", "a", "disjunction", "searcher" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/searcher/search_geopointdistance.go#L55-L93
162,127
blevesearch/bleve
index_alias_impl.go
NewIndexAlias
func NewIndexAlias(indexes ...Index) *indexAliasImpl { return &indexAliasImpl{ name: "alias", indexes: indexes, open: true, } }
go
func NewIndexAlias(indexes ...Index) *indexAliasImpl { return &indexAliasImpl{ name: "alias", indexes: indexes, open: true, } }
[ "func", "NewIndexAlias", "(", "indexes", "...", "Index", ")", "*", "indexAliasImpl", "{", "return", "&", "indexAliasImpl", "{", "name", ":", "\"", "\"", ",", "indexes", ":", "indexes", ",", "open", ":", "true", ",", "}", "\n", "}" ]
// NewIndexAlias creates a new IndexAlias over the provided // Index objects.
[ "NewIndexAlias", "creates", "a", "new", "IndexAlias", "over", "the", "provided", "Index", "objects", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_alias_impl.go#L39-L45
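A usage sketch for the NewIndexAlias entry above: an alias fans a search out over several indexes (the MultiSearch entry that follows merges the per-index results). The shard paths and query are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

func main() {
	idx1, err := bleve.Open("shard1.bleve")
	if err != nil {
		panic(err)
	}
	idx2, err := bleve.Open("shard2.bleve")
	if err != nil {
		panic(err)
	}

	// Searches against the alias run on every member index and are merged.
	alias := bleve.NewIndexAlias(idx1, idx2)

	req := bleve.NewSearchRequest(bleve.NewMatchQuery("hello"))
	res, err := alias.Search(req)
	if err != nil {
		panic(err)
	}
	fmt.Println("merged hits:", res.Total)
}
```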
162,128
blevesearch/bleve
index_alias_impl.go
MultiSearch
func MultiSearch(ctx context.Context, req *SearchRequest, indexes ...Index) (*SearchResult, error) { searchStart := time.Now() asyncResults := make(chan *asyncSearchResult, len(indexes)) // run search on each index in separate go routine var waitGroup sync.WaitGroup var searchChildIndex = func(in Index, childReq *SearchRequest) { rv := asyncSearchResult{Name: in.Name()} rv.Result, rv.Err = in.SearchInContext(ctx, childReq) asyncResults <- &rv waitGroup.Done() } waitGroup.Add(len(indexes)) for _, in := range indexes { go searchChildIndex(in, createChildSearchRequest(req)) } // on another go routine, close after finished go func() { waitGroup.Wait() close(asyncResults) }() var sr *SearchResult indexErrors := make(map[string]error) for asr := range asyncResults { if asr.Err == nil { if sr == nil { // first result sr = asr.Result } else { // merge with previous sr.Merge(asr.Result) } } else { indexErrors[asr.Name] = asr.Err } } // merge just concatenated all the hits // now lets clean it up // handle case where no results were successful if sr == nil { sr = &SearchResult{ Status: &SearchStatus{ Errors: make(map[string]error), }, } } // sort all hits with the requested order if len(req.Sort) > 0 { sorter := newMultiSearchHitSorter(req.Sort, sr.Hits) sort.Sort(sorter) } // now skip over the correct From if req.From > 0 && len(sr.Hits) > req.From { sr.Hits = sr.Hits[req.From:] } else if req.From > 0 { sr.Hits = search.DocumentMatchCollection{} } // now trim to the correct size if req.Size > 0 && len(sr.Hits) > req.Size { sr.Hits = sr.Hits[0:req.Size] } // fix up facets for name, fr := range req.Facets { sr.Facets.Fixup(name, fr.Size) } // fix up original request sr.Request = req searchDuration := time.Since(searchStart) sr.Took = searchDuration // fix up errors if len(indexErrors) > 0 { if sr.Status.Errors == nil { sr.Status.Errors = make(map[string]error) } for indexName, indexErr := range indexErrors { sr.Status.Errors[indexName] = indexErr sr.Status.Total++ sr.Status.Failed++ } } return sr, nil }
go
func MultiSearch(ctx context.Context, req *SearchRequest, indexes ...Index) (*SearchResult, error) { searchStart := time.Now() asyncResults := make(chan *asyncSearchResult, len(indexes)) // run search on each index in separate go routine var waitGroup sync.WaitGroup var searchChildIndex = func(in Index, childReq *SearchRequest) { rv := asyncSearchResult{Name: in.Name()} rv.Result, rv.Err = in.SearchInContext(ctx, childReq) asyncResults <- &rv waitGroup.Done() } waitGroup.Add(len(indexes)) for _, in := range indexes { go searchChildIndex(in, createChildSearchRequest(req)) } // on another go routine, close after finished go func() { waitGroup.Wait() close(asyncResults) }() var sr *SearchResult indexErrors := make(map[string]error) for asr := range asyncResults { if asr.Err == nil { if sr == nil { // first result sr = asr.Result } else { // merge with previous sr.Merge(asr.Result) } } else { indexErrors[asr.Name] = asr.Err } } // merge just concatenated all the hits // now lets clean it up // handle case where no results were successful if sr == nil { sr = &SearchResult{ Status: &SearchStatus{ Errors: make(map[string]error), }, } } // sort all hits with the requested order if len(req.Sort) > 0 { sorter := newMultiSearchHitSorter(req.Sort, sr.Hits) sort.Sort(sorter) } // now skip over the correct From if req.From > 0 && len(sr.Hits) > req.From { sr.Hits = sr.Hits[req.From:] } else if req.From > 0 { sr.Hits = search.DocumentMatchCollection{} } // now trim to the correct size if req.Size > 0 && len(sr.Hits) > req.Size { sr.Hits = sr.Hits[0:req.Size] } // fix up facets for name, fr := range req.Facets { sr.Facets.Fixup(name, fr.Size) } // fix up original request sr.Request = req searchDuration := time.Since(searchStart) sr.Took = searchDuration // fix up errors if len(indexErrors) > 0 { if sr.Status.Errors == nil { sr.Status.Errors = make(map[string]error) } for indexName, indexErr := range indexErrors { sr.Status.Errors[indexName] = indexErr sr.Status.Total++ sr.Status.Failed++ } } return sr, nil }
[ "func", "MultiSearch", "(", "ctx", "context", ".", "Context", ",", "req", "*", "SearchRequest", ",", "indexes", "...", "Index", ")", "(", "*", "SearchResult", ",", "error", ")", "{", "searchStart", ":=", "time", ".", "Now", "(", ")", "\n", "asyncResults", ":=", "make", "(", "chan", "*", "asyncSearchResult", ",", "len", "(", "indexes", ")", ")", "\n\n", "// run search on each index in separate go routine", "var", "waitGroup", "sync", ".", "WaitGroup", "\n\n", "var", "searchChildIndex", "=", "func", "(", "in", "Index", ",", "childReq", "*", "SearchRequest", ")", "{", "rv", ":=", "asyncSearchResult", "{", "Name", ":", "in", ".", "Name", "(", ")", "}", "\n", "rv", ".", "Result", ",", "rv", ".", "Err", "=", "in", ".", "SearchInContext", "(", "ctx", ",", "childReq", ")", "\n", "asyncResults", "<-", "&", "rv", "\n", "waitGroup", ".", "Done", "(", ")", "\n", "}", "\n\n", "waitGroup", ".", "Add", "(", "len", "(", "indexes", ")", ")", "\n", "for", "_", ",", "in", ":=", "range", "indexes", "{", "go", "searchChildIndex", "(", "in", ",", "createChildSearchRequest", "(", "req", ")", ")", "\n", "}", "\n\n", "// on another go routine, close after finished", "go", "func", "(", ")", "{", "waitGroup", ".", "Wait", "(", ")", "\n", "close", "(", "asyncResults", ")", "\n", "}", "(", ")", "\n\n", "var", "sr", "*", "SearchResult", "\n", "indexErrors", ":=", "make", "(", "map", "[", "string", "]", "error", ")", "\n\n", "for", "asr", ":=", "range", "asyncResults", "{", "if", "asr", ".", "Err", "==", "nil", "{", "if", "sr", "==", "nil", "{", "// first result", "sr", "=", "asr", ".", "Result", "\n", "}", "else", "{", "// merge with previous", "sr", ".", "Merge", "(", "asr", ".", "Result", ")", "\n", "}", "\n", "}", "else", "{", "indexErrors", "[", "asr", ".", "Name", "]", "=", "asr", ".", "Err", "\n", "}", "\n", "}", "\n\n", "// merge just concatenated all the hits", "// now lets clean it up", "// handle case where no results were successful", "if", "sr", "==", "nil", "{", "sr", "=", "&", "SearchResult", "{", "Status", ":", "&", "SearchStatus", "{", "Errors", ":", "make", "(", "map", "[", "string", "]", "error", ")", ",", "}", ",", "}", "\n", "}", "\n\n", "// sort all hits with the requested order", "if", "len", "(", "req", ".", "Sort", ")", ">", "0", "{", "sorter", ":=", "newMultiSearchHitSorter", "(", "req", ".", "Sort", ",", "sr", ".", "Hits", ")", "\n", "sort", ".", "Sort", "(", "sorter", ")", "\n", "}", "\n\n", "// now skip over the correct From", "if", "req", ".", "From", ">", "0", "&&", "len", "(", "sr", ".", "Hits", ")", ">", "req", ".", "From", "{", "sr", ".", "Hits", "=", "sr", ".", "Hits", "[", "req", ".", "From", ":", "]", "\n", "}", "else", "if", "req", ".", "From", ">", "0", "{", "sr", ".", "Hits", "=", "search", ".", "DocumentMatchCollection", "{", "}", "\n", "}", "\n\n", "// now trim to the correct size", "if", "req", ".", "Size", ">", "0", "&&", "len", "(", "sr", ".", "Hits", ")", ">", "req", ".", "Size", "{", "sr", ".", "Hits", "=", "sr", ".", "Hits", "[", "0", ":", "req", ".", "Size", "]", "\n", "}", "\n\n", "// fix up facets", "for", "name", ",", "fr", ":=", "range", "req", ".", "Facets", "{", "sr", ".", "Facets", ".", "Fixup", "(", "name", ",", "fr", ".", "Size", ")", "\n", "}", "\n\n", "// fix up original request", "sr", ".", "Request", "=", "req", "\n", "searchDuration", ":=", "time", ".", "Since", "(", "searchStart", ")", "\n", "sr", ".", "Took", "=", "searchDuration", "\n\n", "// fix up errors", "if", "len", "(", "indexErrors", ")", ">", "0", "{", "if", "sr", ".", "Status", ".", "Errors", 
"==", "nil", "{", "sr", ".", "Status", ".", "Errors", "=", "make", "(", "map", "[", "string", "]", "error", ")", "\n", "}", "\n", "for", "indexName", ",", "indexErr", ":=", "range", "indexErrors", "{", "sr", ".", "Status", ".", "Errors", "[", "indexName", "]", "=", "indexErr", "\n", "sr", ".", "Status", ".", "Total", "++", "\n", "sr", ".", "Status", ".", "Failed", "++", "\n", "}", "\n", "}", "\n\n", "return", "sr", ",", "nil", "\n", "}" ]
// MultiSearch executes a SearchRequest across multiple Index objects, // then merges the results. The indexes must honor any ctx deadline.
[ "MultiSearch", "executes", "a", "SearchRequest", "across", "multiple", "Index", "objects", "then", "merges", "the", "results", ".", "The", "indexes", "must", "honor", "any", "ctx", "deadline", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index_alias_impl.go#L449-L545
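Since MultiSearch documents that child indexes must honor the ctx deadline, a hedged sketch of a deadline-bounded search through an alias follows; the 50ms budget is arbitrary, and the alias is assumed to wrap more than one index so the fan-out path above is exercised.

// assumes: import ("context"; "fmt"; "time"; "github.com/blevesearch/bleve")
func searchWithDeadline(alias bleve.IndexAlias) error {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()
	req := bleve.NewSearchRequest(bleve.NewMatchQuery("deadline"))
	res, err := alias.SearchInContext(ctx, req)
	if err != nil {
		return err
	}
	// per-child failures land in res.Status rather than failing the whole call
	fmt.Printf("total=%d failed=%d\n", res.Total, res.Status.Failed)
	return nil
}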
162,129
blevesearch/bleve
index/scorch/segment/zap/segment.go
Open
func Open(path string) (segment.Segment, error) { f, err := os.Open(path) if err != nil { return nil, err } mm, err := mmap.Map(f, mmap.RDONLY, 0) if err != nil { // mmap failed, try to close the file _ = f.Close() return nil, err } rv := &Segment{ SegmentBase: SegmentBase{ mem: mm[0 : len(mm)-FooterSize], fieldsMap: make(map[string]uint16), fieldDvReaders: make(map[uint16]*docValueReader), fieldFSTs: make(map[uint16]*vellum.FST), }, f: f, mm: mm, path: path, refs: 1, } rv.SegmentBase.updateSize() err = rv.loadConfig() if err != nil { _ = rv.Close() return nil, err } err = rv.loadFields() if err != nil { _ = rv.Close() return nil, err } err = rv.loadDvReaders() if err != nil { _ = rv.Close() return nil, err } return rv, nil }
go
func Open(path string) (segment.Segment, error) { f, err := os.Open(path) if err != nil { return nil, err } mm, err := mmap.Map(f, mmap.RDONLY, 0) if err != nil { // mmap failed, try to close the file _ = f.Close() return nil, err } rv := &Segment{ SegmentBase: SegmentBase{ mem: mm[0 : len(mm)-FooterSize], fieldsMap: make(map[string]uint16), fieldDvReaders: make(map[uint16]*docValueReader), fieldFSTs: make(map[uint16]*vellum.FST), }, f: f, mm: mm, path: path, refs: 1, } rv.SegmentBase.updateSize() err = rv.loadConfig() if err != nil { _ = rv.Close() return nil, err } err = rv.loadFields() if err != nil { _ = rv.Close() return nil, err } err = rv.loadDvReaders() if err != nil { _ = rv.Close() return nil, err } return rv, nil }
[ "func", "Open", "(", "path", "string", ")", "(", "segment", ".", "Segment", ",", "error", ")", "{", "f", ",", "err", ":=", "os", ".", "Open", "(", "path", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "mm", ",", "err", ":=", "mmap", ".", "Map", "(", "f", ",", "mmap", ".", "RDONLY", ",", "0", ")", "\n", "if", "err", "!=", "nil", "{", "// mmap failed, try to close the file", "_", "=", "f", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n\n", "rv", ":=", "&", "Segment", "{", "SegmentBase", ":", "SegmentBase", "{", "mem", ":", "mm", "[", "0", ":", "len", "(", "mm", ")", "-", "FooterSize", "]", ",", "fieldsMap", ":", "make", "(", "map", "[", "string", "]", "uint16", ")", ",", "fieldDvReaders", ":", "make", "(", "map", "[", "uint16", "]", "*", "docValueReader", ")", ",", "fieldFSTs", ":", "make", "(", "map", "[", "uint16", "]", "*", "vellum", ".", "FST", ")", ",", "}", ",", "f", ":", "f", ",", "mm", ":", "mm", ",", "path", ":", "path", ",", "refs", ":", "1", ",", "}", "\n", "rv", ".", "SegmentBase", ".", "updateSize", "(", ")", "\n\n", "err", "=", "rv", ".", "loadConfig", "(", ")", "\n", "if", "err", "!=", "nil", "{", "_", "=", "rv", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n\n", "err", "=", "rv", ".", "loadFields", "(", ")", "\n", "if", "err", "!=", "nil", "{", "_", "=", "rv", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n\n", "err", "=", "rv", ".", "loadDvReaders", "(", ")", "\n", "if", "err", "!=", "nil", "{", "_", "=", "rv", ".", "Close", "(", ")", "\n", "return", "nil", ",", "err", "\n", "}", "\n\n", "return", "rv", ",", "nil", "\n", "}" ]
// Open returns a zap impl of a segment
[ "Open", "returns", "a", "zap", "impl", "of", "a", "segment" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L42-L87
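A hedged sketch of opening a zap segment through the internal API in the record above; the file name is hypothetical, and the snippet only reads the document count before releasing the segment.

// assumes: import ("fmt"; "log"; "github.com/blevesearch/bleve/index/scorch/segment/zap")
func inspectSegment() {
	seg, err := zap.Open("000000000001.zap") // hypothetical segment file
	if err != nil {
		log.Fatal(err)
	}
	defer func() { _ = seg.Close() }()
	fmt.Println("documents in segment:", seg.Count())
}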
162,130
blevesearch/bleve
index/scorch/segment/zap/segment.go
Dictionary
func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { dict, err := s.dictionary(field) if err == nil && dict == nil { return &segment.EmptyDictionary{}, nil } return dict, err }
go
func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { dict, err := s.dictionary(field) if err == nil && dict == nil { return &segment.EmptyDictionary{}, nil } return dict, err }
[ "func", "(", "s", "*", "SegmentBase", ")", "Dictionary", "(", "field", "string", ")", "(", "segment", ".", "TermDictionary", ",", "error", ")", "{", "dict", ",", "err", ":=", "s", ".", "dictionary", "(", "field", ")", "\n", "if", "err", "==", "nil", "&&", "dict", "==", "nil", "{", "return", "&", "segment", ".", "EmptyDictionary", "{", "}", ",", "nil", "\n", "}", "\n", "return", "dict", ",", "err", "\n", "}" ]
// Dictionary returns the term dictionary for the specified field
[ "Dictionary", "returns", "the", "term", "dictionary", "for", "the", "specified", "field" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L246-L252
162,131
blevesearch/bleve
index/scorch/segment/zap/segment.go
VisitDocument
func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) defer visitDocumentCtxPool.Put(vdc) return s.visitDocument(vdc, num, visitor) }
go
func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) defer visitDocumentCtxPool.Put(vdc) return s.visitDocument(vdc, num, visitor) }
[ "func", "(", "s", "*", "SegmentBase", ")", "VisitDocument", "(", "num", "uint64", ",", "visitor", "segment", ".", "DocumentFieldValueVisitor", ")", "error", "{", "vdc", ":=", "visitDocumentCtxPool", ".", "Get", "(", ")", ".", "(", "*", "visitDocumentCtx", ")", "\n", "defer", "visitDocumentCtxPool", ".", "Put", "(", "vdc", ")", "\n", "return", "s", ".", "visitDocument", "(", "vdc", ",", "num", ",", "visitor", ")", "\n", "}" ]
// VisitDocument invokes the DocumentFieldValueVisitor for each stored field // for the specified doc number
[ "VisitDocument", "invokes", "the", "DocumentFieldValueVisitor", "for", "each", "stored", "field", "for", "the", "specified", "doc", "number" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L309-L313
162,132
blevesearch/bleve
index/scorch/segment/zap/segment.go
DocID
func (s *SegmentBase) DocID(num uint64) ([]byte, error) { if num >= s.numDocs { return nil, nil } vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) meta, compressed := s.getDocStoredMetaAndCompressed(num) vdc.reader.Reset(meta) // handle _id field special case idFieldValLen, err := binary.ReadUvarint(&vdc.reader) if err != nil { return nil, err } idFieldVal := compressed[:idFieldValLen] visitDocumentCtxPool.Put(vdc) return idFieldVal, nil }
go
func (s *SegmentBase) DocID(num uint64) ([]byte, error) { if num >= s.numDocs { return nil, nil } vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) meta, compressed := s.getDocStoredMetaAndCompressed(num) vdc.reader.Reset(meta) // handle _id field special case idFieldValLen, err := binary.ReadUvarint(&vdc.reader) if err != nil { return nil, err } idFieldVal := compressed[:idFieldValLen] visitDocumentCtxPool.Put(vdc) return idFieldVal, nil }
[ "func", "(", "s", "*", "SegmentBase", ")", "DocID", "(", "num", "uint64", ")", "(", "[", "]", "byte", ",", "error", ")", "{", "if", "num", ">=", "s", ".", "numDocs", "{", "return", "nil", ",", "nil", "\n", "}", "\n\n", "vdc", ":=", "visitDocumentCtxPool", ".", "Get", "(", ")", ".", "(", "*", "visitDocumentCtx", ")", "\n\n", "meta", ",", "compressed", ":=", "s", ".", "getDocStoredMetaAndCompressed", "(", "num", ")", "\n\n", "vdc", ".", "reader", ".", "Reset", "(", "meta", ")", "\n\n", "// handle _id field special case", "idFieldValLen", ",", "err", ":=", "binary", ".", "ReadUvarint", "(", "&", "vdc", ".", "reader", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "idFieldVal", ":=", "compressed", "[", ":", "idFieldValLen", "]", "\n\n", "visitDocumentCtxPool", ".", "Put", "(", "vdc", ")", "\n\n", "return", "idFieldVal", ",", "nil", "\n", "}" ]
// DocID returns the value of the _id field for the given docNum
[ "DocID", "returns", "the", "value", "of", "the", "_id", "field", "for", "the", "given", "docNum" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L393-L414
162,133
blevesearch/bleve
index/scorch/segment/zap/segment.go
DocNumbers
func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { rv := roaring.New() if len(s.fieldsMap) > 0 { idDict, err := s.dictionary("_id") if err != nil { return nil, err } postingsList := emptyPostingsList sMax, err := idDict.fst.GetMaxKey() if err != nil { return nil, err } sMaxStr := string(sMax) filteredIds := make([]string, 0, len(ids)) for _, id := range ids { if id <= sMaxStr { filteredIds = append(filteredIds, id) } } for _, id := range filteredIds { postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) if err != nil { return nil, err } postingsList.OrInto(rv) } } return rv, nil }
go
func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { rv := roaring.New() if len(s.fieldsMap) > 0 { idDict, err := s.dictionary("_id") if err != nil { return nil, err } postingsList := emptyPostingsList sMax, err := idDict.fst.GetMaxKey() if err != nil { return nil, err } sMaxStr := string(sMax) filteredIds := make([]string, 0, len(ids)) for _, id := range ids { if id <= sMaxStr { filteredIds = append(filteredIds, id) } } for _, id := range filteredIds { postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) if err != nil { return nil, err } postingsList.OrInto(rv) } } return rv, nil }
[ "func", "(", "s", "*", "SegmentBase", ")", "DocNumbers", "(", "ids", "[", "]", "string", ")", "(", "*", "roaring", ".", "Bitmap", ",", "error", ")", "{", "rv", ":=", "roaring", ".", "New", "(", ")", "\n\n", "if", "len", "(", "s", ".", "fieldsMap", ")", ">", "0", "{", "idDict", ",", "err", ":=", "s", ".", "dictionary", "(", "\"", "\"", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n\n", "postingsList", ":=", "emptyPostingsList", "\n\n", "sMax", ",", "err", ":=", "idDict", ".", "fst", ".", "GetMaxKey", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "sMaxStr", ":=", "string", "(", "sMax", ")", "\n", "filteredIds", ":=", "make", "(", "[", "]", "string", ",", "0", ",", "len", "(", "ids", ")", ")", "\n", "for", "_", ",", "id", ":=", "range", "ids", "{", "if", "id", "<=", "sMaxStr", "{", "filteredIds", "=", "append", "(", "filteredIds", ",", "id", ")", "\n", "}", "\n", "}", "\n\n", "for", "_", ",", "id", ":=", "range", "filteredIds", "{", "postingsList", ",", "err", "=", "idDict", ".", "postingsList", "(", "[", "]", "byte", "(", "id", ")", ",", "nil", ",", "postingsList", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "postingsList", ".", "OrInto", "(", "rv", ")", "\n", "}", "\n", "}", "\n\n", "return", "rv", ",", "nil", "\n", "}" ]
// DocNumbers returns a bitset corresponding to the doc numbers of all the // provided _id strings
[ "DocNumbers", "returns", "a", "bitset", "corresponding", "to", "the", "doc", "numbers", "of", "all", "the", "provided", "_id", "strings" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L423-L456
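A hedged sketch of the DocNumbers lookup above, mapping externally visible _id values to the segment's local doc numbers; the ids are illustrative, and the iterator usage assumes the standard RoaringBitmap API.

// assumes: import ("fmt"; "log"; "github.com/blevesearch/bleve/index/scorch/segment")
func lookupDocNums(seg segment.Segment) {
	bm, err := seg.DocNumbers([]string{"user-1", "user-2"}) // hypothetical _id values
	if err != nil {
		log.Fatal(err)
	}
	it := bm.Iterator()
	for it.HasNext() {
		fmt.Println("local doc number:", it.Next())
	}
}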
162,134
blevesearch/bleve
index/scorch/segment/zap/segment.go
DictAddr
func (s *Segment) DictAddr(field string) (uint64, error) { fieldIDPlus1, ok := s.fieldsMap[field] if !ok { return 0, fmt.Errorf("no such field '%s'", field) } return s.dictLocs[fieldIDPlus1-1], nil }
go
func (s *Segment) DictAddr(field string) (uint64, error) { fieldIDPlus1, ok := s.fieldsMap[field] if !ok { return 0, fmt.Errorf("no such field '%s'", field) } return s.dictLocs[fieldIDPlus1-1], nil }
[ "func", "(", "s", "*", "Segment", ")", "DictAddr", "(", "field", "string", ")", "(", "uint64", ",", "error", ")", "{", "fieldIDPlus1", ",", "ok", ":=", "s", ".", "fieldsMap", "[", "field", "]", "\n", "if", "!", "ok", "{", "return", "0", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "field", ")", "\n", "}", "\n\n", "return", "s", ".", "dictLocs", "[", "fieldIDPlus1", "-", "1", "]", ",", "nil", "\n", "}" ]
// DictAddr is a helper function to compute the file offset where the // dictionary is stored for the specified field.
[ "DictAddr", "is", "a", "helper", "function", "to", "compute", "the", "file", "offset", "where", "the", "dictionary", "is", "stored", "for", "the", "specified", "field", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/segment.go#L532-L539
162,135
blevesearch/bleve
query.go
NewTermRangeQuery
func NewTermRangeQuery(min, max string) *query.TermRangeQuery { return query.NewTermRangeQuery(min, max) }
go
func NewTermRangeQuery(min, max string) *query.TermRangeQuery { return query.NewTermRangeQuery(min, max) }
[ "func", "NewTermRangeQuery", "(", "min", ",", "max", "string", ")", "*", "query", ".", "TermRangeQuery", "{", "return", "query", ".", "NewTermRangeQuery", "(", "min", ",", "max", ")", "\n", "}" ]
// NewTermRangeQuery creates a new Query for ranges // of text terms. // Either, but not both, endpoints can be "". // The minimum value is inclusive. // The maximum value is exclusive.
[ "NewTermRangeQuery", "creates", "a", "new", "Query", "for", "ranges", "of", "text", "terms", ".", "Either", "but", "not", "both", "endpoints", "can", "be", ".", "The", "minimum", "value", "is", "inclusive", ".", "The", "maximum", "value", "is", "exclusive", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/query.go#L147-L149
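A brief sketch of the term range constructor above, recalling that the minimum is inclusive and the maximum exclusive; the field name is hypothetical.

// assumes: import "github.com/blevesearch/bleve"
func nameRange(idx bleve.Index) (*bleve.SearchResult, error) {
	q := bleve.NewTermRangeQuery("apple", "banana") // matches terms in ["apple", "banana")
	q.SetField("name")                              // hypothetical field
	return idx.Search(bleve.NewSearchRequest(q))
}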
162,136
blevesearch/bleve
query.go
NewTermRangeInclusiveQuery
func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive *bool) *query.TermRangeQuery { return query.NewTermRangeInclusiveQuery(min, max, minInclusive, maxInclusive) }
go
func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive *bool) *query.TermRangeQuery { return query.NewTermRangeInclusiveQuery(min, max, minInclusive, maxInclusive) }
[ "func", "NewTermRangeInclusiveQuery", "(", "min", ",", "max", "string", ",", "minInclusive", ",", "maxInclusive", "*", "bool", ")", "*", "query", ".", "TermRangeQuery", "{", "return", "query", ".", "NewTermRangeInclusiveQuery", "(", "min", ",", "max", ",", "minInclusive", ",", "maxInclusive", ")", "\n", "}" ]
// NewTermRangeInclusiveQuery creates a new Query for ranges // of text terms. // Either, but not both, endpoints can be "". // Control endpoint inclusion with inclusiveMin, inclusiveMax.
[ "NewTermRangeInclusiveQuery", "creates", "a", "new", "Query", "for", "ranges", "of", "text", "terms", ".", "Either", "but", "not", "both", "endpoints", "can", "be", ".", "Control", "endpoint", "inclusion", "with", "inclusiveMin", "inclusiveMax", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/query.go#L155-L157
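The inclusive variant takes *bool endpoints, which is easy to get wrong; a short sketch of that plumbing follows, with the field name hypothetical.

// assumes: import "github.com/blevesearch/bleve"
func closedRange(idx bleve.Index) (*bleve.SearchResult, error) {
	inclusive := true
	q := bleve.NewTermRangeInclusiveQuery("a", "m", &inclusive, &inclusive) // both ends included
	q.SetField("name") // hypothetical field
	return idx.Search(bleve.NewSearchRequest(q))
}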
162,137
blevesearch/bleve
query.go
NewGeoBoundingBoxQuery
func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *query.GeoBoundingBoxQuery { return query.NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat) }
go
func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *query.GeoBoundingBoxQuery { return query.NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat) }
[ "func", "NewGeoBoundingBoxQuery", "(", "topLeftLon", ",", "topLeftLat", ",", "bottomRightLon", ",", "bottomRightLat", "float64", ")", "*", "query", ".", "GeoBoundingBoxQuery", "{", "return", "query", ".", "NewGeoBoundingBoxQuery", "(", "topLeftLon", ",", "topLeftLat", ",", "bottomRightLon", ",", "bottomRightLat", ")", "\n", "}" ]
// NewGeoBoundingBoxQuery creates a new Query for performing geo bounding // box searches. The arguments describe the position of the box, and documents // which have an indexed geo point inside the box will be returned.
[ "NewGeoBoundingBoxQuery", "creates", "a", "new", "Query", "for", "performing", "geo", "bounding", "box", "searches", ".", "The", "arguments", "describe", "the", "position", "of", "the", "box", "and", "documents", "which", "have", "an", "indexed", "geo", "point", "inside", "the", "box", "will", "be", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/query.go#L208-L210
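A hedged sketch of the bounding-box query above; the coordinates (top-left lon/lat first, then bottom-right) and the geopoint field name are illustrative.

// assumes: import "github.com/blevesearch/bleve"
func inBox(idx bleve.Index) (*bleve.SearchResult, error) {
	q := bleve.NewGeoBoundingBoxQuery(-2.24, 53.49, -2.20, 53.46) // top-left then bottom-right
	q.SetField("location")                                        // hypothetical geopoint field
	return idx.Search(bleve.NewSearchRequest(q))
}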
162,138
blevesearch/bleve
query.go
NewGeoDistanceQuery
func NewGeoDistanceQuery(lon, lat float64, distance string) *query.GeoDistanceQuery { return query.NewGeoDistanceQuery(lon, lat, distance) }
go
func NewGeoDistanceQuery(lon, lat float64, distance string) *query.GeoDistanceQuery { return query.NewGeoDistanceQuery(lon, lat, distance) }
[ "func", "NewGeoDistanceQuery", "(", "lon", ",", "lat", "float64", ",", "distance", "string", ")", "*", "query", ".", "GeoDistanceQuery", "{", "return", "query", ".", "NewGeoDistanceQuery", "(", "lon", ",", "lat", ",", "distance", ")", "\n", "}" ]
// NewGeoDistanceQuery creates a new Query for performing geo distance // searches. The arguments describe a position and a distance. Documents // which have an indexed geo point which is less than or equal to the provided // distance from the given position will be returned.
[ "NewGeoDistanceQuery", "creates", "a", "new", "Query", "for", "performing", "geo", "distance", "searches", ".", "The", "arguments", "describe", "a", "position", "and", "a", "distance", ".", "Documents", "which", "have", "an", "indexed", "geo", "point", "which", "is", "less", "than", "or", "equal", "to", "the", "provided", "distance", "from", "the", "given", "position", "will", "be", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/query.go#L216-L218
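A hedged sketch of the distance query above; the centre point, the "5km" radius string, and the field name are illustrative.

// assumes: import "github.com/blevesearch/bleve"
func nearby(idx bleve.Index) (*bleve.SearchResult, error) {
	q := bleve.NewGeoDistanceQuery(-122.42, 37.77, "5km") // lon, lat, then distance
	q.SetField("location")                                // hypothetical geopoint field
	return idx.Search(bleve.NewSearchRequest(q))
}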
162,139
blevesearch/bleve
index/index.go
FieldsNotYetCached
func (f FieldTerms) FieldsNotYetCached(fields []string) []string { rv := make([]string, 0, len(fields)) for _, field := range fields { if _, ok := f[field]; !ok { rv = append(rv, field) } } return rv }
go
func (f FieldTerms) FieldsNotYetCached(fields []string) []string { rv := make([]string, 0, len(fields)) for _, field := range fields { if _, ok := f[field]; !ok { rv = append(rv, field) } } return rv }
[ "func", "(", "f", "FieldTerms", ")", "FieldsNotYetCached", "(", "fields", "[", "]", "string", ")", "[", "]", "string", "{", "rv", ":=", "make", "(", "[", "]", "string", ",", "0", ",", "len", "(", "fields", ")", ")", "\n", "for", "_", ",", "field", ":=", "range", "fields", "{", "if", "_", ",", "ok", ":=", "f", "[", "field", "]", ";", "!", "ok", "{", "rv", "=", "append", "(", "rv", ",", "field", ")", "\n", "}", "\n", "}", "\n", "return", "rv", "\n", "}" ]
// FieldsNotYetCached returns a list of fields not yet cached out of a larger list of fields
[ "FieldsNotYetCached", "returns", "a", "list", "of", "fields", "not", "yet", "cached", "out", "of", "a", "larger", "list", "of", "fields" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/index.go#L128-L136
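A tiny sketch of the cache-diff helper above, assuming FieldTerms is the field-to-terms map type defined alongside it in the index package.

// assumes: import "github.com/blevesearch/bleve/index"
cached := index.FieldTerms{
	"title": []string{"quick", "brown"}, // illustrative cached terms
}
missing := cached.FieldsNotYetCached([]string{"title", "body", "tags"})
// missing is expected to be ["body", "tags"]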
162,140
blevesearch/bleve
index/index.go
Reset
func (tfd *TermFieldDoc) Reset() *TermFieldDoc { // remember the []byte used for the ID id := tfd.ID vectors := tfd.Vectors // idiom to copy over from empty TermFieldDoc (0 allocations) *tfd = TermFieldDoc{} // reuse the []byte already allocated (and reset len to 0) tfd.ID = id[:0] tfd.Vectors = vectors[:0] return tfd }
go
func (tfd *TermFieldDoc) Reset() *TermFieldDoc { // remember the []byte used for the ID id := tfd.ID vectors := tfd.Vectors // idiom to copy over from empty TermFieldDoc (0 allocations) *tfd = TermFieldDoc{} // reuse the []byte already allocated (and reset len to 0) tfd.ID = id[:0] tfd.Vectors = vectors[:0] return tfd }
[ "func", "(", "tfd", "*", "TermFieldDoc", ")", "Reset", "(", ")", "*", "TermFieldDoc", "{", "// remember the []byte used for the ID", "id", ":=", "tfd", ".", "ID", "\n", "vectors", ":=", "tfd", ".", "Vectors", "\n", "// idiom to copy over from empty TermFieldDoc (0 allocations)", "*", "tfd", "=", "TermFieldDoc", "{", "}", "\n", "// reuse the []byte already allocated (and reset len to 0)", "tfd", ".", "ID", "=", "id", "[", ":", "0", "]", "\n", "tfd", ".", "Vectors", "=", "vectors", "[", ":", "0", "]", "\n", "return", "tfd", "\n", "}" ]
// Reset allows an already allocated TermFieldDoc to be reused
[ "Reset", "allows", "an", "already", "allocated", "TermFieldDoc", "to", "be", "reused" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/index.go#L191-L201
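Reset exists so a single TermFieldDoc can be recycled across a read loop; a hedged sketch follows, assuming the TermFieldReader interface in the same package accepts a preallocated value on Next.

// assumes: import "github.com/blevesearch/bleve/index"
func drain(reader index.TermFieldReader) error {
	var tfd index.TermFieldDoc
	next, err := reader.Next(tfd.Reset())
	for err == nil && next != nil {
		_ = next.ID // process the posting here
		next, err = reader.Next(tfd.Reset())
	}
	return err
}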
162,141
blevesearch/bleve
index/scorch/mergeplan/merge_plan.go
RaiseToFloorSegmentSize
func (o *MergePlanOptions) RaiseToFloorSegmentSize(s int64) int64 { if s > o.FloorSegmentSize { return s } return o.FloorSegmentSize }
go
func (o *MergePlanOptions) RaiseToFloorSegmentSize(s int64) int64 { if s > o.FloorSegmentSize { return s } return o.FloorSegmentSize }
[ "func", "(", "o", "*", "MergePlanOptions", ")", "RaiseToFloorSegmentSize", "(", "s", "int64", ")", "int64", "{", "if", "s", ">", "o", ".", "FloorSegmentSize", "{", "return", "s", "\n", "}", "\n", "return", "o", ".", "FloorSegmentSize", "\n", "}" ]
// Returns the greater of the input and FloorSegmentSize.
[ "Returns", "the", "greater", "of", "the", "input", "and", "FloorSegmentSize", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/mergeplan/merge_plan.go#L112-L117
162,142
blevesearch/bleve
index/scorch/mergeplan/merge_plan.go
CalcBudget
func CalcBudget(totalSize int64, firstTierSize int64, o *MergePlanOptions) ( budgetNumSegments int) { tierSize := firstTierSize if tierSize < 1 { tierSize = 1 } maxSegmentsPerTier := o.MaxSegmentsPerTier if maxSegmentsPerTier < 1 { maxSegmentsPerTier = 1 } tierGrowth := o.TierGrowth if tierGrowth < 1.0 { tierGrowth = 1.0 } for totalSize > 0 { segmentsInTier := float64(totalSize) / float64(tierSize) if segmentsInTier < float64(maxSegmentsPerTier) { budgetNumSegments += int(math.Ceil(segmentsInTier)) break } budgetNumSegments += maxSegmentsPerTier totalSize -= int64(maxSegmentsPerTier) * tierSize tierSize = int64(float64(tierSize) * tierGrowth) } return budgetNumSegments }
go
func CalcBudget(totalSize int64, firstTierSize int64, o *MergePlanOptions) ( budgetNumSegments int) { tierSize := firstTierSize if tierSize < 1 { tierSize = 1 } maxSegmentsPerTier := o.MaxSegmentsPerTier if maxSegmentsPerTier < 1 { maxSegmentsPerTier = 1 } tierGrowth := o.TierGrowth if tierGrowth < 1.0 { tierGrowth = 1.0 } for totalSize > 0 { segmentsInTier := float64(totalSize) / float64(tierSize) if segmentsInTier < float64(maxSegmentsPerTier) { budgetNumSegments += int(math.Ceil(segmentsInTier)) break } budgetNumSegments += maxSegmentsPerTier totalSize -= int64(maxSegmentsPerTier) * tierSize tierSize = int64(float64(tierSize) * tierGrowth) } return budgetNumSegments }
[ "func", "CalcBudget", "(", "totalSize", "int64", ",", "firstTierSize", "int64", ",", "o", "*", "MergePlanOptions", ")", "(", "budgetNumSegments", "int", ")", "{", "tierSize", ":=", "firstTierSize", "\n", "if", "tierSize", "<", "1", "{", "tierSize", "=", "1", "\n", "}", "\n\n", "maxSegmentsPerTier", ":=", "o", ".", "MaxSegmentsPerTier", "\n", "if", "maxSegmentsPerTier", "<", "1", "{", "maxSegmentsPerTier", "=", "1", "\n", "}", "\n\n", "tierGrowth", ":=", "o", ".", "TierGrowth", "\n", "if", "tierGrowth", "<", "1.0", "{", "tierGrowth", "=", "1.0", "\n", "}", "\n\n", "for", "totalSize", ">", "0", "{", "segmentsInTier", ":=", "float64", "(", "totalSize", ")", "/", "float64", "(", "tierSize", ")", "\n", "if", "segmentsInTier", "<", "float64", "(", "maxSegmentsPerTier", ")", "{", "budgetNumSegments", "+=", "int", "(", "math", ".", "Ceil", "(", "segmentsInTier", ")", ")", "\n", "break", "\n", "}", "\n\n", "budgetNumSegments", "+=", "maxSegmentsPerTier", "\n", "totalSize", "-=", "int64", "(", "maxSegmentsPerTier", ")", "*", "tierSize", "\n", "tierSize", "=", "int64", "(", "float64", "(", "tierSize", ")", "*", "tierGrowth", ")", "\n", "}", "\n\n", "return", "budgetNumSegments", "\n", "}" ]
// Compute the number of segments that would be needed to cover the // totalSize, by climbing up a logarithmically growing staircase of // segment tiers.
[ "Compute", "the", "number", "of", "segments", "that", "would", "be", "needed", "to", "cover", "the", "totalSize", "by", "climbing", "up", "a", "logarithmically", "growing", "staircase", "of", "segment", "tiers", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/mergeplan/merge_plan.go#L242-L272
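The staircase in CalcBudget is easier to follow with numbers; a hedged sketch with made-up sizes follows (2600 units of data, a 100-unit first tier, at most 10 segments per tier, 10x tier growth).

// assumes: import ("fmt"; "github.com/blevesearch/bleve/index/scorch/mergeplan")
o := &mergeplan.MergePlanOptions{
	MaxSegmentsPerTier: 10,
	TierGrowth:         10.0,
}
budget := mergeplan.CalcBudget(2600, 100, o)
// tier 1 (size 100):  2600/100 = 26 >= 10 -> budget += 10, 1000 units consumed
// tier 2 (size 1000): 1600/1000 = 1.6 < 10 -> budget += ceil(1.6) = 2, done
fmt.Println(budget) // 12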
162,143
blevesearch/bleve
index/scorch/mergeplan/merge_plan.go
ScoreSegments
func ScoreSegments(segments []Segment, o *MergePlanOptions) float64 { var totBeforeSize int64 var totAfterSize int64 var totAfterSizeFloored int64 for _, segment := range segments { totBeforeSize += segment.FullSize() totAfterSize += segment.LiveSize() totAfterSizeFloored += o.RaiseToFloorSegmentSize(segment.LiveSize()) } if totBeforeSize <= 0 || totAfterSize <= 0 || totAfterSizeFloored <= 0 { return 0 } // Roughly guess the "balance" of the segments -- whether the // segments are about the same size. balance := float64(o.RaiseToFloorSegmentSize(segments[0].LiveSize())) / float64(totAfterSizeFloored) // Gently favor smaller merges over bigger ones. We don't want to // make the exponent too large else we end up with poor merges of // small segments in order to avoid the large merges. score := balance * math.Pow(float64(totAfterSize), 0.05) // Strongly favor merges that reclaim deletes. nonDelRatio := float64(totAfterSize) / float64(totBeforeSize) score *= math.Pow(nonDelRatio, o.ReclaimDeletesWeight) return score }
go
func ScoreSegments(segments []Segment, o *MergePlanOptions) float64 { var totBeforeSize int64 var totAfterSize int64 var totAfterSizeFloored int64 for _, segment := range segments { totBeforeSize += segment.FullSize() totAfterSize += segment.LiveSize() totAfterSizeFloored += o.RaiseToFloorSegmentSize(segment.LiveSize()) } if totBeforeSize <= 0 || totAfterSize <= 0 || totAfterSizeFloored <= 0 { return 0 } // Roughly guess the "balance" of the segments -- whether the // segments are about the same size. balance := float64(o.RaiseToFloorSegmentSize(segments[0].LiveSize())) / float64(totAfterSizeFloored) // Gently favor smaller merges over bigger ones. We don't want to // make the exponent too large else we end up with poor merges of // small segments in order to avoid the large merges. score := balance * math.Pow(float64(totAfterSize), 0.05) // Strongly favor merges that reclaim deletes. nonDelRatio := float64(totAfterSize) / float64(totBeforeSize) score *= math.Pow(nonDelRatio, o.ReclaimDeletesWeight) return score }
[ "func", "ScoreSegments", "(", "segments", "[", "]", "Segment", ",", "o", "*", "MergePlanOptions", ")", "float64", "{", "var", "totBeforeSize", "int64", "\n", "var", "totAfterSize", "int64", "\n", "var", "totAfterSizeFloored", "int64", "\n\n", "for", "_", ",", "segment", ":=", "range", "segments", "{", "totBeforeSize", "+=", "segment", ".", "FullSize", "(", ")", "\n", "totAfterSize", "+=", "segment", ".", "LiveSize", "(", ")", "\n", "totAfterSizeFloored", "+=", "o", ".", "RaiseToFloorSegmentSize", "(", "segment", ".", "LiveSize", "(", ")", ")", "\n", "}", "\n\n", "if", "totBeforeSize", "<=", "0", "||", "totAfterSize", "<=", "0", "||", "totAfterSizeFloored", "<=", "0", "{", "return", "0", "\n", "}", "\n\n", "// Roughly guess the \"balance\" of the segments -- whether the", "// segments are about the same size.", "balance", ":=", "float64", "(", "o", ".", "RaiseToFloorSegmentSize", "(", "segments", "[", "0", "]", ".", "LiveSize", "(", ")", ")", ")", "/", "float64", "(", "totAfterSizeFloored", ")", "\n\n", "// Gently favor smaller merges over bigger ones. We don't want to", "// make the exponent too large else we end up with poor merges of", "// small segments in order to avoid the large merges.", "score", ":=", "balance", "*", "math", ".", "Pow", "(", "float64", "(", "totAfterSize", ")", ",", "0.05", ")", "\n\n", "// Strongly favor merges that reclaim deletes.", "nonDelRatio", ":=", "float64", "(", "totAfterSize", ")", "/", "float64", "(", "totBeforeSize", ")", "\n\n", "score", "*=", "math", ".", "Pow", "(", "nonDelRatio", ",", "o", ".", "ReclaimDeletesWeight", ")", "\n\n", "return", "score", "\n", "}" ]
// Smaller result score is better.
[ "Smaller", "result", "score", "is", "better", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/mergeplan/merge_plan.go#L290-L322
162,144
blevesearch/bleve
search/search.go
Reset
func (dm *DocumentMatch) Reset() *DocumentMatch { // remember the []byte used for the IndexInternalID indexInternalID := dm.IndexInternalID // remember the []interface{} used for sort sort := dm.Sort // remember the FieldTermLocations backing array ftls := dm.FieldTermLocations for i := range ftls { // recycle the ArrayPositions of each location ftls[i].Location.ArrayPositions = ftls[i].Location.ArrayPositions[:0] } // idiom to copy over from empty DocumentMatch (0 allocations) *dm = DocumentMatch{} // reuse the []byte already allocated (and reset len to 0) dm.IndexInternalID = indexInternalID[:0] // reuse the []interface{} already allocated (and reset len to 0) dm.Sort = sort[:0] // reuse the FieldTermLocations already allocated (and reset len to 0) dm.FieldTermLocations = ftls[:0] return dm }
go
func (dm *DocumentMatch) Reset() *DocumentMatch { // remember the []byte used for the IndexInternalID indexInternalID := dm.IndexInternalID // remember the []interface{} used for sort sort := dm.Sort // remember the FieldTermLocations backing array ftls := dm.FieldTermLocations for i := range ftls { // recycle the ArrayPositions of each location ftls[i].Location.ArrayPositions = ftls[i].Location.ArrayPositions[:0] } // idiom to copy over from empty DocumentMatch (0 allocations) *dm = DocumentMatch{} // reuse the []byte already allocated (and reset len to 0) dm.IndexInternalID = indexInternalID[:0] // reuse the []interface{} already allocated (and reset len to 0) dm.Sort = sort[:0] // reuse the FieldTermLocations already allocated (and reset len to 0) dm.FieldTermLocations = ftls[:0] return dm }
[ "func", "(", "dm", "*", "DocumentMatch", ")", "Reset", "(", ")", "*", "DocumentMatch", "{", "// remember the []byte used for the IndexInternalID", "indexInternalID", ":=", "dm", ".", "IndexInternalID", "\n", "// remember the []interface{} used for sort", "sort", ":=", "dm", ".", "Sort", "\n", "// remember the FieldTermLocations backing array", "ftls", ":=", "dm", ".", "FieldTermLocations", "\n", "for", "i", ":=", "range", "ftls", "{", "// recycle the ArrayPositions of each location", "ftls", "[", "i", "]", ".", "Location", ".", "ArrayPositions", "=", "ftls", "[", "i", "]", ".", "Location", ".", "ArrayPositions", "[", ":", "0", "]", "\n", "}", "\n", "// idiom to copy over from empty DocumentMatch (0 allocations)", "*", "dm", "=", "DocumentMatch", "{", "}", "\n", "// reuse the []byte already allocated (and reset len to 0)", "dm", ".", "IndexInternalID", "=", "indexInternalID", "[", ":", "0", "]", "\n", "// reuse the []interface{} already allocated (and reset len to 0)", "dm", ".", "Sort", "=", "sort", "[", ":", "0", "]", "\n", "// reuse the FieldTermLocations already allocated (and reset len to 0)", "dm", ".", "FieldTermLocations", "=", "ftls", "[", ":", "0", "]", "\n", "return", "dm", "\n", "}" ]
// Reset allows an already allocated DocumentMatch to be reused
[ "Reset", "allows", "an", "already", "allocated", "DocumentMatch", "to", "be", "reused" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/search.go#L193-L212
162,145
blevesearch/bleve
search/search.go
Complete
func (dm *DocumentMatch) Complete(prealloc []Location) []Location { // transform the FieldTermLocations slice into the Locations map nlocs := len(dm.FieldTermLocations) if nlocs > 0 { if cap(prealloc) < nlocs { prealloc = make([]Location, nlocs) } prealloc = prealloc[:nlocs] var lastField string var tlm TermLocationMap var needsDedupe bool for i, ftl := range dm.FieldTermLocations { if lastField != ftl.Field { lastField = ftl.Field if dm.Locations == nil { dm.Locations = make(FieldTermLocationMap) } tlm = dm.Locations[ftl.Field] if tlm == nil { tlm = make(TermLocationMap) dm.Locations[ftl.Field] = tlm } } loc := &prealloc[i] *loc = ftl.Location if len(loc.ArrayPositions) > 0 { // copy loc.ArrayPositions = append(ArrayPositions(nil), loc.ArrayPositions...) } locs := tlm[ftl.Term] // if the loc is before or at the last location, then there // might be duplicates that need to be deduplicated if !needsDedupe && len(locs) > 0 { last := locs[len(locs)-1] cmp := loc.ArrayPositions.Compare(last.ArrayPositions) if cmp < 0 || (cmp == 0 && loc.Pos <= last.Pos) { needsDedupe = true } } tlm[ftl.Term] = append(locs, loc) dm.FieldTermLocations[i] = FieldTermLocation{ // recycle Location: Location{ ArrayPositions: ftl.Location.ArrayPositions[:0], }, } } if needsDedupe { for _, tlm := range dm.Locations { for term, locs := range tlm { tlm[term] = locs.Dedupe() } } } } dm.FieldTermLocations = dm.FieldTermLocations[:0] // recycle return prealloc }
go
func (dm *DocumentMatch) Complete(prealloc []Location) []Location { // transform the FieldTermLocations slice into the Locations map nlocs := len(dm.FieldTermLocations) if nlocs > 0 { if cap(prealloc) < nlocs { prealloc = make([]Location, nlocs) } prealloc = prealloc[:nlocs] var lastField string var tlm TermLocationMap var needsDedupe bool for i, ftl := range dm.FieldTermLocations { if lastField != ftl.Field { lastField = ftl.Field if dm.Locations == nil { dm.Locations = make(FieldTermLocationMap) } tlm = dm.Locations[ftl.Field] if tlm == nil { tlm = make(TermLocationMap) dm.Locations[ftl.Field] = tlm } } loc := &prealloc[i] *loc = ftl.Location if len(loc.ArrayPositions) > 0 { // copy loc.ArrayPositions = append(ArrayPositions(nil), loc.ArrayPositions...) } locs := tlm[ftl.Term] // if the loc is before or at the last location, then there // might be duplicates that need to be deduplicated if !needsDedupe && len(locs) > 0 { last := locs[len(locs)-1] cmp := loc.ArrayPositions.Compare(last.ArrayPositions) if cmp < 0 || (cmp == 0 && loc.Pos <= last.Pos) { needsDedupe = true } } tlm[ftl.Term] = append(locs, loc) dm.FieldTermLocations[i] = FieldTermLocation{ // recycle Location: Location{ ArrayPositions: ftl.Location.ArrayPositions[:0], }, } } if needsDedupe { for _, tlm := range dm.Locations { for term, locs := range tlm { tlm[term] = locs.Dedupe() } } } } dm.FieldTermLocations = dm.FieldTermLocations[:0] // recycle return prealloc }
[ "func", "(", "dm", "*", "DocumentMatch", ")", "Complete", "(", "prealloc", "[", "]", "Location", ")", "[", "]", "Location", "{", "// transform the FieldTermLocations slice into the Locations map", "nlocs", ":=", "len", "(", "dm", ".", "FieldTermLocations", ")", "\n", "if", "nlocs", ">", "0", "{", "if", "cap", "(", "prealloc", ")", "<", "nlocs", "{", "prealloc", "=", "make", "(", "[", "]", "Location", ",", "nlocs", ")", "\n", "}", "\n", "prealloc", "=", "prealloc", "[", ":", "nlocs", "]", "\n\n", "var", "lastField", "string", "\n", "var", "tlm", "TermLocationMap", "\n", "var", "needsDedupe", "bool", "\n\n", "for", "i", ",", "ftl", ":=", "range", "dm", ".", "FieldTermLocations", "{", "if", "lastField", "!=", "ftl", ".", "Field", "{", "lastField", "=", "ftl", ".", "Field", "\n\n", "if", "dm", ".", "Locations", "==", "nil", "{", "dm", ".", "Locations", "=", "make", "(", "FieldTermLocationMap", ")", "\n", "}", "\n\n", "tlm", "=", "dm", ".", "Locations", "[", "ftl", ".", "Field", "]", "\n", "if", "tlm", "==", "nil", "{", "tlm", "=", "make", "(", "TermLocationMap", ")", "\n", "dm", ".", "Locations", "[", "ftl", ".", "Field", "]", "=", "tlm", "\n", "}", "\n", "}", "\n\n", "loc", ":=", "&", "prealloc", "[", "i", "]", "\n", "*", "loc", "=", "ftl", ".", "Location", "\n\n", "if", "len", "(", "loc", ".", "ArrayPositions", ")", ">", "0", "{", "// copy", "loc", ".", "ArrayPositions", "=", "append", "(", "ArrayPositions", "(", "nil", ")", ",", "loc", ".", "ArrayPositions", "...", ")", "\n", "}", "\n\n", "locs", ":=", "tlm", "[", "ftl", ".", "Term", "]", "\n\n", "// if the loc is before or at the last location, then there", "// might be duplicates that need to be deduplicated", "if", "!", "needsDedupe", "&&", "len", "(", "locs", ")", ">", "0", "{", "last", ":=", "locs", "[", "len", "(", "locs", ")", "-", "1", "]", "\n", "cmp", ":=", "loc", ".", "ArrayPositions", ".", "Compare", "(", "last", ".", "ArrayPositions", ")", "\n", "if", "cmp", "<", "0", "||", "(", "cmp", "==", "0", "&&", "loc", ".", "Pos", "<=", "last", ".", "Pos", ")", "{", "needsDedupe", "=", "true", "\n", "}", "\n", "}", "\n\n", "tlm", "[", "ftl", ".", "Term", "]", "=", "append", "(", "locs", ",", "loc", ")", "\n\n", "dm", ".", "FieldTermLocations", "[", "i", "]", "=", "FieldTermLocation", "{", "// recycle", "Location", ":", "Location", "{", "ArrayPositions", ":", "ftl", ".", "Location", ".", "ArrayPositions", "[", ":", "0", "]", ",", "}", ",", "}", "\n", "}", "\n\n", "if", "needsDedupe", "{", "for", "_", ",", "tlm", ":=", "range", "dm", ".", "Locations", "{", "for", "term", ",", "locs", ":=", "range", "tlm", "{", "tlm", "[", "term", "]", "=", "locs", ".", "Dedupe", "(", ")", "\n", "}", "\n", "}", "\n", "}", "\n", "}", "\n\n", "dm", ".", "FieldTermLocations", "=", "dm", ".", "FieldTermLocations", "[", ":", "0", "]", "// recycle", "\n\n", "return", "prealloc", "\n", "}" ]
// Complete performs final preparation & transformation of the // DocumentMatch at the end of search processing, also allowing the // caller to provide an optional preallocated locations slice
[ "Complete", "performs", "final", "preparation", "&", "transformation", "of", "the", "DocumentMatch", "at", "the", "end", "of", "search", "processing", "also", "allowing", "the", "caller", "to", "provide", "an", "optional", "preallocated", "locations", "slice" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/search.go#L259-L327
162,146
blevesearch/bleve
geo/sloppy.go
earthDiameter
func earthDiameter(lat float64) float64 { index := math.Mod(math.Abs(lat)*radiusIndexer+0.5, float64(len(earthDiameterPerLatitude))) if math.IsNaN(index) { return 0 } return earthDiameterPerLatitude[int(index)] }
go
func earthDiameter(lat float64) float64 { index := math.Mod(math.Abs(lat)*radiusIndexer+0.5, float64(len(earthDiameterPerLatitude))) if math.IsNaN(index) { return 0 } return earthDiameterPerLatitude[int(index)] }
[ "func", "earthDiameter", "(", "lat", "float64", ")", "float64", "{", "index", ":=", "math", ".", "Mod", "(", "math", ".", "Abs", "(", "lat", ")", "*", "radiusIndexer", "+", "0.5", ",", "float64", "(", "len", "(", "earthDiameterPerLatitude", ")", ")", ")", "\n", "if", "math", ".", "IsNaN", "(", "index", ")", "{", "return", "0", "\n", "}", "\n", "return", "earthDiameterPerLatitude", "[", "int", "(", "index", ")", "]", "\n", "}" ]
// earthDiameter returns an estimation of the earth's diameter at the specified // latitude in kilometers
[ "earthDiameter", "returns", "an", "estimation", "of", "the", "earth", "s", "diameter", "at", "the", "specified", "latitude", "in", "kilometers" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/geo/sloppy.go#L141-L147
162,147
blevesearch/bleve
index/scorch/segment/zap/merge.go
Merge
func Merge(segments []*Segment, drops []*roaring.Bitmap, path string, chunkFactor uint32, closeCh chan struct{}, s seg.StatsReporter) ( [][]uint64, uint64, error) { segmentBases := make([]*SegmentBase, len(segments)) for segmenti, segment := range segments { segmentBases[segmenti] = &segment.SegmentBase } return MergeSegmentBases(segmentBases, drops, path, chunkFactor, closeCh, s) }
go
func Merge(segments []*Segment, drops []*roaring.Bitmap, path string, chunkFactor uint32, closeCh chan struct{}, s seg.StatsReporter) ( [][]uint64, uint64, error) { segmentBases := make([]*SegmentBase, len(segments)) for segmenti, segment := range segments { segmentBases[segmenti] = &segment.SegmentBase } return MergeSegmentBases(segmentBases, drops, path, chunkFactor, closeCh, s) }
[ "func", "Merge", "(", "segments", "[", "]", "*", "Segment", ",", "drops", "[", "]", "*", "roaring", ".", "Bitmap", ",", "path", "string", ",", "chunkFactor", "uint32", ",", "closeCh", "chan", "struct", "{", "}", ",", "s", "seg", ".", "StatsReporter", ")", "(", "[", "]", "[", "]", "uint64", ",", "uint64", ",", "error", ")", "{", "segmentBases", ":=", "make", "(", "[", "]", "*", "SegmentBase", ",", "len", "(", "segments", ")", ")", "\n", "for", "segmenti", ",", "segment", ":=", "range", "segments", "{", "segmentBases", "[", "segmenti", "]", "=", "&", "segment", ".", "SegmentBase", "\n", "}", "\n\n", "return", "MergeSegmentBases", "(", "segmentBases", ",", "drops", ",", "path", ",", "chunkFactor", ",", "closeCh", ",", "s", ")", "\n", "}" ]
// Merge takes a slice of zap segments and bit masks describing which // documents may be dropped, and creates a new segment containing the // remaining data. This new segment is built at the specified path, // with the provided chunkFactor.
[ "Merge", "takes", "a", "slice", "of", "zap", "segments", "and", "bit", "masks", "describing", "which", "documents", "may", "be", "dropped", "and", "creates", "a", "new", "segment", "containing", "the", "remaining", "data", ".", "This", "new", "segment", "is", "built", "at", "the", "specified", "path", "with", "the", "provided", "chunkFactor", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/merge.go#L48-L57
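A hedged sketch of driving the merge above with two already-open segments; the output path and the 1024 chunk factor are illustrative, nil drop bitmaps keep every document, and no stats reporter is wired in.

// assumes: import ("github.com/RoaringBitmap/roaring"; "github.com/blevesearch/bleve/index/scorch/segment/zap")
func mergeTwo(segA, segB *zap.Segment) error {
	closeCh := make(chan struct{}) // close it to abort the merge early
	newDocNums, _, err := zap.Merge(
		[]*zap.Segment{segA, segB},
		[]*roaring.Bitmap{nil, nil}, // nil drops: nothing is deleted
		"merged.zap",                // hypothetical output path
		1024,                        // chunk factor, illustrative
		closeCh, nil)                // no stats reporter
	if err != nil {
		return err
	}
	_ = newDocNums // per-input-segment remapping from old to new doc numbers
	return nil
}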
162,148
blevesearch/bleve
index/scorch/segment/zap/merge.go
mapFields
func mapFields(fields []string) map[string]uint16 { rv := make(map[string]uint16, len(fields)) for i, fieldName := range fields { rv[fieldName] = uint16(i) + 1 } return rv }
go
func mapFields(fields []string) map[string]uint16 { rv := make(map[string]uint16, len(fields)) for i, fieldName := range fields { rv[fieldName] = uint16(i) + 1 } return rv }
[ "func", "mapFields", "(", "fields", "[", "]", "string", ")", "map", "[", "string", "]", "uint16", "{", "rv", ":=", "make", "(", "map", "[", "string", "]", "uint16", ",", "len", "(", "fields", ")", ")", "\n", "for", "i", ",", "fieldName", ":=", "range", "fields", "{", "rv", "[", "fieldName", "]", "=", "uint16", "(", "i", ")", "+", "1", "\n", "}", "\n", "return", "rv", "\n", "}" ]
// mapFields takes the fieldsInv list and returns a map of fieldName // to fieldID+1
[ "mapFields", "takes", "the", "fieldsInv", "list", "and", "returns", "a", "map", "of", "fieldName", "to", "fieldID", "+", "1" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/merge.go#L160-L166
162,149
blevesearch/bleve
index/scorch/segment/zap/merge.go
computeNewDocCount
func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { var newDocCount uint64 for segI, segment := range segments { newDocCount += segment.numDocs if drops[segI] != nil { newDocCount -= drops[segI].GetCardinality() } } return newDocCount }
go
func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { var newDocCount uint64 for segI, segment := range segments { newDocCount += segment.numDocs if drops[segI] != nil { newDocCount -= drops[segI].GetCardinality() } } return newDocCount }
[ "func", "computeNewDocCount", "(", "segments", "[", "]", "*", "SegmentBase", ",", "drops", "[", "]", "*", "roaring", ".", "Bitmap", ")", "uint64", "{", "var", "newDocCount", "uint64", "\n", "for", "segI", ",", "segment", ":=", "range", "segments", "{", "newDocCount", "+=", "segment", ".", "numDocs", "\n", "if", "drops", "[", "segI", "]", "!=", "nil", "{", "newDocCount", "-=", "drops", "[", "segI", "]", ".", "GetCardinality", "(", ")", "\n", "}", "\n", "}", "\n", "return", "newDocCount", "\n", "}" ]
// computeNewDocCount determines how many documents will be in the newly // merged segment when obsoleted docs are dropped
[ "computeNewDocCount", "determines", "how", "many", "documents", "will", "be", "in", "the", "newly", "merged", "segment", "when", "obsoleted", "docs", "are", "dropped" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/merge.go#L170-L179
162,150
blevesearch/bleve
index/scorch/segment/zap/new.go
AnalysisResultsToSegmentBase
func AnalysisResultsToSegmentBase(results []*index.AnalysisResult, chunkFactor uint32) (*SegmentBase, uint64, error) { s := interimPool.Get().(*interim) var br bytes.Buffer if s.lastNumDocs > 0 { // use previous results to initialize the buf with an estimate // size, but note that the interim instance comes from a // global interimPool, so multiple scorch instances indexing // different docs can lead to low quality estimates estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * NewSegmentBufferNumResultsFactor) estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * NewSegmentBufferAvgBytesPerDocFactor) br.Grow(estimateAvgBytesPerDoc * estimateNumResults) } s.results = results s.chunkFactor = chunkFactor s.w = NewCountHashWriter(&br) storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, err := s.convert() if err != nil { return nil, uint64(0), err } sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkFactor, s.FieldsMap, s.FieldsInv, uint64(len(results)), storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) if err == nil && s.reset() == nil { s.lastNumDocs = len(results) s.lastOutSize = len(br.Bytes()) interimPool.Put(s) } return sb, uint64(len(br.Bytes())), err }
go
func AnalysisResultsToSegmentBase(results []*index.AnalysisResult, chunkFactor uint32) (*SegmentBase, uint64, error) { s := interimPool.Get().(*interim) var br bytes.Buffer if s.lastNumDocs > 0 { // use previous results to initialize the buf with an estimate // size, but note that the interim instance comes from a // global interimPool, so multiple scorch instances indexing // different docs can lead to low quality estimates estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * NewSegmentBufferNumResultsFactor) estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * NewSegmentBufferAvgBytesPerDocFactor) br.Grow(estimateAvgBytesPerDoc * estimateNumResults) } s.results = results s.chunkFactor = chunkFactor s.w = NewCountHashWriter(&br) storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, err := s.convert() if err != nil { return nil, uint64(0), err } sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkFactor, s.FieldsMap, s.FieldsInv, uint64(len(results)), storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) if err == nil && s.reset() == nil { s.lastNumDocs = len(results) s.lastOutSize = len(br.Bytes()) interimPool.Put(s) } return sb, uint64(len(br.Bytes())), err }
[ "func", "AnalysisResultsToSegmentBase", "(", "results", "[", "]", "*", "index", ".", "AnalysisResult", ",", "chunkFactor", "uint32", ")", "(", "*", "SegmentBase", ",", "uint64", ",", "error", ")", "{", "s", ":=", "interimPool", ".", "Get", "(", ")", ".", "(", "*", "interim", ")", "\n\n", "var", "br", "bytes", ".", "Buffer", "\n", "if", "s", ".", "lastNumDocs", ">", "0", "{", "// use previous results to initialize the buf with an estimate", "// size, but note that the interim instance comes from a", "// global interimPool, so multiple scorch instances indexing", "// different docs can lead to low quality estimates", "estimateAvgBytesPerDoc", ":=", "int", "(", "float64", "(", "s", ".", "lastOutSize", "/", "s", ".", "lastNumDocs", ")", "*", "NewSegmentBufferNumResultsFactor", ")", "\n", "estimateNumResults", ":=", "int", "(", "float64", "(", "len", "(", "results", ")", "+", "NewSegmentBufferNumResultsBump", ")", "*", "NewSegmentBufferAvgBytesPerDocFactor", ")", "\n", "br", ".", "Grow", "(", "estimateAvgBytesPerDoc", "*", "estimateNumResults", ")", "\n", "}", "\n\n", "s", ".", "results", "=", "results", "\n", "s", ".", "chunkFactor", "=", "chunkFactor", "\n", "s", ".", "w", "=", "NewCountHashWriter", "(", "&", "br", ")", "\n\n", "storedIndexOffset", ",", "fieldsIndexOffset", ",", "fdvIndexOffset", ",", "dictOffsets", ",", "err", ":=", "s", ".", "convert", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "uint64", "(", "0", ")", ",", "err", "\n", "}", "\n\n", "sb", ",", "err", ":=", "InitSegmentBase", "(", "br", ".", "Bytes", "(", ")", ",", "s", ".", "w", ".", "Sum32", "(", ")", ",", "chunkFactor", ",", "s", ".", "FieldsMap", ",", "s", ".", "FieldsInv", ",", "uint64", "(", "len", "(", "results", ")", ")", ",", "storedIndexOffset", ",", "fieldsIndexOffset", ",", "fdvIndexOffset", ",", "dictOffsets", ")", "\n\n", "if", "err", "==", "nil", "&&", "s", ".", "reset", "(", ")", "==", "nil", "{", "s", ".", "lastNumDocs", "=", "len", "(", "results", ")", "\n", "s", ".", "lastOutSize", "=", "len", "(", "br", ".", "Bytes", "(", ")", ")", "\n", "interimPool", ".", "Put", "(", "s", ")", "\n", "}", "\n\n", "return", "sb", ",", "uint64", "(", "len", "(", "br", ".", "Bytes", "(", ")", ")", ")", ",", "err", "\n", "}" ]
// AnalysisResultsToSegmentBase produces an in-memory zap-encoded // SegmentBase from analysis results
[ "AnalysisResultsToSegmentBase", "produces", "an", "in", "-", "memory", "zap", "-", "encoded", "SegmentBase", "from", "analysis", "results" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/new.go#L46-L84
162,151
blevesearch/bleve
index/scorch/persister.go
persistSnapshotMaybeMerge
func (s *Scorch) persistSnapshotMaybeMerge(snapshot *IndexSnapshot) ( bool, error) { // collect the in-memory zap segments (SegmentBase instances) var sbs []*zap.SegmentBase var sbsDrops []*roaring.Bitmap var sbsIndexes []int for i, segmentSnapshot := range snapshot.segment { if sb, ok := segmentSnapshot.segment.(*zap.SegmentBase); ok { sbs = append(sbs, sb) sbsDrops = append(sbsDrops, segmentSnapshot.deleted) sbsIndexes = append(sbsIndexes, i) } } if len(sbs) < DefaultMinSegmentsForInMemoryMerge { return false, nil } newSnapshot, newSegmentID, err := s.mergeSegmentBases( snapshot, sbs, sbsDrops, sbsIndexes, DefaultChunkFactor) if err != nil { return false, err } if newSnapshot == nil { return false, nil } defer func() { _ = newSnapshot.DecRef() }() mergedSegmentIDs := map[uint64]struct{}{} for _, idx := range sbsIndexes { mergedSegmentIDs[snapshot.segment[idx].id] = struct{}{} } // construct a snapshot that's logically equivalent to the input // snapshot, but with merged segments replaced by the new segment equiv := &IndexSnapshot{ parent: snapshot.parent, segment: make([]*SegmentSnapshot, 0, len(snapshot.segment)), internal: snapshot.internal, epoch: snapshot.epoch, creator: "persistSnapshotMaybeMerge", } // copy to the equiv the segments that weren't replaced for _, segment := range snapshot.segment { if _, wasMerged := mergedSegmentIDs[segment.id]; !wasMerged { equiv.segment = append(equiv.segment, segment) } } // append to the equiv the new segment for _, segment := range newSnapshot.segment { if segment.id == newSegmentID { equiv.segment = append(equiv.segment, &SegmentSnapshot{ id: newSegmentID, segment: segment.segment, deleted: nil, // nil since merging handled deletions }) break } } err = s.persistSnapshotDirect(equiv) if err != nil { return false, err } return true, nil }
go
func (s *Scorch) persistSnapshotMaybeMerge(snapshot *IndexSnapshot) ( bool, error) { // collect the in-memory zap segments (SegmentBase instances) var sbs []*zap.SegmentBase var sbsDrops []*roaring.Bitmap var sbsIndexes []int for i, segmentSnapshot := range snapshot.segment { if sb, ok := segmentSnapshot.segment.(*zap.SegmentBase); ok { sbs = append(sbs, sb) sbsDrops = append(sbsDrops, segmentSnapshot.deleted) sbsIndexes = append(sbsIndexes, i) } } if len(sbs) < DefaultMinSegmentsForInMemoryMerge { return false, nil } newSnapshot, newSegmentID, err := s.mergeSegmentBases( snapshot, sbs, sbsDrops, sbsIndexes, DefaultChunkFactor) if err != nil { return false, err } if newSnapshot == nil { return false, nil } defer func() { _ = newSnapshot.DecRef() }() mergedSegmentIDs := map[uint64]struct{}{} for _, idx := range sbsIndexes { mergedSegmentIDs[snapshot.segment[idx].id] = struct{}{} } // construct a snapshot that's logically equivalent to the input // snapshot, but with merged segments replaced by the new segment equiv := &IndexSnapshot{ parent: snapshot.parent, segment: make([]*SegmentSnapshot, 0, len(snapshot.segment)), internal: snapshot.internal, epoch: snapshot.epoch, creator: "persistSnapshotMaybeMerge", } // copy to the equiv the segments that weren't replaced for _, segment := range snapshot.segment { if _, wasMerged := mergedSegmentIDs[segment.id]; !wasMerged { equiv.segment = append(equiv.segment, segment) } } // append to the equiv the new segment for _, segment := range newSnapshot.segment { if segment.id == newSegmentID { equiv.segment = append(equiv.segment, &SegmentSnapshot{ id: newSegmentID, segment: segment.segment, deleted: nil, // nil since merging handled deletions }) break } } err = s.persistSnapshotDirect(equiv) if err != nil { return false, err } return true, nil }
[ "func", "(", "s", "*", "Scorch", ")", "persistSnapshotMaybeMerge", "(", "snapshot", "*", "IndexSnapshot", ")", "(", "bool", ",", "error", ")", "{", "// collect the in-memory zap segments (SegmentBase instances)", "var", "sbs", "[", "]", "*", "zap", ".", "SegmentBase", "\n", "var", "sbsDrops", "[", "]", "*", "roaring", ".", "Bitmap", "\n", "var", "sbsIndexes", "[", "]", "int", "\n\n", "for", "i", ",", "segmentSnapshot", ":=", "range", "snapshot", ".", "segment", "{", "if", "sb", ",", "ok", ":=", "segmentSnapshot", ".", "segment", ".", "(", "*", "zap", ".", "SegmentBase", ")", ";", "ok", "{", "sbs", "=", "append", "(", "sbs", ",", "sb", ")", "\n", "sbsDrops", "=", "append", "(", "sbsDrops", ",", "segmentSnapshot", ".", "deleted", ")", "\n", "sbsIndexes", "=", "append", "(", "sbsIndexes", ",", "i", ")", "\n", "}", "\n", "}", "\n\n", "if", "len", "(", "sbs", ")", "<", "DefaultMinSegmentsForInMemoryMerge", "{", "return", "false", ",", "nil", "\n", "}", "\n\n", "newSnapshot", ",", "newSegmentID", ",", "err", ":=", "s", ".", "mergeSegmentBases", "(", "snapshot", ",", "sbs", ",", "sbsDrops", ",", "sbsIndexes", ",", "DefaultChunkFactor", ")", "\n", "if", "err", "!=", "nil", "{", "return", "false", ",", "err", "\n", "}", "\n", "if", "newSnapshot", "==", "nil", "{", "return", "false", ",", "nil", "\n", "}", "\n\n", "defer", "func", "(", ")", "{", "_", "=", "newSnapshot", ".", "DecRef", "(", ")", "\n", "}", "(", ")", "\n\n", "mergedSegmentIDs", ":=", "map", "[", "uint64", "]", "struct", "{", "}", "{", "}", "\n", "for", "_", ",", "idx", ":=", "range", "sbsIndexes", "{", "mergedSegmentIDs", "[", "snapshot", ".", "segment", "[", "idx", "]", ".", "id", "]", "=", "struct", "{", "}", "{", "}", "\n", "}", "\n\n", "// construct a snapshot that's logically equivalent to the input", "// snapshot, but with merged segments replaced by the new segment", "equiv", ":=", "&", "IndexSnapshot", "{", "parent", ":", "snapshot", ".", "parent", ",", "segment", ":", "make", "(", "[", "]", "*", "SegmentSnapshot", ",", "0", ",", "len", "(", "snapshot", ".", "segment", ")", ")", ",", "internal", ":", "snapshot", ".", "internal", ",", "epoch", ":", "snapshot", ".", "epoch", ",", "creator", ":", "\"", "\"", ",", "}", "\n\n", "// copy to the equiv the segments that weren't replaced", "for", "_", ",", "segment", ":=", "range", "snapshot", ".", "segment", "{", "if", "_", ",", "wasMerged", ":=", "mergedSegmentIDs", "[", "segment", ".", "id", "]", ";", "!", "wasMerged", "{", "equiv", ".", "segment", "=", "append", "(", "equiv", ".", "segment", ",", "segment", ")", "\n", "}", "\n", "}", "\n\n", "// append to the equiv the new segment", "for", "_", ",", "segment", ":=", "range", "newSnapshot", ".", "segment", "{", "if", "segment", ".", "id", "==", "newSegmentID", "{", "equiv", ".", "segment", "=", "append", "(", "equiv", ".", "segment", ",", "&", "SegmentSnapshot", "{", "id", ":", "newSegmentID", ",", "segment", ":", "segment", ".", "segment", ",", "deleted", ":", "nil", ",", "// nil since merging handled deletions", "}", ")", "\n", "break", "\n", "}", "\n", "}", "\n\n", "err", "=", "s", ".", "persistSnapshotDirect", "(", "equiv", ")", "\n", "if", "err", "!=", "nil", "{", "return", "false", ",", "err", "\n", "}", "\n\n", "return", "true", ",", "nil", "\n", "}" ]
// persistSnapshotMaybeMerge examines the snapshot and might merge and // persist the in-memory zap segments if there are enough of them
[ "persistSnapshotMaybeMerge", "examines", "the", "snapshot", "and", "might", "merge", "and", "persist", "the", "in", "-", "memory", "zap", "segments", "if", "there", "are", "enough", "of", "them" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/persister.go#L333-L405
162,152
blevesearch/bleve
index/scorch/persister.go
removeOldBoltSnapshots
func (s *Scorch) removeOldBoltSnapshots() (numRemoved int, err error) { persistedEpochs, err := s.RootBoltSnapshotEpochs() if err != nil { return 0, err } if len(persistedEpochs) <= s.numSnapshotsToKeep { // we need to keep everything return 0, nil } // make a map of epochs to protect from deletion protectedEpochs := make(map[uint64]struct{}, s.numSnapshotsToKeep) for _, epoch := range persistedEpochs[0:s.numSnapshotsToKeep] { protectedEpochs[epoch] = struct{}{} } var epochsToRemove []uint64 var newEligible []uint64 s.rootLock.Lock() for _, epoch := range s.eligibleForRemoval { if _, ok := protectedEpochs[epoch]; ok { // protected newEligible = append(newEligible, epoch) } else { epochsToRemove = append(epochsToRemove, epoch) } } s.eligibleForRemoval = newEligible s.rootLock.Unlock() if len(epochsToRemove) == 0 { return 0, nil } tx, err := s.rootBolt.Begin(true) if err != nil { return 0, err } defer func() { if err == nil { err = tx.Commit() } else { _ = tx.Rollback() } if err == nil { err = s.rootBolt.Sync() } }() snapshots := tx.Bucket(boltSnapshotsBucket) if snapshots == nil { return 0, nil } for _, epochToRemove := range epochsToRemove { k := segment.EncodeUvarintAscending(nil, epochToRemove) err = snapshots.DeleteBucket(k) if err == bolt.ErrBucketNotFound { err = nil } if err == nil { numRemoved++ } } return numRemoved, err }
go
func (s *Scorch) removeOldBoltSnapshots() (numRemoved int, err error) { persistedEpochs, err := s.RootBoltSnapshotEpochs() if err != nil { return 0, err } if len(persistedEpochs) <= s.numSnapshotsToKeep { // we need to keep everything return 0, nil } // make a map of epochs to protect from deletion protectedEpochs := make(map[uint64]struct{}, s.numSnapshotsToKeep) for _, epoch := range persistedEpochs[0:s.numSnapshotsToKeep] { protectedEpochs[epoch] = struct{}{} } var epochsToRemove []uint64 var newEligible []uint64 s.rootLock.Lock() for _, epoch := range s.eligibleForRemoval { if _, ok := protectedEpochs[epoch]; ok { // protected newEligible = append(newEligible, epoch) } else { epochsToRemove = append(epochsToRemove, epoch) } } s.eligibleForRemoval = newEligible s.rootLock.Unlock() if len(epochsToRemove) == 0 { return 0, nil } tx, err := s.rootBolt.Begin(true) if err != nil { return 0, err } defer func() { if err == nil { err = tx.Commit() } else { _ = tx.Rollback() } if err == nil { err = s.rootBolt.Sync() } }() snapshots := tx.Bucket(boltSnapshotsBucket) if snapshots == nil { return 0, nil } for _, epochToRemove := range epochsToRemove { k := segment.EncodeUvarintAscending(nil, epochToRemove) err = snapshots.DeleteBucket(k) if err == bolt.ErrBucketNotFound { err = nil } if err == nil { numRemoved++ } } return numRemoved, err }
[ "func", "(", "s", "*", "Scorch", ")", "removeOldBoltSnapshots", "(", ")", "(", "numRemoved", "int", ",", "err", "error", ")", "{", "persistedEpochs", ",", "err", ":=", "s", ".", "RootBoltSnapshotEpochs", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "0", ",", "err", "\n", "}", "\n\n", "if", "len", "(", "persistedEpochs", ")", "<=", "s", ".", "numSnapshotsToKeep", "{", "// we need to keep everything", "return", "0", ",", "nil", "\n", "}", "\n\n", "// make a map of epochs to protect from deletion", "protectedEpochs", ":=", "make", "(", "map", "[", "uint64", "]", "struct", "{", "}", ",", "s", ".", "numSnapshotsToKeep", ")", "\n", "for", "_", ",", "epoch", ":=", "range", "persistedEpochs", "[", "0", ":", "s", ".", "numSnapshotsToKeep", "]", "{", "protectedEpochs", "[", "epoch", "]", "=", "struct", "{", "}", "{", "}", "\n", "}", "\n\n", "var", "epochsToRemove", "[", "]", "uint64", "\n", "var", "newEligible", "[", "]", "uint64", "\n", "s", ".", "rootLock", ".", "Lock", "(", ")", "\n", "for", "_", ",", "epoch", ":=", "range", "s", ".", "eligibleForRemoval", "{", "if", "_", ",", "ok", ":=", "protectedEpochs", "[", "epoch", "]", ";", "ok", "{", "// protected", "newEligible", "=", "append", "(", "newEligible", ",", "epoch", ")", "\n", "}", "else", "{", "epochsToRemove", "=", "append", "(", "epochsToRemove", ",", "epoch", ")", "\n", "}", "\n", "}", "\n", "s", ".", "eligibleForRemoval", "=", "newEligible", "\n", "s", ".", "rootLock", ".", "Unlock", "(", ")", "\n\n", "if", "len", "(", "epochsToRemove", ")", "==", "0", "{", "return", "0", ",", "nil", "\n", "}", "\n\n", "tx", ",", "err", ":=", "s", ".", "rootBolt", ".", "Begin", "(", "true", ")", "\n", "if", "err", "!=", "nil", "{", "return", "0", ",", "err", "\n", "}", "\n", "defer", "func", "(", ")", "{", "if", "err", "==", "nil", "{", "err", "=", "tx", ".", "Commit", "(", ")", "\n", "}", "else", "{", "_", "=", "tx", ".", "Rollback", "(", ")", "\n", "}", "\n", "if", "err", "==", "nil", "{", "err", "=", "s", ".", "rootBolt", ".", "Sync", "(", ")", "\n", "}", "\n", "}", "(", ")", "\n\n", "snapshots", ":=", "tx", ".", "Bucket", "(", "boltSnapshotsBucket", ")", "\n", "if", "snapshots", "==", "nil", "{", "return", "0", ",", "nil", "\n", "}", "\n\n", "for", "_", ",", "epochToRemove", ":=", "range", "epochsToRemove", "{", "k", ":=", "segment", ".", "EncodeUvarintAscending", "(", "nil", ",", "epochToRemove", ")", "\n", "err", "=", "snapshots", ".", "DeleteBucket", "(", "k", ")", "\n", "if", "err", "==", "bolt", ".", "ErrBucketNotFound", "{", "err", "=", "nil", "\n", "}", "\n", "if", "err", "==", "nil", "{", "numRemoved", "++", "\n", "}", "\n", "}", "\n\n", "return", "numRemoved", ",", "err", "\n", "}" ]
// Removes enough snapshots from the rootBolt so that the // s.eligibleForRemoval stays under the NumSnapshotsToKeep policy.
[ "Removes", "enough", "snapshots", "from", "the", "rootBolt", "so", "that", "the", "s", ".", "eligibleForRemoval", "stays", "under", "the", "NumSnapshotsToKeep", "policy", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/persister.go#L766-L833
162,153
blevesearch/bleve
index/scorch/segment/zap/contentcoder.go
newChunkedContentCoder
func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, w io.Writer, progressiveWrite bool) *chunkedContentCoder { total := maxDocNum/chunkSize + 1 rv := &chunkedContentCoder{ chunkSize: chunkSize, chunkLens: make([]uint64, total), chunkMeta: make([]MetaData, 0, total), w: w, progressiveWrite: progressiveWrite, } return rv }
go
func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, w io.Writer, progressiveWrite bool) *chunkedContentCoder { total := maxDocNum/chunkSize + 1 rv := &chunkedContentCoder{ chunkSize: chunkSize, chunkLens: make([]uint64, total), chunkMeta: make([]MetaData, 0, total), w: w, progressiveWrite: progressiveWrite, } return rv }
[ "func", "newChunkedContentCoder", "(", "chunkSize", "uint64", ",", "maxDocNum", "uint64", ",", "w", "io", ".", "Writer", ",", "progressiveWrite", "bool", ")", "*", "chunkedContentCoder", "{", "total", ":=", "maxDocNum", "/", "chunkSize", "+", "1", "\n", "rv", ":=", "&", "chunkedContentCoder", "{", "chunkSize", ":", "chunkSize", ",", "chunkLens", ":", "make", "(", "[", "]", "uint64", ",", "total", ")", ",", "chunkMeta", ":", "make", "(", "[", "]", "MetaData", ",", "0", ",", "total", ")", ",", "w", ":", "w", ",", "progressiveWrite", ":", "progressiveWrite", ",", "}", "\n\n", "return", "rv", "\n", "}" ]
// newChunkedContentCoder returns a new chunk content coder which // packs data into chunks based on the provided chunkSize
[ "newChunkedContentCoder", "returns", "a", "new", "chunk", "content", "coder", "which", "packs", "data", "into", "chunks", "based", "on", "the", "provided", "chunkSize" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/contentcoder.go#L62-L74
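A small arithmetic sketch of the chunk bucketing behind newChunkedContentCoder above: the coder allocates maxDocNum/chunkSize + 1 chunk-length slots so the highest document number still maps to a valid chunk, and a document lands in chunk docNum/chunkSize. The helper names and concrete numbers below are illustrative only.

package docexample

// chunkFor mirrors the bucketing used by the chunked content coder:
// a document's chunk index is its doc number divided by the chunk size.
func chunkFor(docNum, chunkSize uint64) uint64 {
	return docNum / chunkSize
}

// totalChunks mirrors the up-front allocation in newChunkedContentCoder,
// e.g. maxDocNum=4500 with chunkSize=1024 yields 5 chunks (0 through 4).
func totalChunks(maxDocNum, chunkSize uint64) uint64 {
	return maxDocNum/chunkSize + 1
}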
162,154
blevesearch/bleve
index/scorch/segment/zap/contentcoder.go
Reset
func (c *chunkedContentCoder) Reset() { c.currChunk = 0 c.final = c.final[:0] c.chunkBuf.Reset() c.chunkMetaBuf.Reset() for i := range c.chunkLens { c.chunkLens[i] = 0 } c.chunkMeta = c.chunkMeta[:0] }
go
func (c *chunkedContentCoder) Reset() { c.currChunk = 0 c.final = c.final[:0] c.chunkBuf.Reset() c.chunkMetaBuf.Reset() for i := range c.chunkLens { c.chunkLens[i] = 0 } c.chunkMeta = c.chunkMeta[:0] }
[ "func", "(", "c", "*", "chunkedContentCoder", ")", "Reset", "(", ")", "{", "c", ".", "currChunk", "=", "0", "\n", "c", ".", "final", "=", "c", ".", "final", "[", ":", "0", "]", "\n", "c", ".", "chunkBuf", ".", "Reset", "(", ")", "\n", "c", ".", "chunkMetaBuf", ".", "Reset", "(", ")", "\n", "for", "i", ":=", "range", "c", ".", "chunkLens", "{", "c", ".", "chunkLens", "[", "i", "]", "=", "0", "\n", "}", "\n", "c", ".", "chunkMeta", "=", "c", ".", "chunkMeta", "[", ":", "0", "]", "\n", "}" ]
// Reset lets you reuse this chunked content coder. Buffers are reset // and reused. You cannot change the chunk size.
[ "Reset", "lets", "you", "reuse", "this", "chunked", "content", "coder", ".", "Buffers", "are", "reset", "and", "reused", ".", "You", "cannot", "change", "the", "chunk", "size", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/contentcoder.go#L78-L87
162,155
blevesearch/bleve
index/scorch/segment/zap/contentcoder.go
ReadDocValueBoundary
func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { var start uint64 if chunk > 0 { start = metaHeaders[chunk-1].DocDvOffset } return start, metaHeaders[chunk].DocDvOffset }
go
func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { var start uint64 if chunk > 0 { start = metaHeaders[chunk-1].DocDvOffset } return start, metaHeaders[chunk].DocDvOffset }
[ "func", "ReadDocValueBoundary", "(", "chunk", "int", ",", "metaHeaders", "[", "]", "MetaData", ")", "(", "uint64", ",", "uint64", ")", "{", "var", "start", "uint64", "\n", "if", "chunk", ">", "0", "{", "start", "=", "metaHeaders", "[", "chunk", "-", "1", "]", ".", "DocDvOffset", "\n", "}", "\n", "return", "start", ",", "metaHeaders", "[", "chunk", "]", ".", "DocDvOffset", "\n", "}" ]
// ReadDocValueBoundary elicits the start, end offsets from a // metaData header slice
[ "ReadDocValueBoundary", "elicits", "the", "start", "end", "offsets", "from", "a", "metaData", "header", "slice" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/contentcoder.go#L224-L230
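A minimal sketch of how ReadDocValueBoundary turns consecutive DocDvOffset values into a chunk's [start, end) range; the offsets below are made up, and only the DocDvOffset field of zap.MetaData is populated.

package docexample

import "github.com/blevesearch/bleve/index/scorch/segment/zap"

// chunkOneRange returns the doc-value byte range for chunk 1. With these
// made-up offsets chunk 0 ends at 10 and chunk 1 ends at 25, so the call
// yields start=10, end=25.
func chunkOneRange() (uint64, uint64) {
	metas := []zap.MetaData{
		{DocDvOffset: 10},
		{DocDvOffset: 25},
		{DocDvOffset: 40},
	}
	return zap.ReadDocValueBoundary(1, metas)
}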
162,156
blevesearch/bleve
search.go
NewFacetRequest
func NewFacetRequest(field string, size int) *FacetRequest { return &FacetRequest{ Field: field, Size: size, } }
go
func NewFacetRequest(field string, size int) *FacetRequest { return &FacetRequest{ Field: field, Size: size, } }
[ "func", "NewFacetRequest", "(", "field", "string", ",", "size", "int", ")", "*", "FacetRequest", "{", "return", "&", "FacetRequest", "{", "Field", ":", "field", ",", "Size", ":", "size", ",", "}", "\n", "}" ]
// NewFacetRequest creates a facet on the specified // field that limits the number of entries to the // specified size.
[ "NewFacetRequest", "creates", "a", "facet", "on", "the", "specified", "field", "that", "limits", "the", "number", "of", "entries", "to", "the", "specified", "size", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L169-L174
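A minimal usage sketch for NewFacetRequest; the "tags" field name is illustrative, and the facet still has to be attached to a SearchRequest with AddFacet (see that entry further below).

package docexample

import "github.com/blevesearch/bleve"

// topTagsFacet asks for the 5 most frequent terms in a hypothetical
// "tags" field.
func topTagsFacet() *bleve.FacetRequest {
	return bleve.NewFacetRequest("tags", 5)
}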
162,157
blevesearch/bleve
search.go
AddDateTimeRangeString
func (fr *FacetRequest) AddDateTimeRangeString(name string, start, end *string) { if fr.DateTimeRanges == nil { fr.DateTimeRanges = make([]*dateTimeRange, 0, 1) } fr.DateTimeRanges = append(fr.DateTimeRanges, &dateTimeRange{Name: name, startString: start, endString: end}) }
go
func (fr *FacetRequest) AddDateTimeRangeString(name string, start, end *string) { if fr.DateTimeRanges == nil { fr.DateTimeRanges = make([]*dateTimeRange, 0, 1) } fr.DateTimeRanges = append(fr.DateTimeRanges, &dateTimeRange{Name: name, startString: start, endString: end}) }
[ "func", "(", "fr", "*", "FacetRequest", ")", "AddDateTimeRangeString", "(", "name", "string", ",", "start", ",", "end", "*", "string", ")", "{", "if", "fr", ".", "DateTimeRanges", "==", "nil", "{", "fr", ".", "DateTimeRanges", "=", "make", "(", "[", "]", "*", "dateTimeRange", ",", "0", ",", "1", ")", "\n", "}", "\n", "fr", ".", "DateTimeRanges", "=", "append", "(", "fr", ".", "DateTimeRanges", ",", "&", "dateTimeRange", "{", "Name", ":", "name", ",", "startString", ":", "start", ",", "endString", ":", "end", "}", ")", "\n", "}" ]
// AddDateTimeRangeString adds a bucket to a field // containing date values.
[ "AddDateTimeRangeString", "adds", "a", "bucket", "to", "a", "field", "containing", "date", "values", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L189-L195
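A hedged sketch of AddDateTimeRangeString on a hypothetical "updated" field; the bucket names and cutoff are invented, and passing nil for one end is assumed to leave that side of the range open.

package docexample

import "github.com/blevesearch/bleve"

// updatedFacet buckets documents by a hypothetical "updated" datetime
// field into "old" (before 2019) and "recent" (2019 onwards).
func updatedFacet() *bleve.FacetRequest {
	cutoff := "2019-01-01T00:00:00Z"
	f := bleve.NewFacetRequest("updated", 2)
	f.AddDateTimeRangeString("old", nil, &cutoff)
	f.AddDateTimeRangeString("recent", &cutoff, nil)
	return f
}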
162,158
blevesearch/bleve
search.go
AddFacet
func (r *SearchRequest) AddFacet(facetName string, f *FacetRequest) { if r.Facets == nil { r.Facets = make(FacetsRequest, 1) } r.Facets[facetName] = f }
go
func (r *SearchRequest) AddFacet(facetName string, f *FacetRequest) { if r.Facets == nil { r.Facets = make(FacetsRequest, 1) } r.Facets[facetName] = f }
[ "func", "(", "r", "*", "SearchRequest", ")", "AddFacet", "(", "facetName", "string", ",", "f", "*", "FacetRequest", ")", "{", "if", "r", ".", "Facets", "==", "nil", "{", "r", ".", "Facets", "=", "make", "(", "FacetsRequest", ",", "1", ")", "\n", "}", "\n", "r", ".", "Facets", "[", "facetName", "]", "=", "f", "\n", "}" ]
// AddFacet adds a FacetRequest to this SearchRequest
[ "AddFacet", "adds", "a", "FacetRequest", "to", "this", "SearchRequest" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L292-L297
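A minimal sketch attaching facets to a request with AddFacet; the facet and field names are illustrative.

package docexample

import "github.com/blevesearch/bleve"

// requestWithFacets builds a match-all request that also computes two
// term facets alongside the hits.
func requestWithFacets() *bleve.SearchRequest {
	req := bleve.NewSearchRequest(bleve.NewMatchAllQuery())
	req.AddFacet("top_tags", bleve.NewFacetRequest("tags", 5))
	req.AddFacet("by_type", bleve.NewFacetRequest("type", 3))
	return req
}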
162,159
blevesearch/bleve
search.go
SortBy
func (r *SearchRequest) SortBy(order []string) { so := search.ParseSortOrderStrings(order) r.Sort = so }
go
func (r *SearchRequest) SortBy(order []string) { so := search.ParseSortOrderStrings(order) r.Sort = so }
[ "func", "(", "r", "*", "SearchRequest", ")", "SortBy", "(", "order", "[", "]", "string", ")", "{", "so", ":=", "search", ".", "ParseSortOrderStrings", "(", "order", ")", "\n", "r", ".", "Sort", "=", "so", "\n", "}" ]
// SortBy changes the request to use the requested sort order. // This form uses the simplified syntax with an array of strings; // each string can either be a field name // or the magic values _id and _score, which refer to the doc id and search score. // Any of these values can optionally be prefixed with - to reverse the order.
[ "SortBy", "changes", "the", "request", "to", "use", "the", "requested", "sort", "order", "this", "form", "uses", "the", "simplified", "syntax", "with", "an", "array", "of", "strings", "each", "string", "can", "either", "be", "a", "field", "name", "or", "the", "magic", "value", "_id", "and", "_score", "which", "refer", "to", "the", "doc", "id", "and", "search", "score", "any", "of", "these", "values", "can", "optionally", "be", "prefixed", "with", "-", "to", "reverse", "the", "order" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L304-L307
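A minimal SortBy sketch using the simplified string syntax; the "name" field is illustrative.

package docexample

import "github.com/blevesearch/bleve"

// sortedRequest sorts by the "name" field ascending, then by descending
// score, then by document id as a final tie-breaker.
func sortedRequest() *bleve.SearchRequest {
	req := bleve.NewSearchRequest(bleve.NewMatchQuery("beer"))
	req.SortBy([]string{"name", "-_score", "_id"})
	return req
}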
162,160
blevesearch/bleve
search.go
UnmarshalJSON
func (r *SearchRequest) UnmarshalJSON(input []byte) error { var temp struct { Q json.RawMessage `json:"query"` Size *int `json:"size"` From int `json:"from"` Highlight *HighlightRequest `json:"highlight"` Fields []string `json:"fields"` Facets FacetsRequest `json:"facets"` Explain bool `json:"explain"` Sort []json.RawMessage `json:"sort"` IncludeLocations bool `json:"includeLocations"` Score string `json:"score"` } err := json.Unmarshal(input, &temp) if err != nil { return err } if temp.Size == nil { r.Size = 10 } else { r.Size = *temp.Size } if temp.Sort == nil { r.Sort = search.SortOrder{&search.SortScore{Desc: true}} } else { r.Sort, err = search.ParseSortOrderJSON(temp.Sort) if err != nil { return err } } r.From = temp.From r.Explain = temp.Explain r.Highlight = temp.Highlight r.Fields = temp.Fields r.Facets = temp.Facets r.IncludeLocations = temp.IncludeLocations r.Score = temp.Score r.Query, err = query.ParseQuery(temp.Q) if err != nil { return err } if r.Size < 0 { r.Size = 10 } if r.From < 0 { r.From = 0 } return nil }
go
func (r *SearchRequest) UnmarshalJSON(input []byte) error { var temp struct { Q json.RawMessage `json:"query"` Size *int `json:"size"` From int `json:"from"` Highlight *HighlightRequest `json:"highlight"` Fields []string `json:"fields"` Facets FacetsRequest `json:"facets"` Explain bool `json:"explain"` Sort []json.RawMessage `json:"sort"` IncludeLocations bool `json:"includeLocations"` Score string `json:"score"` } err := json.Unmarshal(input, &temp) if err != nil { return err } if temp.Size == nil { r.Size = 10 } else { r.Size = *temp.Size } if temp.Sort == nil { r.Sort = search.SortOrder{&search.SortScore{Desc: true}} } else { r.Sort, err = search.ParseSortOrderJSON(temp.Sort) if err != nil { return err } } r.From = temp.From r.Explain = temp.Explain r.Highlight = temp.Highlight r.Fields = temp.Fields r.Facets = temp.Facets r.IncludeLocations = temp.IncludeLocations r.Score = temp.Score r.Query, err = query.ParseQuery(temp.Q) if err != nil { return err } if r.Size < 0 { r.Size = 10 } if r.From < 0 { r.From = 0 } return nil }
[ "func", "(", "r", "*", "SearchRequest", ")", "UnmarshalJSON", "(", "input", "[", "]", "byte", ")", "error", "{", "var", "temp", "struct", "{", "Q", "json", ".", "RawMessage", "`json:\"query\"`", "\n", "Size", "*", "int", "`json:\"size\"`", "\n", "From", "int", "`json:\"from\"`", "\n", "Highlight", "*", "HighlightRequest", "`json:\"highlight\"`", "\n", "Fields", "[", "]", "string", "`json:\"fields\"`", "\n", "Facets", "FacetsRequest", "`json:\"facets\"`", "\n", "Explain", "bool", "`json:\"explain\"`", "\n", "Sort", "[", "]", "json", ".", "RawMessage", "`json:\"sort\"`", "\n", "IncludeLocations", "bool", "`json:\"includeLocations\"`", "\n", "Score", "string", "`json:\"score\"`", "\n", "}", "\n\n", "err", ":=", "json", ".", "Unmarshal", "(", "input", ",", "&", "temp", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n\n", "if", "temp", ".", "Size", "==", "nil", "{", "r", ".", "Size", "=", "10", "\n", "}", "else", "{", "r", ".", "Size", "=", "*", "temp", ".", "Size", "\n", "}", "\n", "if", "temp", ".", "Sort", "==", "nil", "{", "r", ".", "Sort", "=", "search", ".", "SortOrder", "{", "&", "search", ".", "SortScore", "{", "Desc", ":", "true", "}", "}", "\n", "}", "else", "{", "r", ".", "Sort", ",", "err", "=", "search", ".", "ParseSortOrderJSON", "(", "temp", ".", "Sort", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "}", "\n", "r", ".", "From", "=", "temp", ".", "From", "\n", "r", ".", "Explain", "=", "temp", ".", "Explain", "\n", "r", ".", "Highlight", "=", "temp", ".", "Highlight", "\n", "r", ".", "Fields", "=", "temp", ".", "Fields", "\n", "r", ".", "Facets", "=", "temp", ".", "Facets", "\n", "r", ".", "IncludeLocations", "=", "temp", ".", "IncludeLocations", "\n", "r", ".", "Score", "=", "temp", ".", "Score", "\n", "r", ".", "Query", ",", "err", "=", "query", ".", "ParseQuery", "(", "temp", ".", "Q", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n\n", "if", "r", ".", "Size", "<", "0", "{", "r", ".", "Size", "=", "10", "\n", "}", "\n", "if", "r", ".", "From", "<", "0", "{", "r", ".", "From", "=", "0", "\n", "}", "\n\n", "return", "nil", "\n\n", "}" ]
// UnmarshalJSON deserializes a JSON representation of // a SearchRequest
[ "UnmarshalJSON", "deserializes", "a", "JSON", "representation", "of", "a", "SearchRequest" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L316-L369
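A sketch of decoding a hand-written JSON request through the UnmarshalJSON above; the JSON body is illustrative. Because it omits "size" and "sort", the decoder applies the defaults: Size becomes 10 and Sort becomes descending score.

package docexample

import (
	"encoding/json"

	"github.com/blevesearch/bleve"
)

// decodeRequest unmarshals a JSON search request; omitted fields pick up
// the defaults applied by SearchRequest.UnmarshalJSON.
func decodeRequest() (*bleve.SearchRequest, error) {
	body := []byte(`{"query": {"match": "beer", "field": "name"}, "from": 0}`)
	var req bleve.SearchRequest
	if err := json.Unmarshal(body, &req); err != nil {
		return nil, err
	}
	return &req, nil
}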
162,161
blevesearch/bleve
search.go
NewSearchRequestOptions
func NewSearchRequestOptions(q query.Query, size, from int, explain bool) *SearchRequest { return &SearchRequest{ Query: q, Size: size, From: from, Explain: explain, Sort: search.SortOrder{&search.SortScore{Desc: true}}, } }
go
func NewSearchRequestOptions(q query.Query, size, from int, explain bool) *SearchRequest { return &SearchRequest{ Query: q, Size: size, From: from, Explain: explain, Sort: search.SortOrder{&search.SortScore{Desc: true}}, } }
[ "func", "NewSearchRequestOptions", "(", "q", "query", ".", "Query", ",", "size", ",", "from", "int", ",", "explain", "bool", ")", "*", "SearchRequest", "{", "return", "&", "SearchRequest", "{", "Query", ":", "q", ",", "Size", ":", "size", ",", "From", ":", "from", ",", "Explain", ":", "explain", ",", "Sort", ":", "search", ".", "SortOrder", "{", "&", "search", ".", "SortScore", "{", "Desc", ":", "true", "}", "}", ",", "}", "\n", "}" ]
// NewSearchRequestOptions creates a new SearchRequest // for the Query, with the requested size, from // and explanation search parameters. // By default results are ordered by score, descending.
[ "NewSearchRequestOptions", "creates", "a", "new", "SearchRequest", "for", "the", "Query", "with", "the", "requested", "size", "from", "and", "explanation", "search", "parameters", ".", "By", "default", "results", "are", "ordered", "by", "score", "descending", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L382-L390
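A minimal pagination sketch with NewSearchRequestOptions; the query term is illustrative.

package docexample

import "github.com/blevesearch/bleve"

// thirdPage requests hits 21 through 30 (size 10, from 20) without score
// explanations; results default to descending-score order.
func thirdPage() *bleve.SearchRequest {
	return bleve.NewSearchRequestOptions(bleve.NewMatchQuery("beer"), 10, 20, false)
}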
162,162
blevesearch/bleve
search.go
MarshalJSON
func (iem IndexErrMap) MarshalJSON() ([]byte, error) { tmp := make(map[string]string, len(iem)) for k, v := range iem { tmp[k] = v.Error() } return json.Marshal(tmp) }
go
func (iem IndexErrMap) MarshalJSON() ([]byte, error) { tmp := make(map[string]string, len(iem)) for k, v := range iem { tmp[k] = v.Error() } return json.Marshal(tmp) }
[ "func", "(", "iem", "IndexErrMap", ")", "MarshalJSON", "(", ")", "(", "[", "]", "byte", ",", "error", ")", "{", "tmp", ":=", "make", "(", "map", "[", "string", "]", "string", ",", "len", "(", "iem", ")", ")", "\n", "for", "k", ",", "v", ":=", "range", "iem", "{", "tmp", "[", "k", "]", "=", "v", ".", "Error", "(", ")", "\n", "}", "\n", "return", "json", ".", "Marshal", "(", "tmp", ")", "\n", "}" ]
// MarshalJSON serializes the error into a string for JSON consumption
[ "MarshalJSON", "serializes", "the", "error", "into", "a", "string", "for", "JSON", "consumption" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L396-L402
162,163
blevesearch/bleve
search.go
Merge
func (ss *SearchStatus) Merge(other *SearchStatus) { ss.Total += other.Total ss.Failed += other.Failed ss.Successful += other.Successful if len(other.Errors) > 0 { if ss.Errors == nil { ss.Errors = make(map[string]error) } for otherIndex, otherError := range other.Errors { ss.Errors[otherIndex] = otherError } } }
go
func (ss *SearchStatus) Merge(other *SearchStatus) { ss.Total += other.Total ss.Failed += other.Failed ss.Successful += other.Successful if len(other.Errors) > 0 { if ss.Errors == nil { ss.Errors = make(map[string]error) } for otherIndex, otherError := range other.Errors { ss.Errors[otherIndex] = otherError } } }
[ "func", "(", "ss", "*", "SearchStatus", ")", "Merge", "(", "other", "*", "SearchStatus", ")", "{", "ss", ".", "Total", "+=", "other", ".", "Total", "\n", "ss", ".", "Failed", "+=", "other", ".", "Failed", "\n", "ss", ".", "Successful", "+=", "other", ".", "Successful", "\n", "if", "len", "(", "other", ".", "Errors", ")", ">", "0", "{", "if", "ss", ".", "Errors", "==", "nil", "{", "ss", ".", "Errors", "=", "make", "(", "map", "[", "string", "]", "error", ")", "\n", "}", "\n", "for", "otherIndex", ",", "otherError", ":=", "range", "other", ".", "Errors", "{", "ss", ".", "Errors", "[", "otherIndex", "]", "=", "otherError", "\n", "}", "\n", "}", "\n", "}" ]
// Merge will merge together multiple SearchStatuses during a MultiSearch
[ "Merge", "will", "merge", "together", "multiple", "SearchStatuses", "during", "a", "MultiSearch" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L427-L439
162,164
blevesearch/bleve
search.go
Merge
func (sr *SearchResult) Merge(other *SearchResult) { sr.Status.Merge(other.Status) sr.Hits = append(sr.Hits, other.Hits...) sr.Total += other.Total if other.MaxScore > sr.MaxScore { sr.MaxScore = other.MaxScore } if sr.Facets == nil && len(other.Facets) != 0 { sr.Facets = other.Facets return } sr.Facets.Merge(other.Facets) }
go
func (sr *SearchResult) Merge(other *SearchResult) { sr.Status.Merge(other.Status) sr.Hits = append(sr.Hits, other.Hits...) sr.Total += other.Total if other.MaxScore > sr.MaxScore { sr.MaxScore = other.MaxScore } if sr.Facets == nil && len(other.Facets) != 0 { sr.Facets = other.Facets return } sr.Facets.Merge(other.Facets) }
[ "func", "(", "sr", "*", "SearchResult", ")", "Merge", "(", "other", "*", "SearchResult", ")", "{", "sr", ".", "Status", ".", "Merge", "(", "other", ".", "Status", ")", "\n", "sr", ".", "Hits", "=", "append", "(", "sr", ".", "Hits", ",", "other", ".", "Hits", "...", ")", "\n", "sr", ".", "Total", "+=", "other", ".", "Total", "\n", "if", "other", ".", "MaxScore", ">", "sr", ".", "MaxScore", "{", "sr", ".", "MaxScore", "=", "other", ".", "MaxScore", "\n", "}", "\n", "if", "sr", ".", "Facets", "==", "nil", "&&", "len", "(", "other", ".", "Facets", ")", "!=", "0", "{", "sr", ".", "Facets", "=", "other", ".", "Facets", "\n", "return", "\n", "}", "\n\n", "sr", ".", "Facets", ".", "Merge", "(", "other", ".", "Facets", ")", "\n", "}" ]
// Merge will merge together multiple SearchResults during a MultiSearch
[ "Merge", "will", "merge", "together", "multiple", "SearchResults", "during", "a", "MultiSearch" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L513-L526
162,165
blevesearch/bleve
search.go
MemoryNeededForSearchResult
func MemoryNeededForSearchResult(req *SearchRequest) uint64 { if req == nil { return 0 } numDocMatches := req.Size + req.From if req.Size+req.From > collector.PreAllocSizeSkipCap { numDocMatches = collector.PreAllocSizeSkipCap } estimate := 0 // overhead from the SearchResult structure var sr SearchResult estimate += sr.Size() var dm search.DocumentMatch sizeOfDocumentMatch := dm.Size() // overhead from results estimate += numDocMatches * sizeOfDocumentMatch // overhead from facet results if req.Facets != nil { var fr search.FacetResult estimate += len(req.Facets) * fr.Size() } // highlighting, store var d document.Document if len(req.Fields) > 0 || req.Highlight != nil { for i := 0; i < (req.Size + req.From); i++ { estimate += (req.Size + req.From) * d.Size() } } return uint64(estimate) }
go
func MemoryNeededForSearchResult(req *SearchRequest) uint64 { if req == nil { return 0 } numDocMatches := req.Size + req.From if req.Size+req.From > collector.PreAllocSizeSkipCap { numDocMatches = collector.PreAllocSizeSkipCap } estimate := 0 // overhead from the SearchResult structure var sr SearchResult estimate += sr.Size() var dm search.DocumentMatch sizeOfDocumentMatch := dm.Size() // overhead from results estimate += numDocMatches * sizeOfDocumentMatch // overhead from facet results if req.Facets != nil { var fr search.FacetResult estimate += len(req.Facets) * fr.Size() } // highlighting, store var d document.Document if len(req.Fields) > 0 || req.Highlight != nil { for i := 0; i < (req.Size + req.From); i++ { estimate += (req.Size + req.From) * d.Size() } } return uint64(estimate) }
[ "func", "MemoryNeededForSearchResult", "(", "req", "*", "SearchRequest", ")", "uint64", "{", "if", "req", "==", "nil", "{", "return", "0", "\n", "}", "\n\n", "numDocMatches", ":=", "req", ".", "Size", "+", "req", ".", "From", "\n", "if", "req", ".", "Size", "+", "req", ".", "From", ">", "collector", ".", "PreAllocSizeSkipCap", "{", "numDocMatches", "=", "collector", ".", "PreAllocSizeSkipCap", "\n", "}", "\n\n", "estimate", ":=", "0", "\n\n", "// overhead from the SearchResult structure", "var", "sr", "SearchResult", "\n", "estimate", "+=", "sr", ".", "Size", "(", ")", "\n\n", "var", "dm", "search", ".", "DocumentMatch", "\n", "sizeOfDocumentMatch", ":=", "dm", ".", "Size", "(", ")", "\n\n", "// overhead from results", "estimate", "+=", "numDocMatches", "*", "sizeOfDocumentMatch", "\n\n", "// overhead from facet results", "if", "req", ".", "Facets", "!=", "nil", "{", "var", "fr", "search", ".", "FacetResult", "\n", "estimate", "+=", "len", "(", "req", ".", "Facets", ")", "*", "fr", ".", "Size", "(", ")", "\n", "}", "\n\n", "// highlighting, store", "var", "d", "document", ".", "Document", "\n", "if", "len", "(", "req", ".", "Fields", ")", ">", "0", "||", "req", ".", "Highlight", "!=", "nil", "{", "for", "i", ":=", "0", ";", "i", "<", "(", "req", ".", "Size", "+", "req", ".", "From", ")", ";", "i", "++", "{", "estimate", "+=", "(", "req", ".", "Size", "+", "req", ".", "From", ")", "*", "d", ".", "Size", "(", ")", "\n", "}", "\n", "}", "\n\n", "return", "uint64", "(", "estimate", ")", "\n", "}" ]
// MemoryNeededForSearchResult is an exported helper function to determine the RAM // needed to accommodate the results for a given search request.
[ "MemoryNeededForSearchResult", "is", "an", "exported", "helper", "function", "to", "determine", "the", "RAM", "needed", "to", "accommodate", "the", "results", "for", "a", "given", "search", "request", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search.go#L530-L567
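A sketch of using MemoryNeededForSearchResult for admission control before running a search; the request shape is illustrative.

package docexample

import "github.com/blevesearch/bleve"

// estimateBeforeSearch sizes the result set of a request up front; a
// caller might reject or queue the search if the estimate is too large.
func estimateBeforeSearch() uint64 {
	req := bleve.NewSearchRequestOptions(bleve.NewMatchQuery("beer"), 100, 0, false)
	req.Fields = []string{"name"} // loading stored fields raises the estimate
	return bleve.MemoryNeededForSearchResult(req)
}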
162,166
blevesearch/bleve
search/query/boolean.go
NewBooleanQuery
func NewBooleanQuery(must []Query, should []Query, mustNot []Query) *BooleanQuery { rv := BooleanQuery{} if len(must) > 0 { rv.Must = NewConjunctionQuery(must) } if len(should) > 0 { rv.Should = NewDisjunctionQuery(should) } if len(mustNot) > 0 { rv.MustNot = NewDisjunctionQuery(mustNot) } return &rv }
go
func NewBooleanQuery(must []Query, should []Query, mustNot []Query) *BooleanQuery { rv := BooleanQuery{} if len(must) > 0 { rv.Must = NewConjunctionQuery(must) } if len(should) > 0 { rv.Should = NewDisjunctionQuery(should) } if len(mustNot) > 0 { rv.MustNot = NewDisjunctionQuery(mustNot) } return &rv }
[ "func", "NewBooleanQuery", "(", "must", "[", "]", "Query", ",", "should", "[", "]", "Query", ",", "mustNot", "[", "]", "Query", ")", "*", "BooleanQuery", "{", "rv", ":=", "BooleanQuery", "{", "}", "\n", "if", "len", "(", "must", ")", ">", "0", "{", "rv", ".", "Must", "=", "NewConjunctionQuery", "(", "must", ")", "\n", "}", "\n", "if", "len", "(", "should", ")", ">", "0", "{", "rv", ".", "Should", "=", "NewDisjunctionQuery", "(", "should", ")", "\n", "}", "\n", "if", "len", "(", "mustNot", ")", ">", "0", "{", "rv", ".", "MustNot", "=", "NewDisjunctionQuery", "(", "mustNot", ")", "\n", "}", "\n\n", "return", "&", "rv", "\n", "}" ]
// NewBooleanQuery creates a compound Query composed // of several other Query objects. // Result documents must satisfy ALL of the // must Queries. // Result documents must satisfy NONE of the must not // Queries. // Result documents that ALSO satisfy any of the should // Queries will score higher.
[ "NewBooleanQuery", "creates", "a", "compound", "Query", "composed", "of", "several", "other", "Query", "objects", ".", "Result", "documents", "must", "satisfy", "ALL", "of", "the", "must", "Queries", ".", "Result", "documents", "must", "satisfy", "NONE", "of", "the", "must", "not", "Queries", ".", "Result", "documents", "that", "ALSO", "satisfy", "any", "of", "the", "should", "Queries", "will", "score", "higher", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/query/boolean.go#L43-L57
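A minimal sketch of the search/query NewBooleanQuery constructor above; the match terms are illustrative.

package docexample

import "github.com/blevesearch/bleve/search/query"

// draftBeerQuery requires "beer", scores documents higher when they also
// mention "ipa" or "stout", and excludes anything matching "gluten-free".
func draftBeerQuery() *query.BooleanQuery {
	must := []query.Query{query.NewMatchQuery("beer")}
	should := []query.Query{query.NewMatchQuery("ipa"), query.NewMatchQuery("stout")}
	mustNot := []query.Query{query.NewMatchQuery("gluten-free")}
	return query.NewBooleanQuery(must, should, mustNot)
}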
162,167
blevesearch/bleve
search/query/boolean.go
SetMinShould
func (q *BooleanQuery) SetMinShould(minShould float64) { q.Should.(*DisjunctionQuery).SetMin(minShould) }
go
func (q *BooleanQuery) SetMinShould(minShould float64) { q.Should.(*DisjunctionQuery).SetMin(minShould) }
[ "func", "(", "q", "*", "BooleanQuery", ")", "SetMinShould", "(", "minShould", "float64", ")", "{", "q", ".", "Should", ".", "(", "*", "DisjunctionQuery", ")", ".", "SetMin", "(", "minShould", ")", "\n", "}" ]
// SetMinShould requires that at least minShould of the // should Queries must be satisfied.
[ "SetMinShould", "requires", "that", "at", "least", "minShould", "of", "the", "should", "Queries", "must", "be", "satisfied", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/query/boolean.go#L70-L72
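A short SetMinShould sketch; note from the code above that it type-asserts the Should clause to *DisjunctionQuery, so it should only be called on a boolean query built with a non-empty should slice. The terms are illustrative.

package docexample

import "github.com/blevesearch/bleve/search/query"

// atLeastTwoStyles requires at least 2 of the 3 should clauses to match.
func atLeastTwoStyles() *query.BooleanQuery {
	should := []query.Query{
		query.NewMatchQuery("ipa"),
		query.NewMatchQuery("stout"),
		query.NewMatchQuery("porter"),
	}
	q := query.NewBooleanQuery(nil, should, nil)
	q.SetMinShould(2)
	return q
}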
162,168
blevesearch/bleve
cmd/bleve/cmd/root.go
CanMutateBleveIndex
func CanMutateBleveIndex(c *cobra.Command) bool { for k, v := range c.Annotations { if k == canMutateBleveIndex { if b, err := strconv.ParseBool(v); err == nil && b { return true } } } return false }
go
func CanMutateBleveIndex(c *cobra.Command) bool { for k, v := range c.Annotations { if k == canMutateBleveIndex { if b, err := strconv.ParseBool(v); err == nil && b { return true } } } return false }
[ "func", "CanMutateBleveIndex", "(", "c", "*", "cobra", ".", "Command", ")", "bool", "{", "for", "k", ",", "v", ":=", "range", "c", ".", "Annotations", "{", "if", "k", "==", "canMutateBleveIndex", "{", "if", "b", ",", "err", ":=", "strconv", ".", "ParseBool", "(", "v", ")", ";", "err", "==", "nil", "&&", "b", "{", "return", "true", "\n", "}", "\n", "}", "\n", "}", "\n", "return", "false", "\n", "}" ]
// CanMutateBleveIndex returns true if the command is capable // of mutating the bleve index, or false if its operation is // read-only
[ "CanMutateBleveIndex", "returns", "true", "if", "the", "command", "is", "capable", "of", "mutating", "the", "bleve", "index", "or", "false", "if", "its", "operation", "is", "read", "-", "only" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/cmd/bleve/cmd/root.go#L39-L48
162,169
blevesearch/bleve
search/collector/topn.go
NewTopNCollector
func NewTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector { hc := &TopNCollector{size: size, skip: skip, sort: sort} // pre-allocate space on the store to avoid reslicing // unless the size + skip is too large, then cap it // everything should still work, just reslices as necessary backingSize := size + skip + 1 if size+skip > PreAllocSizeSkipCap { backingSize = PreAllocSizeSkipCap + 1 } if size+skip > 10 { hc.store = newStoreHeap(backingSize, func(i, j *search.DocumentMatch) int { return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) }) } else { hc.store = newStoreSlice(backingSize, func(i, j *search.DocumentMatch) int { return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) }) } // these lookups traverse an interface, so do once up-front if sort.RequiresDocID() { hc.needDocIds = true } hc.neededFields = sort.RequiredFields() hc.cachedScoring = sort.CacheIsScore() hc.cachedDesc = sort.CacheDescending() return hc }
go
func NewTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector { hc := &TopNCollector{size: size, skip: skip, sort: sort} // pre-allocate space on the store to avoid reslicing // unless the size + skip is too large, then cap it // everything should still work, just reslices as necessary backingSize := size + skip + 1 if size+skip > PreAllocSizeSkipCap { backingSize = PreAllocSizeSkipCap + 1 } if size+skip > 10 { hc.store = newStoreHeap(backingSize, func(i, j *search.DocumentMatch) int { return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) }) } else { hc.store = newStoreSlice(backingSize, func(i, j *search.DocumentMatch) int { return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) }) } // these lookups traverse an interface, so do once up-front if sort.RequiresDocID() { hc.needDocIds = true } hc.neededFields = sort.RequiredFields() hc.cachedScoring = sort.CacheIsScore() hc.cachedDesc = sort.CacheDescending() return hc }
[ "func", "NewTopNCollector", "(", "size", "int", ",", "skip", "int", ",", "sort", "search", ".", "SortOrder", ")", "*", "TopNCollector", "{", "hc", ":=", "&", "TopNCollector", "{", "size", ":", "size", ",", "skip", ":", "skip", ",", "sort", ":", "sort", "}", "\n\n", "// pre-allocate space on the store to avoid reslicing", "// unless the size + skip is too large, then cap it", "// everything should still work, just reslices as necessary", "backingSize", ":=", "size", "+", "skip", "+", "1", "\n", "if", "size", "+", "skip", ">", "PreAllocSizeSkipCap", "{", "backingSize", "=", "PreAllocSizeSkipCap", "+", "1", "\n", "}", "\n\n", "if", "size", "+", "skip", ">", "10", "{", "hc", ".", "store", "=", "newStoreHeap", "(", "backingSize", ",", "func", "(", "i", ",", "j", "*", "search", ".", "DocumentMatch", ")", "int", "{", "return", "hc", ".", "sort", ".", "Compare", "(", "hc", ".", "cachedScoring", ",", "hc", ".", "cachedDesc", ",", "i", ",", "j", ")", "\n", "}", ")", "\n", "}", "else", "{", "hc", ".", "store", "=", "newStoreSlice", "(", "backingSize", ",", "func", "(", "i", ",", "j", "*", "search", ".", "DocumentMatch", ")", "int", "{", "return", "hc", ".", "sort", ".", "Compare", "(", "hc", ".", "cachedScoring", ",", "hc", ".", "cachedDesc", ",", "i", ",", "j", ")", "\n", "}", ")", "\n", "}", "\n\n", "// these lookups traverse an interface, so do once up-front", "if", "sort", ".", "RequiresDocID", "(", ")", "{", "hc", ".", "needDocIds", "=", "true", "\n", "}", "\n", "hc", ".", "neededFields", "=", "sort", ".", "RequiredFields", "(", ")", "\n", "hc", ".", "cachedScoring", "=", "sort", ".", "CacheIsScore", "(", ")", "\n", "hc", ".", "cachedDesc", "=", "sort", ".", "CacheDescending", "(", ")", "\n\n", "return", "hc", "\n", "}" ]
// NewTopNCollector builds a collector to find the top 'size' hits, // skipping over the first 'skip' hits, // ordering hits by the provided sort order
[ "NewTopNCollector", "builds", "a", "collector", "to", "find", "the", "top", "size", "hits", "skipping", "over", "the", "first", "skip", "hits", "ordering", "hits", "by", "the", "provided", "sort", "order" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/collector/topn.go#L80-L110
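A minimal sketch of constructing the collector directly; this is the low-level API that SearchRequest execution uses internally, and the sort order here mirrors the default descending-score order.

package docexample

import (
	"github.com/blevesearch/bleve/search"
	"github.com/blevesearch/bleve/search/collector"
)

// topTenByScore builds a collector for the first 10 hits, skipping none,
// ordered by descending score. With size+skip of 10 or less the collector
// picks its slice-backed store instead of the heap.
func topTenByScore() *collector.TopNCollector {
	sort := search.SortOrder{&search.SortScore{Desc: true}}
	return collector.NewTopNCollector(10, 0, sort)
}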
162,170
blevesearch/bleve
search/collector/topn.go
Collect
func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher, reader index.IndexReader) error { startTime := time.Now() var err error var next *search.DocumentMatch // pre-allocate enough space in the DocumentMatchPool // unless the size + skip is too large, then cap it // everything should still work, just allocates DocumentMatches on demand backingSize := hc.size + hc.skip + 1 if hc.size+hc.skip > PreAllocSizeSkipCap { backingSize = PreAllocSizeSkipCap + 1 } searchContext := &search.SearchContext{ DocumentMatchPool: search.NewDocumentMatchPool(backingSize+searcher.DocumentMatchPoolSize(), len(hc.sort)), Collector: hc, IndexReader: reader, } hc.dvReader, err = reader.DocValueReader(hc.neededFields) if err != nil { return err } hc.updateFieldVisitor = func(field string, term []byte) { if hc.facetsBuilder != nil { hc.facetsBuilder.UpdateVisitor(field, term) } hc.sort.UpdateVisitor(field, term) } dmHandlerMaker := MakeTopNDocumentMatchHandler if cv := ctx.Value(search.MakeDocumentMatchHandlerKey); cv != nil { dmHandlerMaker = cv.(search.MakeDocumentMatchHandler) } // use the application given builder for making the custom document match // handler and perform callbacks/invocations on the newly made handler. dmHandler, loadID, err := dmHandlerMaker(searchContext) if err != nil { return err } hc.needDocIds = hc.needDocIds || loadID select { case <-ctx.Done(): return ctx.Err() default: next, err = searcher.Next(searchContext) } for err == nil && next != nil { if hc.total%CheckDoneEvery == 0 { select { case <-ctx.Done(): return ctx.Err() default: } } err = hc.prepareDocumentMatch(searchContext, reader, next) if err != nil { break } err = dmHandler(next) if err != nil { break } next, err = searcher.Next(searchContext) } // help finalize/flush the results in case // of custom document match handlers. err = dmHandler(nil) if err != nil { return err } // compute search duration hc.took = time.Since(startTime) if err != nil { return err } // finalize actual results err = hc.finalizeResults(reader) if err != nil { return err } return nil }
go
func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher, reader index.IndexReader) error { startTime := time.Now() var err error var next *search.DocumentMatch // pre-allocate enough space in the DocumentMatchPool // unless the size + skip is too large, then cap it // everything should still work, just allocates DocumentMatches on demand backingSize := hc.size + hc.skip + 1 if hc.size+hc.skip > PreAllocSizeSkipCap { backingSize = PreAllocSizeSkipCap + 1 } searchContext := &search.SearchContext{ DocumentMatchPool: search.NewDocumentMatchPool(backingSize+searcher.DocumentMatchPoolSize(), len(hc.sort)), Collector: hc, IndexReader: reader, } hc.dvReader, err = reader.DocValueReader(hc.neededFields) if err != nil { return err } hc.updateFieldVisitor = func(field string, term []byte) { if hc.facetsBuilder != nil { hc.facetsBuilder.UpdateVisitor(field, term) } hc.sort.UpdateVisitor(field, term) } dmHandlerMaker := MakeTopNDocumentMatchHandler if cv := ctx.Value(search.MakeDocumentMatchHandlerKey); cv != nil { dmHandlerMaker = cv.(search.MakeDocumentMatchHandler) } // use the application given builder for making the custom document match // handler and perform callbacks/invocations on the newly made handler. dmHandler, loadID, err := dmHandlerMaker(searchContext) if err != nil { return err } hc.needDocIds = hc.needDocIds || loadID select { case <-ctx.Done(): return ctx.Err() default: next, err = searcher.Next(searchContext) } for err == nil && next != nil { if hc.total%CheckDoneEvery == 0 { select { case <-ctx.Done(): return ctx.Err() default: } } err = hc.prepareDocumentMatch(searchContext, reader, next) if err != nil { break } err = dmHandler(next) if err != nil { break } next, err = searcher.Next(searchContext) } // help finalize/flush the results in case // of custom document match handlers. err = dmHandler(nil) if err != nil { return err } // compute search duration hc.took = time.Since(startTime) if err != nil { return err } // finalize actual results err = hc.finalizeResults(reader) if err != nil { return err } return nil }
[ "func", "(", "hc", "*", "TopNCollector", ")", "Collect", "(", "ctx", "context", ".", "Context", ",", "searcher", "search", ".", "Searcher", ",", "reader", "index", ".", "IndexReader", ")", "error", "{", "startTime", ":=", "time", ".", "Now", "(", ")", "\n", "var", "err", "error", "\n", "var", "next", "*", "search", ".", "DocumentMatch", "\n\n", "// pre-allocate enough space in the DocumentMatchPool", "// unless the size + skip is too large, then cap it", "// everything should still work, just allocates DocumentMatches on demand", "backingSize", ":=", "hc", ".", "size", "+", "hc", ".", "skip", "+", "1", "\n", "if", "hc", ".", "size", "+", "hc", ".", "skip", ">", "PreAllocSizeSkipCap", "{", "backingSize", "=", "PreAllocSizeSkipCap", "+", "1", "\n", "}", "\n", "searchContext", ":=", "&", "search", ".", "SearchContext", "{", "DocumentMatchPool", ":", "search", ".", "NewDocumentMatchPool", "(", "backingSize", "+", "searcher", ".", "DocumentMatchPoolSize", "(", ")", ",", "len", "(", "hc", ".", "sort", ")", ")", ",", "Collector", ":", "hc", ",", "IndexReader", ":", "reader", ",", "}", "\n\n", "hc", ".", "dvReader", ",", "err", "=", "reader", ".", "DocValueReader", "(", "hc", ".", "neededFields", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n\n", "hc", ".", "updateFieldVisitor", "=", "func", "(", "field", "string", ",", "term", "[", "]", "byte", ")", "{", "if", "hc", ".", "facetsBuilder", "!=", "nil", "{", "hc", ".", "facetsBuilder", ".", "UpdateVisitor", "(", "field", ",", "term", ")", "\n", "}", "\n", "hc", ".", "sort", ".", "UpdateVisitor", "(", "field", ",", "term", ")", "\n", "}", "\n\n", "dmHandlerMaker", ":=", "MakeTopNDocumentMatchHandler", "\n", "if", "cv", ":=", "ctx", ".", "Value", "(", "search", ".", "MakeDocumentMatchHandlerKey", ")", ";", "cv", "!=", "nil", "{", "dmHandlerMaker", "=", "cv", ".", "(", "search", ".", "MakeDocumentMatchHandler", ")", "\n", "}", "\n", "// use the application given builder for making the custom document match", "// handler and perform callbacks/invocations on the newly made handler.", "dmHandler", ",", "loadID", ",", "err", ":=", "dmHandlerMaker", "(", "searchContext", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n\n", "hc", ".", "needDocIds", "=", "hc", ".", "needDocIds", "||", "loadID", "\n\n", "select", "{", "case", "<-", "ctx", ".", "Done", "(", ")", ":", "return", "ctx", ".", "Err", "(", ")", "\n", "default", ":", "next", ",", "err", "=", "searcher", ".", "Next", "(", "searchContext", ")", "\n", "}", "\n", "for", "err", "==", "nil", "&&", "next", "!=", "nil", "{", "if", "hc", ".", "total", "%", "CheckDoneEvery", "==", "0", "{", "select", "{", "case", "<-", "ctx", ".", "Done", "(", ")", ":", "return", "ctx", ".", "Err", "(", ")", "\n", "default", ":", "}", "\n", "}", "\n\n", "err", "=", "hc", ".", "prepareDocumentMatch", "(", "searchContext", ",", "reader", ",", "next", ")", "\n", "if", "err", "!=", "nil", "{", "break", "\n", "}", "\n\n", "err", "=", "dmHandler", "(", "next", ")", "\n", "if", "err", "!=", "nil", "{", "break", "\n", "}", "\n\n", "next", ",", "err", "=", "searcher", ".", "Next", "(", "searchContext", ")", "\n", "}", "\n\n", "// help finalize/flush the results in case", "// of custom document match handlers.", "err", "=", "dmHandler", "(", "nil", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n\n", "// compute search duration", "hc", ".", "took", "=", "time", ".", "Since", "(", "startTime", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", 
"}", "\n", "// finalize actual results", "err", "=", "hc", ".", "finalizeResults", "(", "reader", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "return", "nil", "\n", "}" ]
// Collect goes to the index to find the matching documents
[ "Collect", "goes", "to", "the", "index", "to", "find", "the", "matching", "documents" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/collector/topn.go#L129-L218
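A hedged sketch of driving Collect; obtaining the searcher and index reader is index-implementation specific, so they are left as parameters here, and Results() is assumed to return the collected hits once Collect finishes.

package docexample

import (
	"context"

	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/search"
	"github.com/blevesearch/bleve/search/collector"
)

// collectTopHits runs a prepared searcher to completion through the
// collector and returns the gathered document matches.
func collectTopHits(ctx context.Context, searcher search.Searcher,
	reader index.IndexReader) (search.DocumentMatchCollection, error) {
	coll := collector.NewTopNCollector(10, 0,
		search.SortOrder{&search.SortScore{Desc: true}})
	if err := coll.Collect(ctx, searcher, reader); err != nil {
		return nil, err
	}
	return coll.Results(), nil
}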
162,171
blevesearch/bleve
search/collector/topn.go
visitFieldTerms
func (hc *TopNCollector) visitFieldTerms(reader index.IndexReader, d *search.DocumentMatch) error { if hc.facetsBuilder != nil { hc.facetsBuilder.StartDoc() } err := hc.dvReader.VisitDocValues(d.IndexInternalID, hc.updateFieldVisitor) if hc.facetsBuilder != nil { hc.facetsBuilder.EndDoc() } return err }
go
func (hc *TopNCollector) visitFieldTerms(reader index.IndexReader, d *search.DocumentMatch) error { if hc.facetsBuilder != nil { hc.facetsBuilder.StartDoc() } err := hc.dvReader.VisitDocValues(d.IndexInternalID, hc.updateFieldVisitor) if hc.facetsBuilder != nil { hc.facetsBuilder.EndDoc() } return err }
[ "func", "(", "hc", "*", "TopNCollector", ")", "visitFieldTerms", "(", "reader", "index", ".", "IndexReader", ",", "d", "*", "search", ".", "DocumentMatch", ")", "error", "{", "if", "hc", ".", "facetsBuilder", "!=", "nil", "{", "hc", ".", "facetsBuilder", ".", "StartDoc", "(", ")", "\n", "}", "\n\n", "err", ":=", "hc", ".", "dvReader", ".", "VisitDocValues", "(", "d", ".", "IndexInternalID", ",", "hc", ".", "updateFieldVisitor", ")", "\n", "if", "hc", ".", "facetsBuilder", "!=", "nil", "{", "hc", ".", "facetsBuilder", ".", "EndDoc", "(", ")", "\n", "}", "\n\n", "return", "err", "\n", "}" ]
// visitFieldTerms is responsible for visiting the field terms of the // search hit, and passing visited terms to the sort and facet builder
[ "visitFieldTerms", "is", "responsible", "for", "visiting", "the", "field", "terms", "of", "the", "search", "hit", "and", "passing", "visited", "terms", "to", "the", "sort", "and", "facet", "builder" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/collector/topn.go#L304-L315
162,172
blevesearch/bleve
search/collector/topn.go
SetFacetsBuilder
func (hc *TopNCollector) SetFacetsBuilder(facetsBuilder *search.FacetsBuilder) { hc.facetsBuilder = facetsBuilder hc.neededFields = append(hc.neededFields, hc.facetsBuilder.RequiredFields()...) }
go
func (hc *TopNCollector) SetFacetsBuilder(facetsBuilder *search.FacetsBuilder) { hc.facetsBuilder = facetsBuilder hc.neededFields = append(hc.neededFields, hc.facetsBuilder.RequiredFields()...) }
[ "func", "(", "hc", "*", "TopNCollector", ")", "SetFacetsBuilder", "(", "facetsBuilder", "*", "search", ".", "FacetsBuilder", ")", "{", "hc", ".", "facetsBuilder", "=", "facetsBuilder", "\n", "hc", ".", "neededFields", "=", "append", "(", "hc", ".", "neededFields", ",", "hc", ".", "facetsBuilder", ".", "RequiredFields", "(", ")", "...", ")", "\n", "}" ]
// SetFacetsBuilder registers a facet builder for this collector
[ "SetFacetsBuilder", "registers", "a", "facet", "builder", "for", "this", "collector" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/collector/topn.go#L318-L321
162,173
blevesearch/bleve
search/collector/topn.go
FacetResults
func (hc *TopNCollector) FacetResults() search.FacetResults { if hc.facetsBuilder != nil { return hc.facetsBuilder.Results() } return nil }
go
func (hc *TopNCollector) FacetResults() search.FacetResults { if hc.facetsBuilder != nil { return hc.facetsBuilder.Results() } return nil }
[ "func", "(", "hc", "*", "TopNCollector", ")", "FacetResults", "(", ")", "search", ".", "FacetResults", "{", "if", "hc", ".", "facetsBuilder", "!=", "nil", "{", "return", "hc", ".", "facetsBuilder", ".", "Results", "(", ")", "\n", "}", "\n", "return", "nil", "\n", "}" ]
// FacetResults returns the computed facets results
[ "FacetResults", "returns", "the", "computed", "facets", "results" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/collector/topn.go#L365-L370
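At the public API level, the facets builder and these collector hooks are driven by adding facet requests to a search request. A brief sketch against an already-open index; the query text and the "tags" field are hypothetical.

package example

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

// printTagFacets runs a match query against an already-open index and
// prints a terms facet over a hypothetical "tags" field.
func printTagFacets(idx bleve.Index) error {
	req := bleve.NewSearchRequest(bleve.NewMatchQuery("tour"))
	req.Size = 10
	req.AddFacet("top_tags", bleve.NewFacetRequest("tags", 5))

	res, err := idx.Search(req)
	if err != nil {
		return err
	}
	for name, facet := range res.Facets {
		fmt.Printf("%s (%d values)\n", name, facet.Total)
		for _, tf := range facet.Terms {
			fmt.Printf("  %s: %d\n", tf.Term, tf.Count)
		}
	}
	return nil
}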
162,174
blevesearch/bleve
index/store/multiget.go
MultiGet
func MultiGet(kvreader KVReader, keys [][]byte) ([][]byte, error) { vals := make([][]byte, len(keys)) for i, key := range keys { val, err := kvreader.Get(key) if err != nil { return nil, err } vals[i] = val } return vals, nil }
go
func MultiGet(kvreader KVReader, keys [][]byte) ([][]byte, error) { vals := make([][]byte, len(keys)) for i, key := range keys { val, err := kvreader.Get(key) if err != nil { return nil, err } vals[i] = val } return vals, nil }
[ "func", "MultiGet", "(", "kvreader", "KVReader", ",", "keys", "[", "]", "[", "]", "byte", ")", "(", "[", "]", "[", "]", "byte", ",", "error", ")", "{", "vals", ":=", "make", "(", "[", "]", "[", "]", "byte", ",", "len", "(", "keys", ")", ")", "\n\n", "for", "i", ",", "key", ":=", "range", "keys", "{", "val", ",", "err", ":=", "kvreader", ".", "Get", "(", "key", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n\n", "vals", "[", "i", "]", "=", "val", "\n", "}", "\n\n", "return", "vals", ",", "nil", "\n", "}" ]
// MultiGet is a helper function to retrieve multiple keys from a // KVReader, and might be used by KVStore implementations that don't // have a native multi-get facility.
[ "MultiGet", "is", "a", "helper", "function", "to", "retrieve", "multiple", "keys", "from", "a", "KVReader", "and", "might", "be", "used", "by", "KVStore", "implementations", "that", "don", "t", "have", "a", "native", "multi", "-", "get", "facility", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/store/multiget.go#L20-L33
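A standalone sketch of the same retrieval loop, using a plain map as a stand-in for the KVReader (the real store interface is not reproduced here); the result slice is allocated with its full length so positional assignment is safe.

package main

import "fmt"

// mapReader is a toy stand-in for a key-value reader.
type mapReader map[string][]byte

func (m mapReader) Get(key []byte) ([]byte, error) {
	return m[string(key)], nil
}

// multiGet fetches each key individually, mirroring the helper above.
func multiGet(r mapReader, keys [][]byte) ([][]byte, error) {
	vals := make([][]byte, len(keys))
	for i, key := range keys {
		val, err := r.Get(key)
		if err != nil {
			return nil, err
		}
		vals[i] = val
	}
	return vals, nil
}

func main() {
	r := mapReader{"a": []byte("1"), "b": []byte("2")}
	vals, _ := multiGet(r, [][]byte{[]byte("a"), []byte("b")})
	fmt.Printf("%s %s\n", vals[0], vals[1])
}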
162,175
blevesearch/bleve
search/searcher/search_disjunction_heap.go
Optimize
func (s *DisjunctionHeapSearcher) Optimize(kind string, octx index.OptimizableContext) ( index.OptimizableContext, error) { if len(s.searchers) == 1 { o, ok := s.searchers[0].(index.Optimizable) if ok { return o.Optimize(kind, octx) } } return octx, nil }
go
func (s *DisjunctionHeapSearcher) Optimize(kind string, octx index.OptimizableContext) ( index.OptimizableContext, error) { if len(s.searchers) == 1 { o, ok := s.searchers[0].(index.Optimizable) if ok { return o.Optimize(kind, octx) } } return octx, nil }
[ "func", "(", "s", "*", "DisjunctionHeapSearcher", ")", "Optimize", "(", "kind", "string", ",", "octx", "index", ".", "OptimizableContext", ")", "(", "index", ".", "OptimizableContext", ",", "error", ")", "{", "if", "len", "(", "s", ".", "searchers", ")", "==", "1", "{", "o", ",", "ok", ":=", "s", ".", "searchers", "[", "0", "]", ".", "(", "index", ".", "Optimizable", ")", "\n", "if", "ok", "{", "return", "o", ".", "Optimize", "(", "kind", ",", "octx", ")", "\n", "}", "\n", "}", "\n\n", "return", "octx", ",", "nil", "\n", "}" ]
// a disjunction searcher implements the index.Optimizable interface // but only activates on an edge case where the disjunction is a // wrapper around a single Optimizable child searcher
[ "a", "disjunction", "searcher", "implements", "the", "index", ".", "Optimizable", "interface", "but", "only", "activates", "on", "an", "edge", "case", "where", "the", "disjunction", "is", "a", "wrapper", "around", "a", "single", "Optimizable", "child", "searcher" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/searcher/search_disjunction_heap.go#L304-L314
162,176
blevesearch/bleve
search/query/query.go
expandQuery
func expandQuery(m mapping.IndexMapping, query Query) (Query, error) { var expand func(query Query) (Query, error) var expandSlice func(queries []Query) ([]Query, error) expandSlice = func(queries []Query) ([]Query, error) { expanded := []Query{} for _, q := range queries { exp, err := expand(q) if err != nil { return nil, err } expanded = append(expanded, exp) } return expanded, nil } expand = func(query Query) (Query, error) { switch q := query.(type) { case *QueryStringQuery: parsed, err := parseQuerySyntax(q.Query) if err != nil { return nil, fmt.Errorf("could not parse '%s': %s", q.Query, err) } return expand(parsed) case *ConjunctionQuery: children, err := expandSlice(q.Conjuncts) if err != nil { return nil, err } q.Conjuncts = children return q, nil case *DisjunctionQuery: children, err := expandSlice(q.Disjuncts) if err != nil { return nil, err } q.Disjuncts = children return q, nil case *BooleanQuery: var err error q.Must, err = expand(q.Must) if err != nil { return nil, err } q.Should, err = expand(q.Should) if err != nil { return nil, err } q.MustNot, err = expand(q.MustNot) if err != nil { return nil, err } return q, nil default: return query, nil } } return expand(query) }
go
func expandQuery(m mapping.IndexMapping, query Query) (Query, error) { var expand func(query Query) (Query, error) var expandSlice func(queries []Query) ([]Query, error) expandSlice = func(queries []Query) ([]Query, error) { expanded := []Query{} for _, q := range queries { exp, err := expand(q) if err != nil { return nil, err } expanded = append(expanded, exp) } return expanded, nil } expand = func(query Query) (Query, error) { switch q := query.(type) { case *QueryStringQuery: parsed, err := parseQuerySyntax(q.Query) if err != nil { return nil, fmt.Errorf("could not parse '%s': %s", q.Query, err) } return expand(parsed) case *ConjunctionQuery: children, err := expandSlice(q.Conjuncts) if err != nil { return nil, err } q.Conjuncts = children return q, nil case *DisjunctionQuery: children, err := expandSlice(q.Disjuncts) if err != nil { return nil, err } q.Disjuncts = children return q, nil case *BooleanQuery: var err error q.Must, err = expand(q.Must) if err != nil { return nil, err } q.Should, err = expand(q.Should) if err != nil { return nil, err } q.MustNot, err = expand(q.MustNot) if err != nil { return nil, err } return q, nil default: return query, nil } } return expand(query) }
[ "func", "expandQuery", "(", "m", "mapping", ".", "IndexMapping", ",", "query", "Query", ")", "(", "Query", ",", "error", ")", "{", "var", "expand", "func", "(", "query", "Query", ")", "(", "Query", ",", "error", ")", "\n", "var", "expandSlice", "func", "(", "queries", "[", "]", "Query", ")", "(", "[", "]", "Query", ",", "error", ")", "\n\n", "expandSlice", "=", "func", "(", "queries", "[", "]", "Query", ")", "(", "[", "]", "Query", ",", "error", ")", "{", "expanded", ":=", "[", "]", "Query", "{", "}", "\n", "for", "_", ",", "q", ":=", "range", "queries", "{", "exp", ",", "err", ":=", "expand", "(", "q", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "expanded", "=", "append", "(", "expanded", ",", "exp", ")", "\n", "}", "\n", "return", "expanded", ",", "nil", "\n", "}", "\n\n", "expand", "=", "func", "(", "query", "Query", ")", "(", "Query", ",", "error", ")", "{", "switch", "q", ":=", "query", ".", "(", "type", ")", "{", "case", "*", "QueryStringQuery", ":", "parsed", ",", "err", ":=", "parseQuerySyntax", "(", "q", ".", "Query", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "q", ".", "Query", ",", "err", ")", "\n", "}", "\n", "return", "expand", "(", "parsed", ")", "\n", "case", "*", "ConjunctionQuery", ":", "children", ",", "err", ":=", "expandSlice", "(", "q", ".", "Conjuncts", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "q", ".", "Conjuncts", "=", "children", "\n", "return", "q", ",", "nil", "\n", "case", "*", "DisjunctionQuery", ":", "children", ",", "err", ":=", "expandSlice", "(", "q", ".", "Disjuncts", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "q", ".", "Disjuncts", "=", "children", "\n", "return", "q", ",", "nil", "\n", "case", "*", "BooleanQuery", ":", "var", "err", "error", "\n", "q", ".", "Must", ",", "err", "=", "expand", "(", "q", ".", "Must", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "q", ".", "Should", ",", "err", "=", "expand", "(", "q", ".", "Should", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "q", ".", "MustNot", ",", "err", "=", "expand", "(", "q", ".", "MustNot", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "return", "q", ",", "nil", "\n", "default", ":", "return", "query", ",", "nil", "\n", "}", "\n", "}", "\n", "return", "expand", "(", "query", ")", "\n", "}" ]
// expandQuery traverses the input query tree and returns a new tree where // query string queries have been expanded into base queries. Returned tree may // reference queries from the input tree or new queries.
[ "expandQuery", "traverses", "the", "input", "query", "tree", "and", "returns", "a", "new", "tree", "where", "query", "string", "queries", "have", "been", "expanded", "into", "base", "queries", ".", "Returned", "tree", "may", "reference", "queries", "from", "the", "input", "tree", "or", "new", "queries", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/query/query.go#L282-L340
162,177
blevesearch/bleve
search/query/query.go
DumpQuery
func DumpQuery(m mapping.IndexMapping, query Query) (string, error) { q, err := expandQuery(m, query) if err != nil { return "", err } data, err := json.MarshalIndent(q, "", " ") return string(data), err }
go
func DumpQuery(m mapping.IndexMapping, query Query) (string, error) { q, err := expandQuery(m, query) if err != nil { return "", err } data, err := json.MarshalIndent(q, "", " ") return string(data), err }
[ "func", "DumpQuery", "(", "m", "mapping", ".", "IndexMapping", ",", "query", "Query", ")", "(", "string", ",", "error", ")", "{", "q", ",", "err", ":=", "expandQuery", "(", "m", ",", "query", ")", "\n", "if", "err", "!=", "nil", "{", "return", "\"", "\"", ",", "err", "\n", "}", "\n", "data", ",", "err", ":=", "json", ".", "MarshalIndent", "(", "q", ",", "\"", "\"", ",", "\"", "\"", ")", "\n", "return", "string", "(", "data", ")", ",", "err", "\n", "}" ]
// DumpQuery returns a string representation of the query tree, where query // string queries have been expanded into base queries. The output format is // meant for debugging purpose and may change in the future.
[ "DumpQuery", "returns", "a", "string", "representation", "of", "the", "query", "tree", "where", "query", "string", "queries", "have", "been", "expanded", "into", "base", "queries", ".", "The", "output", "format", "is", "meant", "for", "debugging", "purpose", "and", "may", "change", "in", "the", "future", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/search/query/query.go#L345-L352
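A short usage sketch of the exported entry point, which also exercises expandQuery above; the query string below is hypothetical.

package example

import (
	"fmt"

	"github.com/blevesearch/bleve"
	"github.com/blevesearch/bleve/search/query"
)

func debugQuery() error {
	m := bleve.NewIndexMapping()
	q := bleve.NewQueryStringQuery(`name:beer +abv:>5`)
	s, err := query.DumpQuery(m, q)
	if err != nil {
		return err
	}
	fmt.Println(s) // the query string query has been expanded into base queries
	return nil
}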
162,178
blevesearch/bleve
index/scorch/segment/zap/intcoder.go
newChunkedIntCoder
func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder { total := maxDocNum/chunkSize + 1 rv := &chunkedIntCoder{ chunkSize: chunkSize, chunkLens: make([]uint64, total), final: make([]byte, 0, 64), } return rv }
go
func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder { total := maxDocNum/chunkSize + 1 rv := &chunkedIntCoder{ chunkSize: chunkSize, chunkLens: make([]uint64, total), final: make([]byte, 0, 64), } return rv }
[ "func", "newChunkedIntCoder", "(", "chunkSize", "uint64", ",", "maxDocNum", "uint64", ")", "*", "chunkedIntCoder", "{", "total", ":=", "maxDocNum", "/", "chunkSize", "+", "1", "\n", "rv", ":=", "&", "chunkedIntCoder", "{", "chunkSize", ":", "chunkSize", ",", "chunkLens", ":", "make", "(", "[", "]", "uint64", ",", "total", ")", ",", "final", ":", "make", "(", "[", "]", "byte", ",", "0", ",", "64", ")", ",", "}", "\n\n", "return", "rv", "\n", "}" ]
// newChunkedIntCoder returns a new chunk int coder which packs data into // chunks based on the provided chunkSize and supports up to the specified // maxDocNum
[ "newChunkedIntCoder", "returns", "a", "new", "chunk", "int", "coder", "which", "packs", "data", "into", "chunks", "based", "on", "the", "provided", "chunkSize", "and", "supports", "up", "to", "the", "specified", "maxDocNum" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/intcoder.go#L36-L45
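The chunk bookkeeping is simple integer arithmetic; a standalone sketch (not the zap type itself) of how many chunk-length slots are reserved and which chunk a document number lands in.

package main

import "fmt"

func main() {
	chunkSize := uint64(1024)
	maxDocNum := uint64(4500)

	// the coder reserves one length slot per possible chunk
	totalChunks := maxDocNum/chunkSize + 1 // 5 slots cover doc numbers 0..4500

	// any document number maps to its chunk by integer division
	docNum := uint64(2050)
	chunkIndex := docNum / chunkSize // doc 2050 lands in chunk 2

	fmt.Println(totalChunks, chunkIndex)
}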
162,179
blevesearch/bleve
index/scorch/segment/zap/intcoder.go
Reset
func (c *chunkedIntCoder) Reset() { c.final = c.final[:0] c.chunkBuf.Reset() c.currChunk = 0 for i := range c.chunkLens { c.chunkLens[i] = 0 } }
go
func (c *chunkedIntCoder) Reset() { c.final = c.final[:0] c.chunkBuf.Reset() c.currChunk = 0 for i := range c.chunkLens { c.chunkLens[i] = 0 } }
[ "func", "(", "c", "*", "chunkedIntCoder", ")", "Reset", "(", ")", "{", "c", ".", "final", "=", "c", ".", "final", "[", ":", "0", "]", "\n", "c", ".", "chunkBuf", ".", "Reset", "(", ")", "\n", "c", ".", "currChunk", "=", "0", "\n", "for", "i", ":=", "range", "c", ".", "chunkLens", "{", "c", ".", "chunkLens", "[", "i", "]", "=", "0", "\n", "}", "\n", "}" ]
// Reset lets you reuse this chunked int coder. buffers are reset and reused // from previous use. you cannot change the chunk size or max doc num.
[ "Reset", "lets", "you", "reuse", "this", "chunked", "int", "coder", ".", "buffers", "are", "reset", "and", "reused", "from", "previous", "use", ".", "you", "cannot", "change", "the", "chunk", "size", "or", "max", "doc", "num", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/intcoder.go#L49-L56
162,180
blevesearch/bleve
index/scorch/segment/zap/intcoder.go
Write
func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) if len(c.buf) < bufNeeded { c.buf = make([]byte, bufNeeded) } buf := c.buf // convert the chunk lengths into chunk offsets chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) // write out the number of chunks & each chunk offsets n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) for _, chunkOffset := range chunkOffsets { n += binary.PutUvarint(buf[n:], chunkOffset) } tw, err := w.Write(buf[:n]) if err != nil { return tw, err } // write out the data nw, err := w.Write(c.final) tw += nw if err != nil { return tw, err } return tw, nil }
go
func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) if len(c.buf) < bufNeeded { c.buf = make([]byte, bufNeeded) } buf := c.buf // convert the chunk lengths into chunk offsets chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) // write out the number of chunks & each chunk offsets n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) for _, chunkOffset := range chunkOffsets { n += binary.PutUvarint(buf[n:], chunkOffset) } tw, err := w.Write(buf[:n]) if err != nil { return tw, err } // write out the data nw, err := w.Write(c.final) tw += nw if err != nil { return tw, err } return tw, nil }
[ "func", "(", "c", "*", "chunkedIntCoder", ")", "Write", "(", "w", "io", ".", "Writer", ")", "(", "int", ",", "error", ")", "{", "bufNeeded", ":=", "binary", ".", "MaxVarintLen64", "*", "(", "1", "+", "len", "(", "c", ".", "chunkLens", ")", ")", "\n", "if", "len", "(", "c", ".", "buf", ")", "<", "bufNeeded", "{", "c", ".", "buf", "=", "make", "(", "[", "]", "byte", ",", "bufNeeded", ")", "\n", "}", "\n", "buf", ":=", "c", ".", "buf", "\n\n", "// convert the chunk lengths into chunk offsets", "chunkOffsets", ":=", "modifyLengthsToEndOffsets", "(", "c", ".", "chunkLens", ")", "\n\n", "// write out the number of chunks & each chunk offsets", "n", ":=", "binary", ".", "PutUvarint", "(", "buf", ",", "uint64", "(", "len", "(", "chunkOffsets", ")", ")", ")", "\n", "for", "_", ",", "chunkOffset", ":=", "range", "chunkOffsets", "{", "n", "+=", "binary", ".", "PutUvarint", "(", "buf", "[", "n", ":", "]", ",", "chunkOffset", ")", "\n", "}", "\n\n", "tw", ",", "err", ":=", "w", ".", "Write", "(", "buf", "[", ":", "n", "]", ")", "\n", "if", "err", "!=", "nil", "{", "return", "tw", ",", "err", "\n", "}", "\n\n", "// write out the data", "nw", ",", "err", ":=", "w", ".", "Write", "(", "c", ".", "final", ")", "\n", "tw", "+=", "nw", "\n", "if", "err", "!=", "nil", "{", "return", "tw", ",", "err", "\n", "}", "\n", "return", "tw", ",", "nil", "\n", "}" ]
// Write commits all the encoded chunked integers to the provided writer.
[ "Write", "commits", "all", "the", "encoded", "chunked", "integers", "to", "the", "provided", "writer", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/intcoder.go#L107-L135
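A standalone sketch of the on-disk shape Write produces, assuming the same convention as modifyLengthsToEndOffsets (chunk lengths converted to cumulative end offsets) followed by a uvarint header and the encoded data; the helper name below is made up and is not the zap implementation.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// writeChunkedHeader converts chunk lengths to cumulative end offsets,
// writes the count and offsets as uvarints, then appends the data bytes.
func writeChunkedHeader(w *bytes.Buffer, chunkLens []uint64, data []byte) error {
	offsets := make([]uint64, len(chunkLens))
	var running uint64
	for i, l := range chunkLens {
		running += l
		offsets[i] = running
	}
	buf := make([]byte, binary.MaxVarintLen64*(1+len(offsets)))
	n := binary.PutUvarint(buf, uint64(len(offsets)))
	for _, off := range offsets {
		n += binary.PutUvarint(buf[n:], off)
	}
	if _, err := w.Write(buf[:n]); err != nil {
		return err
	}
	_, err := w.Write(data)
	return err
}

func main() {
	var out bytes.Buffer
	_ = writeChunkedHeader(&out, []uint64{3, 0, 5}, []byte("abcdefgh"))
	fmt.Println(out.Len()) // header uvarints followed by the 8 data bytes
}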
162,181
blevesearch/bleve
index/upsidedown/dump.go
dumpPrefix
func dumpPrefix(kvreader store.KVReader, rv chan interface{}, prefix []byte) { start := prefix if start == nil { start = []byte{0} } it := kvreader.PrefixIterator(start) defer func() { cerr := it.Close() if cerr != nil { rv <- cerr } }() key, val, valid := it.Current() for valid { ck := make([]byte, len(key)) copy(ck, key) cv := make([]byte, len(val)) copy(cv, val) row, err := ParseFromKeyValue(ck, cv) if err != nil { rv <- err return } rv <- row it.Next() key, val, valid = it.Current() } }
go
func dumpPrefix(kvreader store.KVReader, rv chan interface{}, prefix []byte) { start := prefix if start == nil { start = []byte{0} } it := kvreader.PrefixIterator(start) defer func() { cerr := it.Close() if cerr != nil { rv <- cerr } }() key, val, valid := it.Current() for valid { ck := make([]byte, len(key)) copy(ck, key) cv := make([]byte, len(val)) copy(cv, val) row, err := ParseFromKeyValue(ck, cv) if err != nil { rv <- err return } rv <- row it.Next() key, val, valid = it.Current() } }
[ "func", "dumpPrefix", "(", "kvreader", "store", ".", "KVReader", ",", "rv", "chan", "interface", "{", "}", ",", "prefix", "[", "]", "byte", ")", "{", "start", ":=", "prefix", "\n", "if", "start", "==", "nil", "{", "start", "=", "[", "]", "byte", "{", "0", "}", "\n", "}", "\n", "it", ":=", "kvreader", ".", "PrefixIterator", "(", "start", ")", "\n", "defer", "func", "(", ")", "{", "cerr", ":=", "it", ".", "Close", "(", ")", "\n", "if", "cerr", "!=", "nil", "{", "rv", "<-", "cerr", "\n", "}", "\n", "}", "(", ")", "\n", "key", ",", "val", ",", "valid", ":=", "it", ".", "Current", "(", ")", "\n", "for", "valid", "{", "ck", ":=", "make", "(", "[", "]", "byte", ",", "len", "(", "key", ")", ")", "\n", "copy", "(", "ck", ",", "key", ")", "\n", "cv", ":=", "make", "(", "[", "]", "byte", ",", "len", "(", "val", ")", ")", "\n", "copy", "(", "cv", ",", "val", ")", "\n", "row", ",", "err", ":=", "ParseFromKeyValue", "(", "ck", ",", "cv", ")", "\n", "if", "err", "!=", "nil", "{", "rv", "<-", "err", "\n", "return", "\n", "}", "\n", "rv", "<-", "row", "\n\n", "it", ".", "Next", "(", ")", "\n", "key", ",", "val", ",", "valid", "=", "it", ".", "Current", "(", ")", "\n", "}", "\n", "}" ]
// the functions in this file are only intended to be used by // the bleve_dump utility and the debug http handlers // if your application relies on them, you're doing something wrong // they may change or be removed at any time
[ "the", "functions", "in", "this", "file", "are", "only", "intended", "to", "be", "used", "by", "the", "bleve_dump", "utility", "and", "the", "debug", "http", "handlers", "if", "your", "application", "relies", "on", "them", "you", "re", "doing", "something", "wrong", "they", "may", "change", "or", "be", "removed", "at", "any", "time" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/upsidedown/dump.go#L29-L57
162,182
blevesearch/bleve
index/upsidedown/dump.go
DumpDoc
func (i *IndexReader) DumpDoc(id string) chan interface{} { idBytes := []byte(id) rv := make(chan interface{}) go func() { defer close(rv) back, err := backIndexRowForDoc(i.kvreader, []byte(id)) if err != nil { rv <- err return } // no such doc if back == nil { return } // build sorted list of term keys keys := make(keyset, 0) for _, entry := range back.termsEntries { for i := range entry.Terms { tfr := NewTermFrequencyRow([]byte(entry.Terms[i]), uint16(*entry.Field), idBytes, 0, 0) key := tfr.Key() keys = append(keys, key) } } sort.Sort(keys) // first add all the stored rows storedRowPrefix := NewStoredRow(idBytes, 0, []uint64{}, 'x', []byte{}).ScanPrefixForDoc() dumpPrefix(i.kvreader, rv, storedRowPrefix) // now walk term keys in order and add them as well if len(keys) > 0 { it := i.kvreader.RangeIterator(keys[0], nil) defer func() { cerr := it.Close() if cerr != nil { rv <- cerr } }() for _, key := range keys { it.Seek(key) rkey, rval, valid := it.Current() if !valid { break } rck := make([]byte, len(rkey)) copy(rck, key) rcv := make([]byte, len(rval)) copy(rcv, rval) row, err := ParseFromKeyValue(rck, rcv) if err != nil { rv <- err return } rv <- row } } }() return rv }
go
func (i *IndexReader) DumpDoc(id string) chan interface{} { idBytes := []byte(id) rv := make(chan interface{}) go func() { defer close(rv) back, err := backIndexRowForDoc(i.kvreader, []byte(id)) if err != nil { rv <- err return } // no such doc if back == nil { return } // build sorted list of term keys keys := make(keyset, 0) for _, entry := range back.termsEntries { for i := range entry.Terms { tfr := NewTermFrequencyRow([]byte(entry.Terms[i]), uint16(*entry.Field), idBytes, 0, 0) key := tfr.Key() keys = append(keys, key) } } sort.Sort(keys) // first add all the stored rows storedRowPrefix := NewStoredRow(idBytes, 0, []uint64{}, 'x', []byte{}).ScanPrefixForDoc() dumpPrefix(i.kvreader, rv, storedRowPrefix) // now walk term keys in order and add them as well if len(keys) > 0 { it := i.kvreader.RangeIterator(keys[0], nil) defer func() { cerr := it.Close() if cerr != nil { rv <- cerr } }() for _, key := range keys { it.Seek(key) rkey, rval, valid := it.Current() if !valid { break } rck := make([]byte, len(rkey)) copy(rck, key) rcv := make([]byte, len(rval)) copy(rcv, rval) row, err := ParseFromKeyValue(rck, rcv) if err != nil { rv <- err return } rv <- row } } }() return rv }
[ "func", "(", "i", "*", "IndexReader", ")", "DumpDoc", "(", "id", "string", ")", "chan", "interface", "{", "}", "{", "idBytes", ":=", "[", "]", "byte", "(", "id", ")", "\n\n", "rv", ":=", "make", "(", "chan", "interface", "{", "}", ")", "\n\n", "go", "func", "(", ")", "{", "defer", "close", "(", "rv", ")", "\n\n", "back", ",", "err", ":=", "backIndexRowForDoc", "(", "i", ".", "kvreader", ",", "[", "]", "byte", "(", "id", ")", ")", "\n", "if", "err", "!=", "nil", "{", "rv", "<-", "err", "\n", "return", "\n", "}", "\n\n", "// no such doc", "if", "back", "==", "nil", "{", "return", "\n", "}", "\n", "// build sorted list of term keys", "keys", ":=", "make", "(", "keyset", ",", "0", ")", "\n", "for", "_", ",", "entry", ":=", "range", "back", ".", "termsEntries", "{", "for", "i", ":=", "range", "entry", ".", "Terms", "{", "tfr", ":=", "NewTermFrequencyRow", "(", "[", "]", "byte", "(", "entry", ".", "Terms", "[", "i", "]", ")", ",", "uint16", "(", "*", "entry", ".", "Field", ")", ",", "idBytes", ",", "0", ",", "0", ")", "\n", "key", ":=", "tfr", ".", "Key", "(", ")", "\n", "keys", "=", "append", "(", "keys", ",", "key", ")", "\n", "}", "\n", "}", "\n", "sort", ".", "Sort", "(", "keys", ")", "\n\n", "// first add all the stored rows", "storedRowPrefix", ":=", "NewStoredRow", "(", "idBytes", ",", "0", ",", "[", "]", "uint64", "{", "}", ",", "'x'", ",", "[", "]", "byte", "{", "}", ")", ".", "ScanPrefixForDoc", "(", ")", "\n", "dumpPrefix", "(", "i", ".", "kvreader", ",", "rv", ",", "storedRowPrefix", ")", "\n\n", "// now walk term keys in order and add them as well", "if", "len", "(", "keys", ")", ">", "0", "{", "it", ":=", "i", ".", "kvreader", ".", "RangeIterator", "(", "keys", "[", "0", "]", ",", "nil", ")", "\n", "defer", "func", "(", ")", "{", "cerr", ":=", "it", ".", "Close", "(", ")", "\n", "if", "cerr", "!=", "nil", "{", "rv", "<-", "cerr", "\n", "}", "\n", "}", "(", ")", "\n\n", "for", "_", ",", "key", ":=", "range", "keys", "{", "it", ".", "Seek", "(", "key", ")", "\n", "rkey", ",", "rval", ",", "valid", ":=", "it", ".", "Current", "(", ")", "\n", "if", "!", "valid", "{", "break", "\n", "}", "\n", "rck", ":=", "make", "(", "[", "]", "byte", ",", "len", "(", "rkey", ")", ")", "\n", "copy", "(", "rck", ",", "key", ")", "\n", "rcv", ":=", "make", "(", "[", "]", "byte", ",", "len", "(", "rval", ")", ")", "\n", "copy", "(", "rcv", ",", "rval", ")", "\n", "row", ",", "err", ":=", "ParseFromKeyValue", "(", "rck", ",", "rcv", ")", "\n", "if", "err", "!=", "nil", "{", "rv", "<-", "err", "\n", "return", "\n", "}", "\n", "rv", "<-", "row", "\n", "}", "\n", "}", "\n", "}", "(", ")", "\n\n", "return", "rv", "\n", "}" ]
// DumpDoc returns all rows in the index related to this doc id
[ "DumpDoc", "returns", "all", "rows", "in", "the", "index", "related", "to", "this", "doc", "id" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/upsidedown/dump.go#L110-L174
162,183
blevesearch/bleve
mapping/index.go
AddCustomCharFilter
func (im *IndexMappingImpl) AddCustomCharFilter(name string, config map[string]interface{}) error { _, err := im.cache.DefineCharFilter(name, config) if err != nil { return err } im.CustomAnalysis.CharFilters[name] = config return nil }
go
func (im *IndexMappingImpl) AddCustomCharFilter(name string, config map[string]interface{}) error { _, err := im.cache.DefineCharFilter(name, config) if err != nil { return err } im.CustomAnalysis.CharFilters[name] = config return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddCustomCharFilter", "(", "name", "string", ",", "config", "map", "[", "string", "]", "interface", "{", "}", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "DefineCharFilter", "(", "name", ",", "config", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "im", ".", "CustomAnalysis", ".", "CharFilters", "[", "name", "]", "=", "config", "\n", "return", "nil", "\n", "}" ]
// AddCustomCharFilter defines a custom char filter for use in this mapping
[ "AddCustomCharFilter", "defines", "a", "custom", "char", "filter", "for", "use", "in", "this", "mapping" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L59-L66
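A brief sketch of defining and using a custom char filter. The filter name is made up, and the "regexp" char filter type with its "regexp"/"replace" config keys, as well as AddCustomAnalyzer (not shown in these records), are assumed to match the stock bleve registrations.

package example

import "github.com/blevesearch/bleve"

func buildWikiMapping() error {
	im := bleve.NewIndexMapping()

	// replace pipe characters with spaces before tokenizing
	err := im.AddCustomCharFilter("strip_pipes", map[string]interface{}{
		"type":    "regexp",
		"regexp":  `\|`,
		"replace": " ",
	})
	if err != nil {
		return err
	}

	// reference the char filter from a custom analyzer
	return im.AddCustomAnalyzer("wiki_text", map[string]interface{}{
		"type":          "custom",
		"char_filters":  []string{"strip_pipes"},
		"tokenizer":     "unicode",
		"token_filters": []string{"to_lower"},
	})
}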
162,184
blevesearch/bleve
mapping/index.go
AddCustomTokenizer
func (im *IndexMappingImpl) AddCustomTokenizer(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenizer(name, config) if err != nil { return err } im.CustomAnalysis.Tokenizers[name] = config return nil }
go
func (im *IndexMappingImpl) AddCustomTokenizer(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenizer(name, config) if err != nil { return err } im.CustomAnalysis.Tokenizers[name] = config return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddCustomTokenizer", "(", "name", "string", ",", "config", "map", "[", "string", "]", "interface", "{", "}", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "DefineTokenizer", "(", "name", ",", "config", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "im", ".", "CustomAnalysis", ".", "Tokenizers", "[", "name", "]", "=", "config", "\n", "return", "nil", "\n", "}" ]
// AddCustomTokenizer defines a custom tokenizer for use in this mapping
[ "AddCustomTokenizer", "defines", "a", "custom", "tokenizer", "for", "use", "in", "this", "mapping" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L69-L76
162,185
blevesearch/bleve
mapping/index.go
AddCustomTokenMap
func (im *IndexMappingImpl) AddCustomTokenMap(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenMap(name, config) if err != nil { return err } im.CustomAnalysis.TokenMaps[name] = config return nil }
go
func (im *IndexMappingImpl) AddCustomTokenMap(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenMap(name, config) if err != nil { return err } im.CustomAnalysis.TokenMaps[name] = config return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddCustomTokenMap", "(", "name", "string", ",", "config", "map", "[", "string", "]", "interface", "{", "}", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "DefineTokenMap", "(", "name", ",", "config", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "im", ".", "CustomAnalysis", ".", "TokenMaps", "[", "name", "]", "=", "config", "\n", "return", "nil", "\n", "}" ]
// AddCustomTokenMap defines a custom token map for use in this mapping
[ "AddCustomTokenMap", "defines", "a", "custom", "token", "map", "for", "use", "in", "this", "mapping" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L79-L86
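Token maps are typically paired with a stop-token filter; a sketch assuming the stock "custom" token map and "stop_tokens" filter registrations and their config keys, with a hypothetical word list.

package example

import "github.com/blevesearch/bleve"

func buildStopWords() error {
	im := bleve.NewIndexMapping()

	// a hypothetical list of words to drop from brewery names
	err := im.AddCustomTokenMap("brewery_stop_words", map[string]interface{}{
		"type":   "custom",
		"tokens": []interface{}{"the", "brewing", "company"},
	})
	if err != nil {
		return err
	}

	// a stop filter that consumes the token map defined above
	return im.AddCustomTokenFilter("brewery_stop_filter", map[string]interface{}{
		"type":           "stop_tokens",
		"stop_token_map": "brewery_stop_words",
	})
}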
162,186
blevesearch/bleve
mapping/index.go
AddCustomTokenFilter
func (im *IndexMappingImpl) AddCustomTokenFilter(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenFilter(name, config) if err != nil { return err } im.CustomAnalysis.TokenFilters[name] = config return nil }
go
func (im *IndexMappingImpl) AddCustomTokenFilter(name string, config map[string]interface{}) error { _, err := im.cache.DefineTokenFilter(name, config) if err != nil { return err } im.CustomAnalysis.TokenFilters[name] = config return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddCustomTokenFilter", "(", "name", "string", ",", "config", "map", "[", "string", "]", "interface", "{", "}", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "DefineTokenFilter", "(", "name", ",", "config", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "im", ".", "CustomAnalysis", ".", "TokenFilters", "[", "name", "]", "=", "config", "\n", "return", "nil", "\n", "}" ]
// AddCustomTokenFilter defines a custom token filter for use in this mapping
[ "AddCustomTokenFilter", "defines", "a", "custom", "token", "filter", "for", "use", "in", "this", "mapping" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L89-L96
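Another common use is an edge n-gram filter for prefix matching; the "edge_ngram" type name and its "min"/"max" keys are assumed to be the stock registration, and the filter and analyzer names are made up.

package example

import "github.com/blevesearch/bleve"

func buildPrefixAnalyzer() error {
	im := bleve.NewIndexMapping()

	// emit 2- to 8-character leading n-grams for each token
	err := im.AddCustomTokenFilter("edge_2_8", map[string]interface{}{
		"type": "edge_ngram",
		"min":  2.0,
		"max":  8.0,
	})
	if err != nil {
		return err
	}

	return im.AddCustomAnalyzer("prefix_text", map[string]interface{}{
		"type":          "custom",
		"tokenizer":     "unicode",
		"token_filters": []string{"to_lower", "edge_2_8"},
	})
}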
162,187
blevesearch/bleve
mapping/index.go
AddCustomDateTimeParser
func (im *IndexMappingImpl) AddCustomDateTimeParser(name string, config map[string]interface{}) error { _, err := im.cache.DefineDateTimeParser(name, config) if err != nil { return err } im.CustomAnalysis.DateTimeParsers[name] = config return nil }
go
func (im *IndexMappingImpl) AddCustomDateTimeParser(name string, config map[string]interface{}) error { _, err := im.cache.DefineDateTimeParser(name, config) if err != nil { return err } im.CustomAnalysis.DateTimeParsers[name] = config return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddCustomDateTimeParser", "(", "name", "string", ",", "config", "map", "[", "string", "]", "interface", "{", "}", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "DefineDateTimeParser", "(", "name", ",", "config", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "im", ".", "CustomAnalysis", ".", "DateTimeParsers", "[", "name", "]", "=", "config", "\n", "return", "nil", "\n", "}" ]
// AddCustomDateTimeParser defines a custom date time parser for use in this mapping
[ "AddCustomDateTimeParser", "defines", "a", "custom", "date", "time", "parser", "for", "use", "in", "this", "mapping" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L137-L144
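A sketch of defining a date parser and pointing a field mapping at it. The "flexiblego" parser name, its "layouts" key, and the FieldMapping.DateFormat field are assumed to be the stock API, and the layouts and field names are hypothetical.

package example

import "github.com/blevesearch/bleve"

func buildDateMapping() error {
	im := bleve.NewIndexMapping()

	err := im.AddCustomDateTimeParser("eu_dates", map[string]interface{}{
		"type": "flexiblego",
		"layouts": []interface{}{
			"02/01/2006",
			"02/01/2006 15:04",
		},
	})
	if err != nil {
		return err
	}

	// use the parser for a hypothetical "published" field
	published := bleve.NewDateTimeFieldMapping()
	published.DateFormat = "eu_dates"

	doc := bleve.NewDocumentMapping()
	doc.AddFieldMappingsAt("published", published)
	im.AddDocumentMapping("article", doc)
	return nil
}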
162,188
blevesearch/bleve
mapping/index.go
NewIndexMapping
func NewIndexMapping() *IndexMappingImpl { return &IndexMappingImpl{ TypeMapping: make(map[string]*DocumentMapping), DefaultMapping: NewDocumentMapping(), TypeField: defaultTypeField, DefaultType: defaultType, DefaultAnalyzer: defaultAnalyzer, DefaultDateTimeParser: defaultDateTimeParser, DefaultField: defaultField, IndexDynamic: IndexDynamic, StoreDynamic: StoreDynamic, DocValuesDynamic: DocValuesDynamic, CustomAnalysis: newCustomAnalysis(), cache: registry.NewCache(), } }
go
func NewIndexMapping() *IndexMappingImpl { return &IndexMappingImpl{ TypeMapping: make(map[string]*DocumentMapping), DefaultMapping: NewDocumentMapping(), TypeField: defaultTypeField, DefaultType: defaultType, DefaultAnalyzer: defaultAnalyzer, DefaultDateTimeParser: defaultDateTimeParser, DefaultField: defaultField, IndexDynamic: IndexDynamic, StoreDynamic: StoreDynamic, DocValuesDynamic: DocValuesDynamic, CustomAnalysis: newCustomAnalysis(), cache: registry.NewCache(), } }
[ "func", "NewIndexMapping", "(", ")", "*", "IndexMappingImpl", "{", "return", "&", "IndexMappingImpl", "{", "TypeMapping", ":", "make", "(", "map", "[", "string", "]", "*", "DocumentMapping", ")", ",", "DefaultMapping", ":", "NewDocumentMapping", "(", ")", ",", "TypeField", ":", "defaultTypeField", ",", "DefaultType", ":", "defaultType", ",", "DefaultAnalyzer", ":", "defaultAnalyzer", ",", "DefaultDateTimeParser", ":", "defaultDateTimeParser", ",", "DefaultField", ":", "defaultField", ",", "IndexDynamic", ":", "IndexDynamic", ",", "StoreDynamic", ":", "StoreDynamic", ",", "DocValuesDynamic", ":", "DocValuesDynamic", ",", "CustomAnalysis", ":", "newCustomAnalysis", "(", ")", ",", "cache", ":", "registry", ".", "NewCache", "(", ")", ",", "}", "\n", "}" ]
// NewIndexMapping creates a new IndexMapping that will use all the default indexing rules
[ "NewIndexMapping", "creates", "a", "new", "IndexMapping", "that", "will", "use", "all", "the", "default", "indexing", "rules" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L147-L162
162,189
blevesearch/bleve
mapping/index.go
Validate
func (im *IndexMappingImpl) Validate() error { _, err := im.cache.AnalyzerNamed(im.DefaultAnalyzer) if err != nil { return err } _, err = im.cache.DateTimeParserNamed(im.DefaultDateTimeParser) if err != nil { return err } err = im.DefaultMapping.Validate(im.cache) if err != nil { return err } for _, docMapping := range im.TypeMapping { err = docMapping.Validate(im.cache) if err != nil { return err } } return nil }
go
func (im *IndexMappingImpl) Validate() error { _, err := im.cache.AnalyzerNamed(im.DefaultAnalyzer) if err != nil { return err } _, err = im.cache.DateTimeParserNamed(im.DefaultDateTimeParser) if err != nil { return err } err = im.DefaultMapping.Validate(im.cache) if err != nil { return err } for _, docMapping := range im.TypeMapping { err = docMapping.Validate(im.cache) if err != nil { return err } } return nil }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "Validate", "(", ")", "error", "{", "_", ",", "err", ":=", "im", ".", "cache", ".", "AnalyzerNamed", "(", "im", ".", "DefaultAnalyzer", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "_", ",", "err", "=", "im", ".", "cache", ".", "DateTimeParserNamed", "(", "im", ".", "DefaultDateTimeParser", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "err", "=", "im", ".", "DefaultMapping", ".", "Validate", "(", "im", ".", "cache", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "for", "_", ",", "docMapping", ":=", "range", "im", ".", "TypeMapping", "{", "err", "=", "docMapping", ".", "Validate", "(", "im", ".", "cache", ")", "\n", "if", "err", "!=", "nil", "{", "return", "err", "\n", "}", "\n", "}", "\n", "return", "nil", "\n", "}" ]
// Validate will walk the entire structure, ensuring that all // explicitly named and default analyzers can be built
[ "Validate", "will", "walk", "the", "entire", "structure", "ensuring", "that", "all", "explicitly", "named", "and", "default", "analyzers", "can", "be", "built" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L166-L186
162,190
blevesearch/bleve
mapping/index.go
AddDocumentMapping
func (im *IndexMappingImpl) AddDocumentMapping(doctype string, dm *DocumentMapping) { im.TypeMapping[doctype] = dm }
go
func (im *IndexMappingImpl) AddDocumentMapping(doctype string, dm *DocumentMapping) { im.TypeMapping[doctype] = dm }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AddDocumentMapping", "(", "doctype", "string", ",", "dm", "*", "DocumentMapping", ")", "{", "im", ".", "TypeMapping", "[", "doctype", "]", "=", "dm", "\n", "}" ]
// AddDocumentMapping sets a custom document mapping for the specified type
[ "AddDocumentMapping", "sets", "a", "custom", "document", "mapping", "for", "the", "specified", "type" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L189-L191
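Putting the preceding mapping entries together, a typical setup defines a per-type document mapping, registers it, validates the mapping, and opens an index. The type name, field names, and index path below are hypothetical, and "en" is assumed to be the stock English analyzer registration.

package example

import "github.com/blevesearch/bleve"

func openBeerIndex() (bleve.Index, error) {
	im := bleve.NewIndexMapping()

	name := bleve.NewTextFieldMapping()
	name.Analyzer = "en"

	beer := bleve.NewDocumentMapping()
	beer.AddFieldMappingsAt("name", name)

	im.AddDocumentMapping("beer", beer)
	im.TypeField = "type" // documents carry their type in a "type" field

	// catch misconfigured analyzers/parsers before building the index
	if err := im.Validate(); err != nil {
		return nil, err
	}
	return bleve.New("beers.bleve", im)
}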
162,191
blevesearch/bleve
mapping/index.go
AnalyzerNameForPath
func (im *IndexMappingImpl) AnalyzerNameForPath(path string) string { // first we look for explicit mapping on the field for _, docMapping := range im.TypeMapping { analyzerName := docMapping.analyzerNameForPath(path) if analyzerName != "" { return analyzerName } } // now try the default mapping pathMapping := im.DefaultMapping.documentMappingForPath(path) if pathMapping != nil { if len(pathMapping.Fields) > 0 { if pathMapping.Fields[0].Analyzer != "" { return pathMapping.Fields[0].Analyzer } } } // next we will try default analyzers for the path pathDecoded := decodePath(path) for _, docMapping := range im.TypeMapping { rv := docMapping.defaultAnalyzerName(pathDecoded) if rv != "" { return rv } } return im.DefaultAnalyzer }
go
func (im *IndexMappingImpl) AnalyzerNameForPath(path string) string { // first we look for explicit mapping on the field for _, docMapping := range im.TypeMapping { analyzerName := docMapping.analyzerNameForPath(path) if analyzerName != "" { return analyzerName } } // now try the default mapping pathMapping := im.DefaultMapping.documentMappingForPath(path) if pathMapping != nil { if len(pathMapping.Fields) > 0 { if pathMapping.Fields[0].Analyzer != "" { return pathMapping.Fields[0].Analyzer } } } // next we will try default analyzers for the path pathDecoded := decodePath(path) for _, docMapping := range im.TypeMapping { rv := docMapping.defaultAnalyzerName(pathDecoded) if rv != "" { return rv } } return im.DefaultAnalyzer }
[ "func", "(", "im", "*", "IndexMappingImpl", ")", "AnalyzerNameForPath", "(", "path", "string", ")", "string", "{", "// first we look for explicit mapping on the field", "for", "_", ",", "docMapping", ":=", "range", "im", ".", "TypeMapping", "{", "analyzerName", ":=", "docMapping", ".", "analyzerNameForPath", "(", "path", ")", "\n", "if", "analyzerName", "!=", "\"", "\"", "{", "return", "analyzerName", "\n", "}", "\n", "}", "\n", "// now try the default mapping", "pathMapping", ":=", "im", ".", "DefaultMapping", ".", "documentMappingForPath", "(", "path", ")", "\n", "if", "pathMapping", "!=", "nil", "{", "if", "len", "(", "pathMapping", ".", "Fields", ")", ">", "0", "{", "if", "pathMapping", ".", "Fields", "[", "0", "]", ".", "Analyzer", "!=", "\"", "\"", "{", "return", "pathMapping", ".", "Fields", "[", "0", "]", ".", "Analyzer", "\n", "}", "\n", "}", "\n", "}", "\n\n", "// next we will try default analyzers for the path", "pathDecoded", ":=", "decodePath", "(", "path", ")", "\n", "for", "_", ",", "docMapping", ":=", "range", "im", ".", "TypeMapping", "{", "rv", ":=", "docMapping", ".", "defaultAnalyzerName", "(", "pathDecoded", ")", "\n", "if", "rv", "!=", "\"", "\"", "{", "return", "rv", "\n", "}", "\n", "}", "\n\n", "return", "im", ".", "DefaultAnalyzer", "\n", "}" ]
// AnalyzerNameForPath attempts to find the best analyzer to use given only a // field path. It walks all the document types, looking for a field mapping at the // provided path; if one exists and it has an explicit analyzer, that analyzer is // returned.
[ "AnalyzerNameForPath", "attempts", "to", "find", "the", "best", "analyzer", "to", "use", "given", "only", "a", "field", "path", ".", "It", "walks", "all", "the", "document", "types", "looking", "for", "a", "field", "mapping", "at", "the", "provided", "path", "if", "one", "exists", "and", "it", "has", "an", "explicit", "analyzer", "that", "analyzer", "is", "returned", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/mapping/index.go#L358-L386
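A short sketch of the resolution order, reusing the kind of hypothetical mapping built in the previous example: an explicitly mapped path returns its analyzer, anything else falls back to the default.

package main

import (
	"fmt"

	"github.com/blevesearch/bleve"
)

func main() {
	im := bleve.NewIndexMapping()

	name := bleve.NewTextFieldMapping()
	name.Analyzer = "en"
	beer := bleve.NewDocumentMapping()
	beer.AddFieldMappingsAt("name", name)
	im.AddDocumentMapping("beer", beer)

	fmt.Println(im.AnalyzerNameForPath("name"))    // "en", from the explicit field mapping
	fmt.Println(im.AnalyzerNameForPath("brewery")) // no mapping: falls back to im.DefaultAnalyzer
}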
162,192
blevesearch/bleve
index/scorch/segment/zap/posting.go
Iterator
func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, prealloc segment.PostingsIterator) segment.PostingsIterator { if p.normBits1Hit == 0 && p.postings == nil { return emptyPostingsIterator } var preallocPI *PostingsIterator pi, ok := prealloc.(*PostingsIterator) if ok && pi != nil { preallocPI = pi } if preallocPI == emptyPostingsIterator { preallocPI = nil } return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) }
go
func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, prealloc segment.PostingsIterator) segment.PostingsIterator { if p.normBits1Hit == 0 && p.postings == nil { return emptyPostingsIterator } var preallocPI *PostingsIterator pi, ok := prealloc.(*PostingsIterator) if ok && pi != nil { preallocPI = pi } if preallocPI == emptyPostingsIterator { preallocPI = nil } return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) }
[ "func", "(", "p", "*", "PostingsList", ")", "Iterator", "(", "includeFreq", ",", "includeNorm", ",", "includeLocs", "bool", ",", "prealloc", "segment", ".", "PostingsIterator", ")", "segment", ".", "PostingsIterator", "{", "if", "p", ".", "normBits1Hit", "==", "0", "&&", "p", ".", "postings", "==", "nil", "{", "return", "emptyPostingsIterator", "\n", "}", "\n\n", "var", "preallocPI", "*", "PostingsIterator", "\n", "pi", ",", "ok", ":=", "prealloc", ".", "(", "*", "PostingsIterator", ")", "\n", "if", "ok", "&&", "pi", "!=", "nil", "{", "preallocPI", "=", "pi", "\n", "}", "\n", "if", "preallocPI", "==", "emptyPostingsIterator", "{", "preallocPI", "=", "nil", "\n", "}", "\n\n", "return", "p", ".", "iterator", "(", "includeFreq", ",", "includeNorm", ",", "includeLocs", ",", "preallocPI", ")", "\n", "}" ]
// Iterator returns an iterator for this postings list
[ "Iterator", "returns", "an", "iterator", "for", "this", "postings", "list" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L137-L153
162,193
blevesearch/bleve
index/scorch/segment/zap/posting.go
Count
func (p *PostingsList) Count() uint64 { var n, e uint64 if p.normBits1Hit != 0 { n = 1 if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { e = 1 } } else if p.postings != nil { n = p.postings.GetCardinality() if p.except != nil { e = p.postings.AndCardinality(p.except) } } return n - e }
go
func (p *PostingsList) Count() uint64 { var n, e uint64 if p.normBits1Hit != 0 { n = 1 if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { e = 1 } } else if p.postings != nil { n = p.postings.GetCardinality() if p.except != nil { e = p.postings.AndCardinality(p.except) } } return n - e }
[ "func", "(", "p", "*", "PostingsList", ")", "Count", "(", ")", "uint64", "{", "var", "n", ",", "e", "uint64", "\n", "if", "p", ".", "normBits1Hit", "!=", "0", "{", "n", "=", "1", "\n", "if", "p", ".", "except", "!=", "nil", "&&", "p", ".", "except", ".", "Contains", "(", "uint32", "(", "p", ".", "docNum1Hit", ")", ")", "{", "e", "=", "1", "\n", "}", "\n", "}", "else", "if", "p", ".", "postings", "!=", "nil", "{", "n", "=", "p", ".", "postings", ".", "GetCardinality", "(", ")", "\n", "if", "p", ".", "except", "!=", "nil", "{", "e", "=", "p", ".", "postings", ".", "AndCardinality", "(", "p", ".", "except", ")", "\n", "}", "\n", "}", "\n", "return", "n", "-", "e", "\n", "}" ]
// Count returns the number of items on this postings list
[ "Count", "returns", "the", "number", "of", "items", "on", "this", "postings", "list" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L264-L278
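A sketch of consuming a postings list through the segment interfaces, taking the list as a parameter since obtaining one (via a segment's term dictionary) is outside the scope of these records; the interface methods used (Count, Iterator, Next, Number, Frequency, Norm, Locations) are assumed to match the scorch segment API at this revision.

package example

import (
	"fmt"

	"github.com/blevesearch/bleve/index/scorch/segment"
)

// walkPostings prints every hit on an already-obtained postings list.
func walkPostings(pl segment.PostingsList) error {
	fmt.Println("hits:", pl.Count())

	itr := pl.Iterator(true, true, true, nil) // include freq, norm and locations
	for {
		p, err := itr.Next()
		if err != nil {
			return err
		}
		if p == nil {
			break // end of the postings list
		}
		fmt.Println("doc", p.Number(), "freq", p.Frequency(), "norm", p.Norm())
		for _, loc := range p.Locations() {
			fmt.Println("  ", loc.Field(), loc.Pos(), loc.Start(), loc.End())
		}
	}
	return nil
}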
162,194
blevesearch/bleve
index/scorch/segment/zap/posting.go
readLocation
func (i *PostingsIterator) readLocation(l *Location) error { // read off field fieldID, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location field: %v", err) } // read off pos pos, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location pos: %v", err) } // read off start start, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location start: %v", err) } // read off end end, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location end: %v", err) } // read off num array pos numArrayPos, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location num array pos: %v", err) } l.field = i.postings.sb.fieldsInv[fieldID] l.pos = pos l.start = start l.end = end if cap(l.ap) < int(numArrayPos) { l.ap = make([]uint64, int(numArrayPos)) } else { l.ap = l.ap[:int(numArrayPos)] } // read off array positions for k := 0; k < int(numArrayPos); k++ { ap, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading array position: %v", err) } l.ap[k] = ap } return nil }
go
func (i *PostingsIterator) readLocation(l *Location) error { // read off field fieldID, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location field: %v", err) } // read off pos pos, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location pos: %v", err) } // read off start start, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location start: %v", err) } // read off end end, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location end: %v", err) } // read off num array pos numArrayPos, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading location num array pos: %v", err) } l.field = i.postings.sb.fieldsInv[fieldID] l.pos = pos l.start = start l.end = end if cap(l.ap) < int(numArrayPos) { l.ap = make([]uint64, int(numArrayPos)) } else { l.ap = l.ap[:int(numArrayPos)] } // read off array positions for k := 0; k < int(numArrayPos); k++ { ap, err := i.locReader.ReadUvarint() if err != nil { return fmt.Errorf("error reading array position: %v", err) } l.ap[k] = ap } return nil }
[ "func", "(", "i", "*", "PostingsIterator", ")", "readLocation", "(", "l", "*", "Location", ")", "error", "{", "// read off field", "fieldID", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n", "// read off pos", "pos", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n", "// read off start", "start", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n", "// read off end", "end", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n", "// read off num array pos", "numArrayPos", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n\n", "l", ".", "field", "=", "i", ".", "postings", ".", "sb", ".", "fieldsInv", "[", "fieldID", "]", "\n", "l", ".", "pos", "=", "pos", "\n", "l", ".", "start", "=", "start", "\n", "l", ".", "end", "=", "end", "\n\n", "if", "cap", "(", "l", ".", "ap", ")", "<", "int", "(", "numArrayPos", ")", "{", "l", ".", "ap", "=", "make", "(", "[", "]", "uint64", ",", "int", "(", "numArrayPos", ")", ")", "\n", "}", "else", "{", "l", ".", "ap", "=", "l", ".", "ap", "[", ":", "int", "(", "numArrayPos", ")", "]", "\n", "}", "\n\n", "// read off array positions", "for", "k", ":=", "0", ";", "k", "<", "int", "(", "numArrayPos", ")", ";", "k", "++", "{", "ap", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n\n", "l", ".", "ap", "[", "k", "]", "=", "ap", "\n", "}", "\n\n", "return", "nil", "\n", "}" ]
// readLocation processes all the integers on the stream representing a single // location.
[ "readLocation", "processes", "all", "the", "integers", "on", "the", "stream", "representing", "a", "single", "location", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L466-L515
162,195
blevesearch/bleve
index/scorch/segment/zap/posting.go
Advance
func (i *PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { return i.nextAtOrAfter(docNum) }
go
func (i *PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { return i.nextAtOrAfter(docNum) }
[ "func", "(", "i", "*", "PostingsIterator", ")", "Advance", "(", "docNum", "uint64", ")", "(", "segment", ".", "Posting", ",", "error", ")", "{", "return", "i", ".", "nextAtOrAfter", "(", "docNum", ")", "\n", "}" ]
// Advance returns the posting at the specified docNum, or if it is not present, // the next posting, or if the end is reached, nil
[ "Advance", "returns", "the", "posting", "at", "the", "specified", "docNum", "or", "if", "it", "is", "not", "present", "the", "next", "posting", "or", "if", "the", "end", "is", "reached", "nil" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L524-L526
162,196
blevesearch/bleve
index/scorch/segment/zap/posting.go
nextAtOrAfter
func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) if err != nil || !exists { return nil, err } i.next = Posting{} // clear the struct rv := &i.next rv.docNum = docNum if !i.includeFreqNorm { return rv, nil } var normBits uint64 var hasLocs bool rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() if err != nil { return nil, err } rv.norm = math.Float32frombits(uint32(normBits)) if i.includeLocs && hasLocs { // prepare locations into reused slices, where we assume // rv.freq >= "number of locs", since in a composite field, // some component fields might have their IncludeTermVector // flags disabled while other component fields are enabled if cap(i.nextLocs) >= int(rv.freq) { i.nextLocs = i.nextLocs[0:rv.freq] } else { i.nextLocs = make([]Location, rv.freq, rv.freq*2) } if cap(i.nextSegmentLocs) < int(rv.freq) { i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) } rv.locs = i.nextSegmentLocs[:0] numLocsBytes, err := i.locReader.ReadUvarint() if err != nil { return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) } j := 0 startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { err := i.readLocation(&i.nextLocs[j]) if err != nil { return nil, err } rv.locs = append(rv.locs, &i.nextLocs[j]) j++ } } return rv, nil }
go
func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) if err != nil || !exists { return nil, err } i.next = Posting{} // clear the struct rv := &i.next rv.docNum = docNum if !i.includeFreqNorm { return rv, nil } var normBits uint64 var hasLocs bool rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() if err != nil { return nil, err } rv.norm = math.Float32frombits(uint32(normBits)) if i.includeLocs && hasLocs { // prepare locations into reused slices, where we assume // rv.freq >= "number of locs", since in a composite field, // some component fields might have their IncludeTermVector // flags disabled while other component fields are enabled if cap(i.nextLocs) >= int(rv.freq) { i.nextLocs = i.nextLocs[0:rv.freq] } else { i.nextLocs = make([]Location, rv.freq, rv.freq*2) } if cap(i.nextSegmentLocs) < int(rv.freq) { i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) } rv.locs = i.nextSegmentLocs[:0] numLocsBytes, err := i.locReader.ReadUvarint() if err != nil { return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) } j := 0 startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { err := i.readLocation(&i.nextLocs[j]) if err != nil { return nil, err } rv.locs = append(rv.locs, &i.nextLocs[j]) j++ } } return rv, nil }
[ "func", "(", "i", "*", "PostingsIterator", ")", "nextAtOrAfter", "(", "atOrAfter", "uint64", ")", "(", "segment", ".", "Posting", ",", "error", ")", "{", "docNum", ",", "exists", ",", "err", ":=", "i", ".", "nextDocNumAtOrAfter", "(", "atOrAfter", ")", "\n", "if", "err", "!=", "nil", "||", "!", "exists", "{", "return", "nil", ",", "err", "\n", "}", "\n\n", "i", ".", "next", "=", "Posting", "{", "}", "// clear the struct", "\n", "rv", ":=", "&", "i", ".", "next", "\n", "rv", ".", "docNum", "=", "docNum", "\n\n", "if", "!", "i", ".", "includeFreqNorm", "{", "return", "rv", ",", "nil", "\n", "}", "\n\n", "var", "normBits", "uint64", "\n", "var", "hasLocs", "bool", "\n\n", "rv", ".", "freq", ",", "normBits", ",", "hasLocs", ",", "err", "=", "i", ".", "readFreqNormHasLocs", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n\n", "rv", ".", "norm", "=", "math", ".", "Float32frombits", "(", "uint32", "(", "normBits", ")", ")", "\n\n", "if", "i", ".", "includeLocs", "&&", "hasLocs", "{", "// prepare locations into reused slices, where we assume", "// rv.freq >= \"number of locs\", since in a composite field,", "// some component fields might have their IncludeTermVector", "// flags disabled while other component fields are enabled", "if", "cap", "(", "i", ".", "nextLocs", ")", ">=", "int", "(", "rv", ".", "freq", ")", "{", "i", ".", "nextLocs", "=", "i", ".", "nextLocs", "[", "0", ":", "rv", ".", "freq", "]", "\n", "}", "else", "{", "i", ".", "nextLocs", "=", "make", "(", "[", "]", "Location", ",", "rv", ".", "freq", ",", "rv", ".", "freq", "*", "2", ")", "\n", "}", "\n", "if", "cap", "(", "i", ".", "nextSegmentLocs", ")", "<", "int", "(", "rv", ".", "freq", ")", "{", "i", ".", "nextSegmentLocs", "=", "make", "(", "[", "]", "segment", ".", "Location", ",", "rv", ".", "freq", ",", "rv", ".", "freq", "*", "2", ")", "\n", "}", "\n", "rv", ".", "locs", "=", "i", ".", "nextSegmentLocs", "[", ":", "0", "]", "\n\n", "numLocsBytes", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n\n", "j", ":=", "0", "\n", "startBytesRemaining", ":=", "i", ".", "locReader", ".", "Len", "(", ")", "// # bytes remaining in the locReader", "\n", "for", "startBytesRemaining", "-", "i", ".", "locReader", ".", "Len", "(", ")", "<", "int", "(", "numLocsBytes", ")", "{", "err", ":=", "i", ".", "readLocation", "(", "&", "i", ".", "nextLocs", "[", "j", "]", ")", "\n", "if", "err", "!=", "nil", "{", "return", "nil", ",", "err", "\n", "}", "\n", "rv", ".", "locs", "=", "append", "(", "rv", ".", "locs", ",", "&", "i", ".", "nextLocs", "[", "j", "]", ")", "\n", "j", "++", "\n", "}", "\n", "}", "\n\n", "return", "rv", ",", "nil", "\n", "}" ]
// Next returns the next posting on the postings list, or nil at the end
[ "Next", "returns", "the", "next", "posting", "on", "the", "postings", "list", "or", "nil", "at", "the", "end" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L529-L586
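The location-decoding loop in nextAtOrAfter is driven by a byte budget rather than an element count: it keeps reading locations until numLocsBytes bytes have been consumed from the location reader, tracked as startBytesRemaining - i.locReader.Len(). A minimal, self-contained sketch of that pattern follows; it uses the standard library's bytes.Reader and encoding/binary instead of zap's internal reader, and the record layout here is an illustrative assumption, not the actual zap segment format.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// decodeBlock reads uvarint values until exactly budget bytes of the
// reader have been consumed, mirroring the byte-budget loop used for
// location decoding (hypothetical layout, not the real zap encoding).
func decodeBlock(r *bytes.Reader, budget int) ([]uint64, error) {
	var out []uint64
	start := r.Len() // bytes remaining before we begin
	for start-r.Len() < budget {
		v, err := binary.ReadUvarint(r)
		if err != nil {
			return nil, err
		}
		out = append(out, v)
	}
	return out, nil
}

func main() {
	// Encode three uvarints back-to-back; their total length is the budget.
	var payload []byte
	tmp := make([]byte, binary.MaxVarintLen64)
	for _, v := range []uint64{7, 300, 42} {
		n := binary.PutUvarint(tmp, v)
		payload = append(payload, tmp[:n]...)
	}

	r := bytes.NewReader(payload)
	vals, err := decodeBlock(r, len(payload))
	if err != nil {
		panic(err)
	}
	fmt.Println(vals) // [7 300 42]
}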
162,197
blevesearch/bleve
index/scorch/segment/zap/posting.go
nextBytes
func (i *PostingsIterator) nextBytes() (
	docNumOut uint64, freq uint64, normBits uint64,
	bytesFreqNorm []byte, bytesLoc []byte, err error) {
	docNum, exists, err := i.nextDocNumAtOrAfter(0)
	if err != nil || !exists {
		return 0, 0, 0, nil, nil, err
	}

	if i.normBits1Hit != 0 {
		if i.buf == nil {
			i.buf = make([]byte, binary.MaxVarintLen64*2)
		}
		n := binary.PutUvarint(i.buf, freqHasLocs1Hit)
		n += binary.PutUvarint(i.buf[n:], i.normBits1Hit)
		return docNum, uint64(1), i.normBits1Hit, i.buf[:n], nil, nil
	}

	startFreqNorm := len(i.currChunkFreqNorm) - i.freqNormReader.Len()

	var hasLocs bool

	freq, normBits, hasLocs, err = i.readFreqNormHasLocs()
	if err != nil {
		return 0, 0, 0, nil, nil, err
	}

	endFreqNorm := len(i.currChunkFreqNorm) - i.freqNormReader.Len()
	bytesFreqNorm = i.currChunkFreqNorm[startFreqNorm:endFreqNorm]

	if hasLocs {
		startLoc := len(i.currChunkLoc) - i.locReader.Len()

		numLocsBytes, err := i.locReader.ReadUvarint()
		if err != nil {
			return 0, 0, 0, nil, nil, fmt.Errorf("error reading location nextBytes numLocs: %v", err)
		}

		// skip over all the location bytes
		i.locReader.SkipBytes(int(numLocsBytes))

		endLoc := len(i.currChunkLoc) - i.locReader.Len()
		bytesLoc = i.currChunkLoc[startLoc:endLoc]
	}

	return docNum, freq, normBits, bytesFreqNorm, bytesLoc, nil
}
go
func (i *PostingsIterator) nextBytes() (
	docNumOut uint64, freq uint64, normBits uint64,
	bytesFreqNorm []byte, bytesLoc []byte, err error) {
	docNum, exists, err := i.nextDocNumAtOrAfter(0)
	if err != nil || !exists {
		return 0, 0, 0, nil, nil, err
	}

	if i.normBits1Hit != 0 {
		if i.buf == nil {
			i.buf = make([]byte, binary.MaxVarintLen64*2)
		}
		n := binary.PutUvarint(i.buf, freqHasLocs1Hit)
		n += binary.PutUvarint(i.buf[n:], i.normBits1Hit)
		return docNum, uint64(1), i.normBits1Hit, i.buf[:n], nil, nil
	}

	startFreqNorm := len(i.currChunkFreqNorm) - i.freqNormReader.Len()

	var hasLocs bool

	freq, normBits, hasLocs, err = i.readFreqNormHasLocs()
	if err != nil {
		return 0, 0, 0, nil, nil, err
	}

	endFreqNorm := len(i.currChunkFreqNorm) - i.freqNormReader.Len()
	bytesFreqNorm = i.currChunkFreqNorm[startFreqNorm:endFreqNorm]

	if hasLocs {
		startLoc := len(i.currChunkLoc) - i.locReader.Len()

		numLocsBytes, err := i.locReader.ReadUvarint()
		if err != nil {
			return 0, 0, 0, nil, nil, fmt.Errorf("error reading location nextBytes numLocs: %v", err)
		}

		// skip over all the location bytes
		i.locReader.SkipBytes(int(numLocsBytes))

		endLoc := len(i.currChunkLoc) - i.locReader.Len()
		bytesLoc = i.currChunkLoc[startLoc:endLoc]
	}

	return docNum, freq, normBits, bytesFreqNorm, bytesLoc, nil
}
[ "func", "(", "i", "*", "PostingsIterator", ")", "nextBytes", "(", ")", "(", "docNumOut", "uint64", ",", "freq", "uint64", ",", "normBits", "uint64", ",", "bytesFreqNorm", "[", "]", "byte", ",", "bytesLoc", "[", "]", "byte", ",", "err", "error", ")", "{", "docNum", ",", "exists", ",", "err", ":=", "i", ".", "nextDocNumAtOrAfter", "(", "0", ")", "\n", "if", "err", "!=", "nil", "||", "!", "exists", "{", "return", "0", ",", "0", ",", "0", ",", "nil", ",", "nil", ",", "err", "\n", "}", "\n\n", "if", "i", ".", "normBits1Hit", "!=", "0", "{", "if", "i", ".", "buf", "==", "nil", "{", "i", ".", "buf", "=", "make", "(", "[", "]", "byte", ",", "binary", ".", "MaxVarintLen64", "*", "2", ")", "\n", "}", "\n", "n", ":=", "binary", ".", "PutUvarint", "(", "i", ".", "buf", ",", "freqHasLocs1Hit", ")", "\n", "n", "+=", "binary", ".", "PutUvarint", "(", "i", ".", "buf", "[", "n", ":", "]", ",", "i", ".", "normBits1Hit", ")", "\n", "return", "docNum", ",", "uint64", "(", "1", ")", ",", "i", ".", "normBits1Hit", ",", "i", ".", "buf", "[", ":", "n", "]", ",", "nil", ",", "nil", "\n", "}", "\n\n", "startFreqNorm", ":=", "len", "(", "i", ".", "currChunkFreqNorm", ")", "-", "i", ".", "freqNormReader", ".", "Len", "(", ")", "\n\n", "var", "hasLocs", "bool", "\n\n", "freq", ",", "normBits", ",", "hasLocs", ",", "err", "=", "i", ".", "readFreqNormHasLocs", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "0", ",", "0", ",", "0", ",", "nil", ",", "nil", ",", "err", "\n", "}", "\n\n", "endFreqNorm", ":=", "len", "(", "i", ".", "currChunkFreqNorm", ")", "-", "i", ".", "freqNormReader", ".", "Len", "(", ")", "\n", "bytesFreqNorm", "=", "i", ".", "currChunkFreqNorm", "[", "startFreqNorm", ":", "endFreqNorm", "]", "\n\n", "if", "hasLocs", "{", "startLoc", ":=", "len", "(", "i", ".", "currChunkLoc", ")", "-", "i", ".", "locReader", ".", "Len", "(", ")", "\n\n", "numLocsBytes", ",", "err", ":=", "i", ".", "locReader", ".", "ReadUvarint", "(", ")", "\n", "if", "err", "!=", "nil", "{", "return", "0", ",", "0", ",", "0", ",", "nil", ",", "nil", ",", "fmt", ".", "Errorf", "(", "\"", "\"", ",", "err", ")", "\n", "}", "\n\n", "// skip over all the location bytes", "i", ".", "locReader", ".", "SkipBytes", "(", "int", "(", "numLocsBytes", ")", ")", "\n\n", "endLoc", ":=", "len", "(", "i", ".", "currChunkLoc", ")", "-", "i", ".", "locReader", ".", "Len", "(", ")", "\n", "bytesLoc", "=", "i", ".", "currChunkLoc", "[", "startLoc", ":", "endLoc", "]", "\n", "}", "\n\n", "return", "docNum", ",", "freq", ",", "normBits", ",", "bytesFreqNorm", ",", "bytesLoc", ",", "nil", "\n", "}" ]
// nextBytes returns the docNum and the encoded freq & loc bytes for // the next posting
[ "nextBytes", "returns", "the", "docNum", "and", "the", "encoded", "freq", "&", "loc", "bytes", "for", "the", "next", "posting" ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L592-L638
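The 1-hit fast path in nextBytes packs two values back-to-back as uvarints into a reusable buffer with binary.PutUvarint and returns the filled prefix. The sketch below reproduces only that encoding shape; the sentinel value used for freqHasLocs1Hit is a placeholder assumption, not zap's real constant.

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// Two values encoded consecutively into one reusable buffer, the same
	// shape as the 1-hit branch of nextBytes. The concrete values are
	// placeholders for illustration, not zap's actual encoding.
	const freqHasLocs1Hit = uint64(0x02) // assumed sentinel
	normBits1Hit := uint64(0x3f800000)   // float32 bits of 1.0

	buf := make([]byte, binary.MaxVarintLen64*2)
	n := binary.PutUvarint(buf, freqHasLocs1Hit)
	n += binary.PutUvarint(buf[n:], normBits1Hit)
	encoded := buf[:n]

	// Decoding simply reverses the two uvarint reads.
	v1, sz1 := binary.Uvarint(encoded)
	v2, _ := binary.Uvarint(encoded[sz1:])
	fmt.Printf("%d bytes: %#x %#x\n", len(encoded), v1, v2)
}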
162,198
blevesearch/bleve
index/scorch/segment/zap/posting.go
DocNum1Hit
func (p *PostingsIterator) DocNum1Hit() (uint64, bool) {
	if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished {
		return p.docNum1Hit, true
	}
	return 0, false
}
go
func (p *PostingsIterator) DocNum1Hit() (uint64, bool) {
	if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished {
		return p.docNum1Hit, true
	}
	return 0, false
}
[ "func", "(", "p", "*", "PostingsIterator", ")", "DocNum1Hit", "(", ")", "(", "uint64", ",", "bool", ")", "{", "if", "p", ".", "normBits1Hit", "!=", "0", "&&", "p", ".", "docNum1Hit", "!=", "DocNum1HitFinished", "{", "return", "p", ".", "docNum1Hit", ",", "true", "\n", "}", "\n", "return", "0", ",", "false", "\n", "}" ]
// DocNum1Hit returns the docNum and true if this is "1-hit" optimized // and the docNum is available.
[ "DocNum1Hit", "returns", "the", "docNum", "and", "true", "if", "this", "is", "1", "-", "hit", "optimized", "and", "the", "docNum", "is", "available", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L792-L797
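DocNum1Hit combines a sentinel constant (DocNum1HitFinished) with a comma-ok return to report whether the single-document fast path applies. The sketch below mirrors that shape with hypothetical names and an assumed sentinel value; it is not part of the bleve API.

package main

import (
	"fmt"
	"math"
)

// finished marks an exhausted single-hit iterator, analogous to
// DocNum1HitFinished (the concrete value is assumed for illustration).
const finished = math.MaxUint64

type oneHit struct {
	active bool   // plays the role of normBits1Hit != 0
	docNum uint64 // current doc number, or finished
}

// DocNum reports the doc number and whether the 1-hit fast path applies.
func (o *oneHit) DocNum() (uint64, bool) {
	if o.active && o.docNum != finished {
		return o.docNum, true
	}
	return 0, false
}

func main() {
	h := &oneHit{active: true, docNum: 42}
	if d, ok := h.DocNum(); ok {
		fmt.Println("1-hit doc:", d)
	}
	h.docNum = finished
	_, ok := h.DocNum()
	fmt.Println("after exhaustion ok =", ok)
}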
162,199
blevesearch/bleve
index/scorch/segment/zap/posting.go
PostingsIteratorFromBitmap
func PostingsIteratorFromBitmap(bm *roaring.Bitmap,
	includeFreqNorm, includeLocs bool) (*PostingsIterator, error) {
	return &PostingsIterator{
		ActualBM:        bm,
		Actual:          bm.Iterator(),
		includeFreqNorm: includeFreqNorm,
		includeLocs:     includeLocs,
	}, nil
}
go
func PostingsIteratorFromBitmap(bm *roaring.Bitmap,
	includeFreqNorm, includeLocs bool) (*PostingsIterator, error) {
	return &PostingsIterator{
		ActualBM:        bm,
		Actual:          bm.Iterator(),
		includeFreqNorm: includeFreqNorm,
		includeLocs:     includeLocs,
	}, nil
}
[ "func", "PostingsIteratorFromBitmap", "(", "bm", "*", "roaring", ".", "Bitmap", ",", "includeFreqNorm", ",", "includeLocs", "bool", ")", "(", "*", "PostingsIterator", ",", "error", ")", "{", "return", "&", "PostingsIterator", "{", "ActualBM", ":", "bm", ",", "Actual", ":", "bm", ".", "Iterator", "(", ")", ",", "includeFreqNorm", ":", "includeFreqNorm", ",", "includeLocs", ":", "includeLocs", ",", "}", ",", "nil", "\n", "}" ]
// PostingsIteratorFromBitmap constructs a PostingsIterator given an // "actual" bitmap.
[ "PostingsIteratorFromBitmap", "constructs", "a", "PostingsIterator", "given", "an", "actual", "bitmap", "." ]
7f3a218ae72960bb4841254833a52a5f088a9928
https://github.com/blevesearch/bleve/blob/7f3a218ae72960bb4841254833a52a5f088a9928/index/scorch/segment/zap/posting.go#L801-L809
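PostingsIteratorFromBitmap wraps a roaring bitmap together with the iterator obtained from it. As a rough sketch of the underlying pieces (the iterator's fields are unexported, so this is not a drop-in usage of the function itself), the following walks a github.com/RoaringBitmap/roaring bitmap the way the wrapped Actual iterator is walked.

package main

import (
	"fmt"

	"github.com/RoaringBitmap/roaring"
)

func main() {
	// A bitmap of internal doc numbers, like the "actual" postings bitmap
	// handed to PostingsIteratorFromBitmap.
	bm := roaring.BitmapOf(3, 7, 7, 42, 1000)

	it := bm.Iterator()
	for it.HasNext() {
		docNum := it.Next() // uint32 doc numbers in ascending order
		fmt.Println("doc:", docNum)
	}

	fmt.Println("cardinality:", bm.GetCardinality())
}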