Dataset schema: id (int32, 0-167k) | repo (string, 5-54 chars) | path (string, 4-155 chars) | func_name (string, 1-118 chars) | original_string (string, 52-85.5k chars) | language (1 class: go) | code (string, 52-85.5k chars) | code_tokens (sequence) | docstring (string, 6-2.61k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 85-252 chars)

Each entry below shows id | repo | path | func_name | language | sha | url on one line, followed by the function's docstring and code. original_string is identical to code, and code_tokens/docstring_tokens are the token sequences of code and docstring.

5,500 | gorgonia/cu | dnn/dropout.go | IsReady | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/dropout.go#L68-L70

// IsReady indicates if the dropout operator is ready to be used
func (d *Dropout) IsReady() bool {
    return d.handle != nil && d.states != nil && d.stateSizeInBytes != 0
}

5,501 | gorgonia/cu | dnn/dropout.go | Reset | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/dropout.go#L73-L78

// Reset resets the state to be not ready. It does NOT reset the dropout ratio.
func (d *Dropout) Reset() {
    d.handle = nil
    d.states = nil
    d.stateSizeInBytes = 0
    d.seed = 0
}

5,502 | gorgonia/cu | occupancy.go | MaxActiveBlocksPerMultiProcessor | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/occupancy.go#L7-L16

// MaxActiveBlocksPerMultiProcessor returns the number of the maximum active blocks per streaming multiprocessor.
func (fn Function) MaxActiveBlocksPerMultiProcessor(blockSize int, dynamicSmemSize int64) (int, error) {
    bs := C.int(blockSize)
    dss := C.size_t(dynamicSmemSize)

    var numBlocks C.int
    if err := result(C.cuOccupancyMaxActiveBlocksPerMultiprocessor(&numBlocks, fn.fn, bs, dss)); err != nil {
        return 0, err
    }
    return int(numBlocks), nil
}

5,503 | gorgonia/cu | occupancy.go | MaxActiveBlocksPerMultiProcessorWithFlags | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/occupancy.go#L20-L30

// MaxActiveBlocksPerMultiProcessorWithFlags returns the number of the maximum active blocks per streaming multiprocessor.
// The flags control how special cases are handled.
func (fn Function) MaxActiveBlocksPerMultiProcessorWithFlags(blockSize int, dynamicSmemSize int64, flags OccupancyFlags) (int, error) {
    bs := C.int(blockSize)
    dss := C.size_t(dynamicSmemSize)
    of := C.uint(flags)

    var numBlocks C.int
    if err := result(C.cuOccupancyMaxActiveBlocksPerMultiprocessorWithFlags(&numBlocks, fn.fn, bs, dss, of)); err != nil {
        return 0, err
    }
    return int(numBlocks), nil
}

5,504 | gorgonia/cu | cmd/gencudnn/parsego.go | checkNils | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencudnn/parsego.go#L37-L100

// checkNils checks the package for functions that return potentially nil pointer types.
//
// It expects functions to have return names. Which is what the generator generates anyways
func (s *PkgState) checkNils() []string {
var retVal []string
for _, f := range s.Files {
for _, decl := range f.Decls {
fn, ok := decl.(*ast.FuncDecl)
if !ok {
continue
}
if fn.Type.Results == nil {
continue
}
retVals := make(map[string]bool) // name:isPtr
retTypes := make(map[string]bool) // typeName: hasBeenAsigned
posRetVal := make([]string, 0, len(fn.Type.Results.List))
for _, ret := range fn.Type.Results.List {
for _, name := range ret.Names {
posRetVal = append(posRetVal, name.Name)
}
switch r := ret.Type.(type) {
case *ast.StarExpr:
if _, ok := r.X.(*ast.Ident); ok {
for _, name := range ret.Names {
retTypes[name.Name] = false
retVals[name.Name] = true
}
}
case *ast.Ident:
// don't add to retTypes, but keep adding to retNames
for _, name := range ret.Names {
retVals[name.Name] = false
}
}
}
for _, stmt := range fn.Body.List {
switch s := stmt.(type) {
case *ast.AssignStmt:
for _, lhs := range s.Lhs {
if ident, ok := lhs.(*ast.Ident); ok {
if _, ok := retTypes[ident.Name]; ok {
retTypes[ident.Name] = true
}
}
}
case *ast.ReturnStmt:
for i, ret := range s.Results {
if ue, ok := ret.(*ast.UnaryExpr); ok && ue.Op == token.AND {
retTypes[posRetVal[i]] = true // assume assigned
}
}
}
}
for _, v := range retTypes {
if !v {
retVal = append(retVal, fn.Name.Name)
}
}
}
}
return retVal
}

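For illustration only (the type and function names below are invented, not from the generated package), this is the kind of function checkNils is meant to flag: a named pointer return value that is never assigned before the return, so callers can silently receive nil.

```go
package main

import "fmt"

type descriptor struct{ id int }

// newDescriptor has the shape checkNils looks for: the named *descriptor
// return value d is declared but never assigned in the body, so the
// function compiles yet always hands back a nil pointer.
func newDescriptor() (d *descriptor, err error) {
	return d, err
}

func main() {
	d, _ := newDescriptor()
	fmt.Println(d == nil) // true
}
```
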
5,505 | gorgonia/cu | cmd/gencudnn/parsego.go | usedCFn | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencudnn/parsego.go#L118-L131

// useCFn returns the count of how many times a C function has been used in the generated package
func (pkg *PkgState) usedCFn() map[string]int {
    retVal := make(map[string]int)
    visitor := &usedCFnVisit{retVal}
    for _, f := range pkg.Files {
        for _, decl := range f.Decls {
            fn, ok := decl.(*ast.FuncDecl)
            if !ok {
                continue
            }
            ast.Walk(visitor, fn)
        }
    }
    return retVal
}

5,506 | gorgonia/cu | dnn/convolution.go | MakeConvolutionPreference | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/convolution.go#L35-L37

// MakeConvolutionPreference allows the creation of a tagged preference - whether it's fwd, bwd or data or filter
func MakeConvolutionPreference(t ConvolutionType, pref ConvolutionPreference) ConvolutionPreference {
    return ConvolutionPreference(byte(t) | byte(pref))
}

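A minimal sketch of the tagging idea (the constants below are invented for the example and are not the package's real values): the type tag and the preference sit in disjoint bits of one byte, so OR-ing them keeps both pieces recoverable.

```go
package main

import "fmt"

// Hypothetical tag/preference values occupying disjoint bits, purely to
// show how a byte-wide OR keeps both pieces of information recoverable.
const (
	fwdTag        byte = 1 << 7
	bwdFilterTag  byte = 1 << 6
	preferFastest byte = 0x01
)

func main() {
	tagged := fwdTag | preferFastest // same shape of operation as MakeConvolutionPreference
	fmt.Printf("tagged=%#x fwd=%v pref=%#x\n",
		tagged, tagged&fwdTag != 0, tagged&^(fwdTag|bwdFilterTag))
}
```
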
5,507 | gorgonia/cu | cmd/gencudnn/params.go | isOutputPtrOfPrim | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencudnn/params.go#L31-L39

// functions for convertibility
func isOutputPtrOfPrim(fnName string, p bg.Parameter) bool {
    if !isOutput(fnName, p) && !isIO(fnName, p) {
        return false
    }
    if !p.IsPointer() {
        return false
    }
    return isBuiltin(depointerize(nameOfType(p.Type())))
}

5,508 | gorgonia/cu | cu.go | Version | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cu.go#L19-L25

// Version returns the version of the CUDA driver
func Version() int {
    var v C.int
    if err := result(C.cuDriverGetVersion(&v)); err != nil {
        return -1
    }
    return int(v)
}

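A short usage sketch (the gorgonia.org/cu import path is an assumption here, and running it needs a machine with the CUDA driver installed): because Version returns -1 on failure rather than an error value, callers only check the sign.

```go
package main

import (
	"fmt"

	"gorgonia.org/cu"
)

func main() {
	if v := cu.Version(); v >= 0 {
		fmt.Println("CUDA driver version:", v) // e.g. 10010 for CUDA 10.1
	} else {
		fmt.Println("could not query the CUDA driver version")
	}
}
```
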
5,509 | gorgonia/cu | convenience.go | MemoryType | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/convenience.go#L15-L23

// This file lists all the convenience functions and methods, not necessarily stuff that is covered in the API
// MemoryType returns the MemoryType of the memory
func (mem DevicePtr) MemoryType() (typ MemoryType, err error) {
    var p unsafe.Pointer
    if p, err = mem.PtrAttribute(MemoryTypeAttr); err != nil {
        return
    }
    t := *(*uint64)(p)
    typ = MemoryType(byte(t))
    return
}

5,510 | gorgonia/cu | convenience.go | MemSize | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/convenience.go#L26-L32

// MemSize returns the size of the memory slab in bytes. Returns 0 if errors occured
func (mem DevicePtr) MemSize() uintptr {
    size, _, err := mem.AddressRange()
    if err != nil {
        log.Printf("MEMSIZE ERR %v", err)
    }
    return uintptr(size)
}

5,511 | gorgonia/cu | convenience.go | ComputeCapability | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/convenience.go#L41-L50

// ComputeCapability returns the compute capability of the device.
// This method is a convenience method for the deprecated API call cuDeviceComputeCapability.
func (d Device) ComputeCapability() (major, minor int, err error) {
    var attrs []int
    if attrs, err = d.Attributes(ComputeCapabilityMajor, ComputeCapabilityMinor); err != nil {
        err = errors.Wrapf(err, "Failed to get ComputeCapability")
        return
    }
    major = attrs[0]
    minor = attrs[1]
    return
}

5,512 | gorgonia/cu | dnn/generated_spatialtransformer.go | NewSpatialTransformer | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/generated_spatialtransformer.go#L22-L43

// NewSpatialTransformer creates a new SpatialTransformer.
func NewSpatialTransformer(samplerType SamplerType, dataType DataType, nbDims int, dimA []int) (retVal *SpatialTransformer, err error) {
    var internal C.cudnnSpatialTransformerDescriptor_t
    if err := result(C.cudnnCreateSpatialTransformerDescriptor(&internal)); err != nil {
        return nil, err
    }

    dimAC, dimACManaged := ints2CIntPtr(dimA)
    defer returnManaged(dimACManaged)
    if err := result(C.cudnnSetSpatialTransformerNdDescriptor(internal, samplerType.C(), dataType.C(), C.int(nbDims), dimAC)); err != nil {
        return nil, err
    }

    retVal = &SpatialTransformer{
        internal:    internal,
        samplerType: samplerType,
        dataType:    dataType,
        nbDims:      nbDims,
        dimA:        dimA,
    }
    runtime.SetFinalizer(retVal, destroySpatialTransformer)
    return retVal, nil
}

5,513 | gorgonia/cu | cmd/gencublas/binding.go | GoTypeForEnum | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencublas/binding.go#L89-L112

// GoTypeForEnum returns a string representation of the given enum type using a mapping
// in types. GoTypeForEnum will panic if no type mapping is found after searching the
// user-provided types mappings or the type is not an enum.
func GoTypeForEnum(typ cc.Type, name string, types ...map[string]bg.Template) string {
    if typ == nil {
        return "<nil>"
    }
    if typ.Kind() != cc.Enum {
        panic(fmt.Sprintf("invalid type: %v", typ))
    }
    tag := typ.Tag()
    if tag != 0 {
        n := string(xc.Dict.S(tag))
        for _, t := range types {
            if s, ok := t[n]; ok {
                var buf bytes.Buffer
                err := s.Execute(&buf, name)
                if err != nil {
                    panic(err)
                }
                return buf.String()
            }
        }
    }
    log.Printf("%s", typ.Declarator())
    panic(fmt.Sprintf("unknown type: %+v", typ))
}

5,514 | gorgonia/cu | cmd/gencublas/binding.go | LowerCaseFirst | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencublas/binding.go#L221-L226

// LowerCaseFirst returns s with the first character lower-cased. LowerCaseFirst
// assumes s is an ASCII-represented string.
func LowerCaseFirst(s string) string {
    if len(s) == 0 {
        return s
    }
    return string(s[0]|' ') + s[1:]
}

5,515 | gorgonia/cu | cmd/gencublas/binding.go | UpperCaseFirst | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencublas/binding.go#L230-L235

// UpperCaseFirst returns s with the first character upper-cased. UpperCaseFirst
// assumes s is an ASCII-represented string.
func UpperCaseFirst(s string) string {
    if len(s) == 0 {
        return s
    }
    return string(s[0]&^' ') + s[1:]
}

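Both helpers rely on the same ASCII identity, which is why their docstrings insist on ASCII input: upper- and lower-case letters differ only in the 0x20 bit, and 0x20 happens to be the space character, so OR with ' ' lower-cases the first byte and AND-NOT clears the bit to upper-case it. A self-contained check of that identity:

```go
package main

import "fmt"

func main() {
	fmt.Printf("%#x\n", 'a'-'A')             // 0x20, i.e. ' '
	fmt.Printf("%c %c\n", 'G'|' ', 'g'&^' ') // g G
}
```
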
5,516 | gorgonia/cu | cmd/gencublas/binding.go | functions | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencublas/binding.go#L276-L284

// functions say we only want functions declared
func functions(t *cc.TranslationUnit) ([]bg.Declaration, error) {
    filter := func(d *cc.Declarator) bool {
        if d.Type.Kind() != cc.Function {
            return false
        }
        return true
    }
    return bg.Get(t, filter)
}

5,517 | gorgonia/cu | dnn/filter.go | NewFilter | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/filter.go#L21-L45

// NewFilter creates a new Filter.
func NewFilter(dataType DataType, format TensorFormat, shape []int) (retVal *Filter, err error) {
    var internal C.cudnnFilterDescriptor_t
    if err = result(C.cudnnCreateFilterDescriptor(&internal)); err != nil {
        return nil, err
    }
    switch len(shape) {
    case 0, 1, 2, 3:
        return nil, errors.New("Cannot create filter with a shape < 4 dimensions")
    case 4:
        if err = result(C.cudnnSetFilter4dDescriptor(internal, dataType.C(), format.C(), C.int(shape[0]), C.int(shape[1]), C.int(shape[2]), C.int(shape[3]))); err != nil {
            return nil, err
        }
    default:
        filterDimA, filterDimAManaged := ints2CIntPtr(shape)
        defer returnManaged(filterDimAManaged)
        if err = result(C.cudnnSetFilterNdDescriptor(internal, dataType.C(), format.C(), C.int(len(shape)), filterDimA)); err != nil {
            return nil, err
        }
    }
    return &Filter{
        internal: internal,
        format:   format,
        shape:    shape,
    }, nil
}

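A minimal call sketch (the gorgonia.org/cu/dnn import path and its cudnn package name are assumptions here, and running it needs a working CUDA/cuDNN install): a 4D filter descriptor for 64 output channels, 3 input channels and a 3×3 kernel, in NCHW layout with float32 data.

```go
package main

import (
	"log"

	cudnn "gorgonia.org/cu/dnn" // import path assumed for this sketch
)

func main() {
	filter, err := cudnn.NewFilter(cudnn.Float, cudnn.NCHW, []int{64, 3, 3, 3})
	if err != nil {
		log.Fatal(err)
	}
	log.Println("filter shape:", filter.Shape())
}
```
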
5,518 | gorgonia/cu | dnn/filter.go | Shape | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/filter.go#L54-L58

// Shape returns a copy of the shape.
func (f *Filter) Shape() []int {
    retVal := make([]int, len(f.shape))
    copy(retVal, f.shape)
    return retVal
}

5,519 | gorgonia/cu | dnn/interop/interop.go | Describe | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/interop/interop.go#L22-L58

// Describe extracts the metadata from a tensor.Dense and returns a cuDNN TensorDescriptor
func Describe(t Tensor) (*cudnn.TensorDescriptor, error) {
shape := t.Shape().Clone()
strides := make([]int, len(shape))
copy(strides, t.Strides())
switch shape.Dims() {
case 0:
// TODO?
case 1:
// TODO?
case 2:
// take a 2D shape and make it 4D:
// because Gorgonia only takes NCHW formats
// any 2D matrix can be thought of as (1,1 H, W)
shape = append(shape, 0, 0) // shape after would be (H, W, 0, 0)
copy(shape[2:], shape[0:]) // shift the shape down by copying: (H, W, H, W)
shape[0] = 1 // (1, W, H, W)
shape[1] = 1 // (1,1,H, W)
strides = append(strides, 0, 0)
copy(strides[2:], strides[0:])
strides[0] = strides[2] * shape[2]
strides[1] = strides[2] * shape[2] // no, this is not a bug.
case 3:
shape = append(shape, 0)
copy(shape[1:], shape[0:])
shape[0] = 1
strides = append(strides, 0)
copy(strides[1:], strides[0:])
strides[0] = strides[1] * shape[1]
default:
}
return cudnn.NewTensorDescriptor(cudnn.NCHW, Dtype2DataType(t.Dtype()), shape, strides)
}

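As a rough worked example of the 2D branch above (standalone, not using Gorgonia types): a row-major (H, W) = (3, 4) matrix with strides (4, 1) is promoted to the NCHW shape (1, 1, 3, 4) with strides (12, 12, 4, 1).

```go
package main

import "fmt"

// promote2D mirrors the 2D branch of Describe: it pads an (H, W) shape and
// stride pair out to the 4D NCHW form (1, 1, H, W).
func promote2D(shape, strides []int) ([]int, []int) {
	shape = append(shape, 0, 0)
	copy(shape[2:], shape[0:])
	shape[0], shape[1] = 1, 1

	strides = append(strides, 0, 0)
	copy(strides[2:], strides[0:])
	strides[0] = strides[2] * shape[2]
	strides[1] = strides[2] * shape[2]
	return shape, strides
}

func main() {
	shape, strides := promote2D([]int{3, 4}, []int{4, 1})
	fmt.Println(shape, strides) // [1 1 3 4] [12 12 4 1]
}
```
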
5,520 | gorgonia/cu | dnn/interop/interop.go | Dtype2DataType | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/interop/interop.go#L66-L82

// Dtype2DataType converts a tensor.Dtype to a cudnnDataType.
func Dtype2DataType(t tensor.Dtype) cudnn.DataType {
    switch t.Name() {
    case "float64":
        return cudnn.Double
    case "float32":
        return cudnn.Float
    case "float16":
        return cudnn.Half
    case "int8":
        return cudnn.Int8
    case "int32":
        return cudnn.Int32
    case "int128":
        return cudnn.Int8x4
    }
    panic("Unreachable")
}

5,521 | gorgonia/cu | cmd/gencudnn/main.go | goimports | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencudnn/main.go#L49-L52

// yes I know goimports can be imported, but I'm lazy
func goimports(filename string) error {
    cmd := exec.Command("goimports", "-w", filename)
    return cmd.Run()
}

5,522 | gorgonia/cu | cmd/gencudnn/main.go | reportPotentialNils | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/cmd/gencudnn/main.go#L94-L103

// find potential nils
func reportPotentialNils(pkg *PkgState) {
    nils := pkg.checkNils()
    if len(nils) > 0 {
        fmt.Printf("## Potential Nils ##\nThese functions have a `*T` return value, but a possible null exception error might happen\n\n")
        for _, n := range nils {
            fmt.Printf("* `%v`\n", n)
        }
    }
    fmt.Println()
}

5,523 | gorgonia/cu | dnn/optensor.go | NewOp | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/optensor.go#L22-L39

// NewOp creates a new Op with the provided settings
func NewOp(op OpTensorOp, dt DataType, prop NanPropagation) (*Op, error) {
    var internal C.cudnnOpTensorDescriptor_t
    if err := result(C.cudnnCreateOpTensorDescriptor(&internal)); err != nil {
        return nil, err
    }

    if err := result(C.cudnnSetOpTensorDescriptor(internal, op.C(), dt.C(), prop.C())); err != nil {
        return nil, err
    }
    retVal := &Op{
        internal:       internal,
        op:             op,
        dataType:       dt,
        nanPropagation: prop,
    }
    runtime.SetFinalizer(retVal, destroyOp)
    return retVal, nil
}

5,524 | gorgonia/cu | dnn/optensor.go | DoOp | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/optensor.go#L51-L109

// DoOp actually performs the operation.
func (ctx *Context) DoOp(op *Op,
alpha1 float64, aDesc *TensorDescriptor, aData Memory,
alpha2 float64, bDesc *TensorDescriptor, bData Memory,
beta float64, cDesc *TensorDescriptor, cData Memory) error {
// dtype checks
if !(aDesc.dataType == bDesc.dataType && bDesc.dataType == cDesc.dataType) {
return errors.Errorf(dtypeMismatch3, cDesc.dataType, aDesc.dataType, bDesc.dataType)
}
if cDesc.dataType == Double && op.dataType != cDesc.dataType {
return errors.Errorf(dtypeMismatch3, Double, cDesc.dataType, op.dataType)
}
if op.dataType != Float && op.dataType != Double {
return errors.Errorf(dtypeMismatch2, Float, Double, op.dataType)
}
// shapecheck
if !(shapeEq(aDesc.shape, bDesc.shape) && shapeEq(bDesc.shape, cDesc.shape)) {
return errors.Errorf(shapeMismatch3, aDesc.shape, bDesc.shape, cDesc.shape)
}
// location check
if bData.Uintptr() == cData.Uintptr() && aData.Uintptr() != cData.Uintptr() {
// If the input tensor B is the same tensor as the destination tensor C,
// then the input tensor A also must be the same tensor as the destination tensor C.
return errors.Errorf(memoryError3, cData.Uintptr(), aData.Uintptr(), bData.Uintptr())
}
// alpha beta generation
var alpha1C, alpha2C, betaC unsafe.Pointer
if op.dataType == Float {
var a1, a2, b C.float
a1 = C.float(float32(alpha1))
a2 = C.float(float32(alpha2))
b = C.float(float32(beta))
alpha1C = unsafe.Pointer(&a1)
alpha2C = unsafe.Pointer(&a2)
betaC = unsafe.Pointer(&b)
} else {
var a1, a2, b C.double
a1 = C.double(alpha1)
a2 = C.double(alpha2)
b = C.double(beta)
alpha1C = unsafe.Pointer(&a1)
alpha2C = unsafe.Pointer(&a2)
betaC = unsafe.Pointer(&b)
}
res := C.cudnnOpTensor(ctx.internal, op.internal,
alpha1C, aDesc.internal, aData.Pointer(),
alpha2C, bDesc.internal, bData.Pointer(),
betaC, cDesc.internal, cData.Pointer(),
)
return result(res)
}

5,525 | gorgonia/cu | dnn/shape.go | shapeEq | go | 89152d7e441439045736bc7640ff607ec371c26c | https://github.com/gorgonia/cu/blob/89152d7e441439045736bc7640ff607ec371c26c/dnn/shape.go#L8-L23

// shapeEq is adapted from tensor
func shapeEq(a, b []int) bool {
    if isScalar(a) && isScalar(b) {
        return true
    }

    if len(a) != len(b) {
        return false
    }

    for i, v := range a {
        if b[i] != v {
            return false
        }
    }
    return true
}

5,526 | balzaczyy/golucene | core/codec/spi/segmentCommitInfo.go | Files | go | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/spi/segmentCommitInfo.go#L124-L161

// Returns all files in use by this segment.
func (si *SegmentCommitInfo) Files() []string {
// Start from the wrapped info's files:
files := make(map[string]bool)
for name, _ := range si.Info.Files() {
files[name] = true
}
// Must separately add any live docs files
for _, name := range si.Info.Codec().(Codec).LiveDocsFormat().Files(si) {
files[name] = true
}
// Must separately add any per-gen updates files. This can go away
// when we get rid of genUpdatesFiles (6.0)
for _, names := range si.genUpdatesFiles {
for name, _ := range names {
files[name] = true
}
}
// must separately add any field updates files
for _, names := range si.dvUpdatesFiles {
for name, _ := range names {
files[name] = true
}
}
// must separately add fieldInfos files
for name, _ := range si.fieldInfosFiles {
files[name] = true
}
ans := make([]string, 0, len(files))
for s, _ := range files {
ans = append(ans, s)
}
return ans
}

5,527 | balzaczyy/golucene | core/index/live.go | newLiveIndexWriterConfig | func newLiveIndexWriterConfig(analyzer analysis.Analyzer,
matchVersion util.Version) *LiveIndexWriterConfigImpl {
assert(DefaultSimilarity != nil)
assert(DefaultCodec != nil)
return &LiveIndexWriterConfigImpl{
analyzer: analyzer,
matchVersion: matchVersion,
ramBufferSizeMB: DEFAULT_RAM_BUFFER_SIZE_MB,
maxBufferedDocs: DEFAULT_MAX_BUFFERED_DOCS,
maxBufferedDeleteTerms: DEFAULT_MAX_BUFFERED_DELETE_TERMS,
readerTermsIndexDivisor: DEFAULT_READER_TERMS_INDEX_DIVISOR,
termIndexInterval: DEFAULT_TERM_INDEX_INTERVAL, // TODO: this should be private to the codec, not settable here
delPolicy: DEFAULT_DELETION_POLICY,
useCompoundFile: DEFAULT_USE_COMPOUND_FILE_SYSTEM,
openMode: OPEN_MODE_CREATE_OR_APPEND,
similarity: DefaultSimilarity(),
mergeScheduler: NewConcurrentMergeScheduler(),
writeLockTimeout: WRITE_LOCK_TIMEOUT,
_indexingChain: defaultIndexingChain,
codec: DefaultCodec(),
infoStream: util.DefaultInfoStream(),
mergePolicy: NewTieredMergePolicy(),
_flushPolicy: newFlushByRamOrCountsPolicy(),
readerPooling: DEFAULT_READER_POOLING,
_indexerThreadPool: NewDocumentsWriterPerThreadPool(DEFAULT_MAX_THREAD_STATES),
perRoutineHardLimitMB: DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB,
checkIntegrityAtMerge: DEFAULT_CHECK_INTEGRITY_AT_MERGE,
}
} | go | func newLiveIndexWriterConfig(analyzer analysis.Analyzer,
matchVersion util.Version) *LiveIndexWriterConfigImpl {
assert(DefaultSimilarity != nil)
assert(DefaultCodec != nil)
return &LiveIndexWriterConfigImpl{
analyzer: analyzer,
matchVersion: matchVersion,
ramBufferSizeMB: DEFAULT_RAM_BUFFER_SIZE_MB,
maxBufferedDocs: DEFAULT_MAX_BUFFERED_DOCS,
maxBufferedDeleteTerms: DEFAULT_MAX_BUFFERED_DELETE_TERMS,
readerTermsIndexDivisor: DEFAULT_READER_TERMS_INDEX_DIVISOR,
termIndexInterval: DEFAULT_TERM_INDEX_INTERVAL, // TODO: this should be private to the codec, not settable here
delPolicy: DEFAULT_DELETION_POLICY,
useCompoundFile: DEFAULT_USE_COMPOUND_FILE_SYSTEM,
openMode: OPEN_MODE_CREATE_OR_APPEND,
similarity: DefaultSimilarity(),
mergeScheduler: NewConcurrentMergeScheduler(),
writeLockTimeout: WRITE_LOCK_TIMEOUT,
_indexingChain: defaultIndexingChain,
codec: DefaultCodec(),
infoStream: util.DefaultInfoStream(),
mergePolicy: NewTieredMergePolicy(),
_flushPolicy: newFlushByRamOrCountsPolicy(),
readerPooling: DEFAULT_READER_POOLING,
_indexerThreadPool: NewDocumentsWriterPerThreadPool(DEFAULT_MAX_THREAD_STATES),
perRoutineHardLimitMB: DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB,
checkIntegrityAtMerge: DEFAULT_CHECK_INTEGRITY_AT_MERGE,
}
} | [
"func",
"newLiveIndexWriterConfig",
"(",
"analyzer",
"analysis",
".",
"Analyzer",
",",
"matchVersion",
"util",
".",
"Version",
")",
"*",
"LiveIndexWriterConfigImpl",
"{",
"assert",
"(",
"DefaultSimilarity",
"!=",
"nil",
")",
"\n",
"assert",
"(",
"DefaultCodec",
"!=",
"nil",
")",
"\n",
"return",
"&",
"LiveIndexWriterConfigImpl",
"{",
"analyzer",
":",
"analyzer",
",",
"matchVersion",
":",
"matchVersion",
",",
"ramBufferSizeMB",
":",
"DEFAULT_RAM_BUFFER_SIZE_MB",
",",
"maxBufferedDocs",
":",
"DEFAULT_MAX_BUFFERED_DOCS",
",",
"maxBufferedDeleteTerms",
":",
"DEFAULT_MAX_BUFFERED_DELETE_TERMS",
",",
"readerTermsIndexDivisor",
":",
"DEFAULT_READER_TERMS_INDEX_DIVISOR",
",",
"termIndexInterval",
":",
"DEFAULT_TERM_INDEX_INTERVAL",
",",
"// TODO: this should be private to the codec, not settable here",
"delPolicy",
":",
"DEFAULT_DELETION_POLICY",
",",
"useCompoundFile",
":",
"DEFAULT_USE_COMPOUND_FILE_SYSTEM",
",",
"openMode",
":",
"OPEN_MODE_CREATE_OR_APPEND",
",",
"similarity",
":",
"DefaultSimilarity",
"(",
")",
",",
"mergeScheduler",
":",
"NewConcurrentMergeScheduler",
"(",
")",
",",
"writeLockTimeout",
":",
"WRITE_LOCK_TIMEOUT",
",",
"_indexingChain",
":",
"defaultIndexingChain",
",",
"codec",
":",
"DefaultCodec",
"(",
")",
",",
"infoStream",
":",
"util",
".",
"DefaultInfoStream",
"(",
")",
",",
"mergePolicy",
":",
"NewTieredMergePolicy",
"(",
")",
",",
"_flushPolicy",
":",
"newFlushByRamOrCountsPolicy",
"(",
")",
",",
"readerPooling",
":",
"DEFAULT_READER_POOLING",
",",
"_indexerThreadPool",
":",
"NewDocumentsWriterPerThreadPool",
"(",
"DEFAULT_MAX_THREAD_STATES",
")",
",",
"perRoutineHardLimitMB",
":",
"DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB",
",",
"checkIntegrityAtMerge",
":",
"DEFAULT_CHECK_INTEGRITY_AT_MERGE",
",",
"}",
"\n",
"}"
] | // used by IndexWriterConfig | [
"used",
"by",
"IndexWriterConfig"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/live.go#L100-L129 |
5,528 | balzaczyy/golucene | core/store/compound.go | FileLength | func (d *CompoundFileDirectory) FileLength(name string) (n int64, err error) {
panic("not implemented yet")
} | go | func (d *CompoundFileDirectory) FileLength(name string) (n int64, err error) {
panic("not implemented yet")
} | [
"func",
"(",
"d",
"*",
"CompoundFileDirectory",
")",
"FileLength",
"(",
"name",
"string",
")",
"(",
"n",
"int64",
",",
"err",
"error",
")",
"{",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}"
] | // Returns the length of a file in the directory. | [
"Returns",
"the",
"length",
"of",
"a",
"file",
"in",
"the",
"directory",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/store/compound.go#L165-L167 |
5,529 | balzaczyy/golucene | core/util/automaton/regexp.go | NewRegExpWithFlag | func NewRegExpWithFlag(s string, flags int) *RegExp {
ans := &RegExp{
b: []rune(s),
flags: flags,
}
var e *RegExp
if len(s) == 0 {
e = makeStringRE("")
} else {
e = ans.parseUnionExp()
if ans.pos < len(ans.b) {
panic(fmt.Sprintf("end-of-string expected at position %v", ans.pos))
}
}
ans.kind = e.kind
ans.exp1, ans.exp2 = e.exp1, e.exp2
ans.s = e.s
ans.c = e.c
ans.min, ans.max, ans.digits = e.min, e.max, e.digits
ans.from, ans.to = e.from, e.to
ans.b = nil
return ans
} | go | func NewRegExpWithFlag(s string, flags int) *RegExp {
ans := &RegExp{
b: []rune(s),
flags: flags,
}
var e *RegExp
if len(s) == 0 {
e = makeStringRE("")
} else {
e = ans.parseUnionExp()
if ans.pos < len(ans.b) {
panic(fmt.Sprintf("end-of-string expected at position %v", ans.pos))
}
}
ans.kind = e.kind
ans.exp1, ans.exp2 = e.exp1, e.exp2
ans.s = e.s
ans.c = e.c
ans.min, ans.max, ans.digits = e.min, e.max, e.digits
ans.from, ans.to = e.from, e.to
ans.b = nil
return ans
} | [
"func",
"NewRegExpWithFlag",
"(",
"s",
"string",
",",
"flags",
"int",
")",
"*",
"RegExp",
"{",
"ans",
":=",
"&",
"RegExp",
"{",
"b",
":",
"[",
"]",
"rune",
"(",
"s",
")",
",",
"flags",
":",
"flags",
",",
"}",
"\n",
"var",
"e",
"*",
"RegExp",
"\n",
"if",
"len",
"(",
"s",
")",
"==",
"0",
"{",
"e",
"=",
"makeStringRE",
"(",
"\"",
"\"",
")",
"\n",
"}",
"else",
"{",
"e",
"=",
"ans",
".",
"parseUnionExp",
"(",
")",
"\n",
"if",
"ans",
".",
"pos",
"<",
"len",
"(",
"ans",
".",
"b",
")",
"{",
"panic",
"(",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"ans",
".",
"pos",
")",
")",
"\n",
"}",
"\n",
"}",
"\n",
"ans",
".",
"kind",
"=",
"e",
".",
"kind",
"\n",
"ans",
".",
"exp1",
",",
"ans",
".",
"exp2",
"=",
"e",
".",
"exp1",
",",
"e",
".",
"exp2",
"\n",
"ans",
".",
"s",
"=",
"e",
".",
"s",
"\n",
"ans",
".",
"c",
"=",
"e",
".",
"c",
"\n",
"ans",
".",
"min",
",",
"ans",
".",
"max",
",",
"ans",
".",
"digits",
"=",
"e",
".",
"min",
",",
"e",
".",
"max",
",",
"e",
".",
"digits",
"\n",
"ans",
".",
"from",
",",
"ans",
".",
"to",
"=",
"e",
".",
"from",
",",
"e",
".",
"to",
"\n",
"ans",
".",
"b",
"=",
"nil",
"\n",
"return",
"ans",
"\n",
"}"
] | // Constructs new RegExp from a string. | [
"Constructs",
"new",
"RegExp",
"from",
"a",
"string",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/regexp.go#L118-L140 |
5,530 | balzaczyy/golucene | core/util/automaton/regexp.go | String | func (re *RegExp) String() string {
var b bytes.Buffer
return re.toStringBuilder(&b).String()
} | go | func (re *RegExp) String() string {
var b bytes.Buffer
return re.toStringBuilder(&b).String()
} | [
"func",
"(",
"re",
"*",
"RegExp",
")",
"String",
"(",
")",
"string",
"{",
"var",
"b",
"bytes",
".",
"Buffer",
"\n",
"return",
"re",
".",
"toStringBuilder",
"(",
"&",
"b",
")",
".",
"String",
"(",
")",
"\n",
"}"
] | // Constructs string from parsed regular expression | [
"Constructs",
"string",
"from",
"parsed",
"regular",
"expression"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/regexp.go#L215-L218 |
5,531 | balzaczyy/golucene | core/codec/compressing/storedFieldsReader.go | newCompressingStoredFieldsReaderFrom | func newCompressingStoredFieldsReaderFrom(reader *CompressingStoredFieldsReader) *CompressingStoredFieldsReader {
return &CompressingStoredFieldsReader{
version: reader.version,
fieldInfos: reader.fieldInfos,
fieldsStream: reader.fieldsStream.Clone(),
indexReader: reader.indexReader.Clone(),
maxPointer: reader.maxPointer,
chunkSize: reader.chunkSize,
packedIntsVersion: reader.packedIntsVersion,
compressionMode: reader.compressionMode,
decompressor: reader.compressionMode.NewDecompressor(),
numDocs: reader.numDocs,
bytes: make([]byte, len(reader.bytes)),
closed: false,
}
} | go | func newCompressingStoredFieldsReaderFrom(reader *CompressingStoredFieldsReader) *CompressingStoredFieldsReader {
return &CompressingStoredFieldsReader{
version: reader.version,
fieldInfos: reader.fieldInfos,
fieldsStream: reader.fieldsStream.Clone(),
indexReader: reader.indexReader.Clone(),
maxPointer: reader.maxPointer,
chunkSize: reader.chunkSize,
packedIntsVersion: reader.packedIntsVersion,
compressionMode: reader.compressionMode,
decompressor: reader.compressionMode.NewDecompressor(),
numDocs: reader.numDocs,
bytes: make([]byte, len(reader.bytes)),
closed: false,
}
} | [
"func",
"newCompressingStoredFieldsReaderFrom",
"(",
"reader",
"*",
"CompressingStoredFieldsReader",
")",
"*",
"CompressingStoredFieldsReader",
"{",
"return",
"&",
"CompressingStoredFieldsReader",
"{",
"version",
":",
"reader",
".",
"version",
",",
"fieldInfos",
":",
"reader",
".",
"fieldInfos",
",",
"fieldsStream",
":",
"reader",
".",
"fieldsStream",
".",
"Clone",
"(",
")",
",",
"indexReader",
":",
"reader",
".",
"indexReader",
".",
"Clone",
"(",
")",
",",
"maxPointer",
":",
"reader",
".",
"maxPointer",
",",
"chunkSize",
":",
"reader",
".",
"chunkSize",
",",
"packedIntsVersion",
":",
"reader",
".",
"packedIntsVersion",
",",
"compressionMode",
":",
"reader",
".",
"compressionMode",
",",
"decompressor",
":",
"reader",
".",
"compressionMode",
".",
"NewDecompressor",
"(",
")",
",",
"numDocs",
":",
"reader",
".",
"numDocs",
",",
"bytes",
":",
"make",
"(",
"[",
"]",
"byte",
",",
"len",
"(",
"reader",
".",
"bytes",
")",
")",
",",
"closed",
":",
"false",
",",
"}",
"\n",
"}"
] | // used by clone | [
"used",
"by",
"clone"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/compressing/storedFieldsReader.go#L37-L52 |
5,532 | balzaczyy/golucene | core/codec/compressing/storedFieldsReader.go | Close | func (r *CompressingStoredFieldsReader) Close() (err error) {
if !r.closed {
if err = util.Close(r.fieldsStream); err == nil {
r.closed = true
}
}
return
} | go | func (r *CompressingStoredFieldsReader) Close() (err error) {
if !r.closed {
if err = util.Close(r.fieldsStream); err == nil {
r.closed = true
}
}
return
} | [
"func",
"(",
"r",
"*",
"CompressingStoredFieldsReader",
")",
"Close",
"(",
")",
"(",
"err",
"error",
")",
"{",
"if",
"!",
"r",
".",
"closed",
"{",
"if",
"err",
"=",
"util",
".",
"Close",
"(",
"r",
".",
"fieldsStream",
")",
";",
"err",
"==",
"nil",
"{",
"r",
".",
"closed",
"=",
"true",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"\n",
"}"
] | // Close the underlying IndexInputs | [
"Close",
"the",
"underlying",
"IndexInputs"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/compressing/storedFieldsReader.go#L172-L179 |
5,533 | balzaczyy/golucene | core/codec/lucene41/postingsReader.go | readVIntBlock | func readVIntBlock(docIn store.IndexInput, docBuffer []int,
freqBuffer []int, num int, indexHasFreq bool) (err error) {
if indexHasFreq {
for i := 0; i < num; i++ {
code, err := asInt(docIn.ReadVInt())
if err != nil {
return err
}
docBuffer[i] = int(uint(code) >> 1)
if (code & 1) != 0 {
freqBuffer[i] = 1
} else {
freqBuffer[i], err = asInt(docIn.ReadVInt())
if err != nil {
return err
}
}
}
} else {
for i := 0; i < num; i++ {
docBuffer[i], err = asInt(docIn.ReadVInt())
if err != nil {
return err
}
}
}
return nil
} | go | func readVIntBlock(docIn store.IndexInput, docBuffer []int,
freqBuffer []int, num int, indexHasFreq bool) (err error) {
if indexHasFreq {
for i := 0; i < num; i++ {
code, err := asInt(docIn.ReadVInt())
if err != nil {
return err
}
docBuffer[i] = int(uint(code) >> 1)
if (code & 1) != 0 {
freqBuffer[i] = 1
} else {
freqBuffer[i], err = asInt(docIn.ReadVInt())
if err != nil {
return err
}
}
}
} else {
for i := 0; i < num; i++ {
docBuffer[i], err = asInt(docIn.ReadVInt())
if err != nil {
return err
}
}
}
return nil
} | [
"func",
"readVIntBlock",
"(",
"docIn",
"store",
".",
"IndexInput",
",",
"docBuffer",
"[",
"]",
"int",
",",
"freqBuffer",
"[",
"]",
"int",
",",
"num",
"int",
",",
"indexHasFreq",
"bool",
")",
"(",
"err",
"error",
")",
"{",
"if",
"indexHasFreq",
"{",
"for",
"i",
":=",
"0",
";",
"i",
"<",
"num",
";",
"i",
"++",
"{",
"code",
",",
"err",
":=",
"asInt",
"(",
"docIn",
".",
"ReadVInt",
"(",
")",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"docBuffer",
"[",
"i",
"]",
"=",
"int",
"(",
"uint",
"(",
"code",
")",
">>",
"1",
")",
"\n",
"if",
"(",
"code",
"&",
"1",
")",
"!=",
"0",
"{",
"freqBuffer",
"[",
"i",
"]",
"=",
"1",
"\n",
"}",
"else",
"{",
"freqBuffer",
"[",
"i",
"]",
",",
"err",
"=",
"asInt",
"(",
"docIn",
".",
"ReadVInt",
"(",
")",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"else",
"{",
"for",
"i",
":=",
"0",
";",
"i",
"<",
"num",
";",
"i",
"++",
"{",
"docBuffer",
"[",
"i",
"]",
",",
"err",
"=",
"asInt",
"(",
"docIn",
".",
"ReadVInt",
"(",
")",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"nil",
"\n",
"}"
] | /**
* Read values that have been written using variable-length encoding instead of bit-packing.
*/ | [
"Read",
"values",
"that",
"have",
"been",
"written",
"using",
"variable",
"-",
"length",
"encoding",
"instead",
"of",
"bit",
"-",
"packing",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/lucene41/postingsReader.go#L136-L163 |
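The low-bit trick that readVIntBlock above decodes can be shown without the IndexInput plumbing: when frequencies are indexed, each doc delta is shifted left one bit, the low bit says whether the frequency is exactly 1, and only frequencies greater than 1 are written as a separate value. The encoder below is an assumed counterpart written only for illustration; the actual Lucene41 writer is not part of this row.

package main

import "fmt"

// encodePair packs (docDelta, freq) with the convention the reader decodes:
// low bit set means freq == 1 and no separate freq value follows.
func encodePair(docDelta, freq int) []int {
    if freq == 1 {
        return []int{docDelta<<1 | 1}
    }
    return []int{docDelta << 1, freq}
}

// decodeStream applies the same logic as the indexHasFreq branch above,
// but over a plain []int instead of a VInt-encoded store.IndexInput.
func decodeStream(stream []int, num int) (docs, freqs []int) {
    i := 0
    for n := 0; n < num; n++ {
        code := stream[i]
        i++
        docs = append(docs, int(uint(code)>>1))
        if code&1 != 0 {
            freqs = append(freqs, 1)
        } else {
            freqs = append(freqs, stream[i])
            i++
        }
    }
    return docs, freqs
}

func main() {
    stream := append(encodePair(5, 1), encodePair(3, 7)...)
    docs, freqs := decodeStream(stream, 2)
    fmt.Println(docs, freqs) // [5 3] [1 7]
}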
5,534 | balzaczyy/golucene | core/index/invertedDocConsumerPerField.go | utf8ToString | func utf8ToString(iso8859_1_buf []byte) string {
buf := make([]rune, len(iso8859_1_buf))
for i, b := range iso8859_1_buf {
buf[i] = rune(b)
}
return string(buf)
} | go | func utf8ToString(iso8859_1_buf []byte) string {
buf := make([]rune, len(iso8859_1_buf))
for i, b := range iso8859_1_buf {
buf[i] = rune(b)
}
return string(buf)
} | [
"func",
"utf8ToString",
"(",
"iso8859_1_buf",
"[",
"]",
"byte",
")",
"string",
"{",
"buf",
":=",
"make",
"(",
"[",
"]",
"rune",
",",
"len",
"(",
"iso8859_1_buf",
")",
")",
"\n",
"for",
"i",
",",
"b",
":=",
"range",
"iso8859_1_buf",
"{",
"buf",
"[",
"i",
"]",
"=",
"rune",
"(",
"b",
")",
"\n",
"}",
"\n",
"return",
"string",
"(",
"buf",
")",
"\n",
"}"
] | // Simpler version of Lucene's own method | [
"Simpler",
"version",
"of",
"Lucene",
"s",
"own",
"method"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/invertedDocConsumerPerField.go#L179-L185 |
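Despite its name, utf8ToString above decodes each input byte as a single rune, which is an ISO-8859-1 (Latin-1) interpretation rather than UTF-8. A runnable sketch of the difference against Go's plain []byte-to-string conversion:

package main

import "fmt"

// latin1ToString mirrors utf8ToString above: one byte becomes one rune.
func latin1ToString(buf []byte) string {
    rs := make([]rune, len(buf))
    for i, b := range buf {
        rs[i] = rune(b)
    }
    return string(rs)
}

func main() {
    b := []byte{'c', 'a', 'f', 0xE9} // "café" encoded as ISO-8859-1
    fmt.Println(latin1ToString(b))   // café
    fmt.Println(string(b))           // the 0xE9 byte alone is not valid UTF-8
}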
5,535 | balzaczyy/golucene | core/index/writer.go | ensureOpen | func (cc *ClosingControl) ensureOpen(failIfClosing bool) {
assert2(!cc._closed && (!failIfClosing || !cc._closing), "this IndexWriter is closed")
} | go | func (cc *ClosingControl) ensureOpen(failIfClosing bool) {
assert2(!cc._closed && (!failIfClosing || !cc._closing), "this IndexWriter is closed")
} | [
"func",
"(",
"cc",
"*",
"ClosingControl",
")",
"ensureOpen",
"(",
"failIfClosing",
"bool",
")",
"{",
"assert2",
"(",
"!",
"cc",
".",
"_closed",
"&&",
"(",
"!",
"failIfClosing",
"||",
"!",
"cc",
".",
"_closing",
")",
",",
"\"",
"\"",
")",
"\n",
"}"
] | // Used internally to throw an AlreadyClosedError if this IndexWriter
// has been closed or is in the process of closing. | [
"Used",
"internally",
"to",
"throw",
"an",
"AlreadyClosedError",
"if",
"this",
"IndexWriter",
"has",
"been",
"closed",
"or",
"is",
"in",
"the",
"process",
"of",
"closing",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/writer.go#L121-L123 |
5,536 | balzaczyy/golucene | core/index/writer.go | segString | func (w *IndexWriter) segString() string {
// TODO synchronized
return w.readerPool.segmentsToString(w.segmentInfos.Segments)
} | go | func (w *IndexWriter) segString() string {
// TODO synchronized
return w.readerPool.segmentsToString(w.segmentInfos.Segments)
} | [
"func",
"(",
"w",
"*",
"IndexWriter",
")",
"segString",
"(",
")",
"string",
"{",
"// TODO synchronized",
"return",
"w",
".",
"readerPool",
".",
"segmentsToString",
"(",
"w",
".",
"segmentInfos",
".",
"Segments",
")",
"\n",
"}"
] | // Returns a string description of all segments, for debugging. | [
"Returns",
"a",
"string",
"description",
"of",
"all",
"segments",
"for",
"debugging",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/writer.go#L1456-L1459 |
5,537 | balzaczyy/golucene | core/index/writer.go | assertFilesExist | func (w *IndexWriter) assertFilesExist(toSync *SegmentInfos) error {
files := toSync.files(w.directory, false)
for _, filename := range files {
allFiles, err := w.directory.ListAll()
if err != nil {
return err
}
ok, err := w.slowFileExists(w.directory, filename)
if err != nil {
return err
}
assert2(ok, "file %v does not exist; files=%v", filename, allFiles)
// If this trips it means we are missing a call to checkpoint
// somewhere, because by the time we are called, deleter should
// know about every file referenced by the current head
// segmentInfos:
assert2(w.deleter.exists(filename), "IndexFileDeleter doesn't know about file %v", filename)
}
return nil
} | go | func (w *IndexWriter) assertFilesExist(toSync *SegmentInfos) error {
files := toSync.files(w.directory, false)
for _, filename := range files {
allFiles, err := w.directory.ListAll()
if err != nil {
return err
}
ok, err := w.slowFileExists(w.directory, filename)
if err != nil {
return err
}
assert2(ok, "file %v does not exist; files=%v", filename, allFiles)
// If this trips it means we are missing a call to checkpoint
// somewhere, because by the time we are called, deleter should
// know about every file referenced by the current head
// segmentInfos:
assert2(w.deleter.exists(filename), "IndexFileDeleter doesn't know about file %v", filename)
}
return nil
} | [
"func",
"(",
"w",
"*",
"IndexWriter",
")",
"assertFilesExist",
"(",
"toSync",
"*",
"SegmentInfos",
")",
"error",
"{",
"files",
":=",
"toSync",
".",
"files",
"(",
"w",
".",
"directory",
",",
"false",
")",
"\n",
"for",
"_",
",",
"filename",
":=",
"range",
"files",
"{",
"allFiles",
",",
"err",
":=",
"w",
".",
"directory",
".",
"ListAll",
"(",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"ok",
",",
"err",
":=",
"w",
".",
"slowFileExists",
"(",
"w",
".",
"directory",
",",
"filename",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"assert2",
"(",
"ok",
",",
"\"",
"\"",
",",
"filename",
",",
"allFiles",
")",
"\n",
"// If this trips it means we are missing a call to checkpoint",
"// somewhere, because by the time we are called, deleter should",
"// know about every file referenced by the current head",
"// segmentInfos:",
"assert2",
"(",
"w",
".",
"deleter",
".",
"exists",
"(",
"filename",
")",
",",
"\"",
"\"",
",",
"filename",
")",
"\n",
"}",
"\n",
"return",
"nil",
"\n",
"}"
] | // called only from assert | [
"called",
"only",
"from",
"assert"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/writer.go#L1462-L1481 |
5,538 | balzaczyy/golucene | core/index/writer.go | deleteNewFiles | func (w *IndexWriter) deleteNewFiles(files []string) error {
w.Lock() // synchronized
defer w.Unlock()
panic("not implemented yet")
} | go | func (w *IndexWriter) deleteNewFiles(files []string) error {
w.Lock() // synchronized
defer w.Unlock()
panic("not implemented yet")
} | [
"func",
"(",
"w",
"*",
"IndexWriter",
")",
"deleteNewFiles",
"(",
"files",
"[",
"]",
"string",
")",
"error",
"{",
"w",
".",
"Lock",
"(",
")",
"// synchronized",
"\n",
"defer",
"w",
".",
"Unlock",
"(",
")",
"\n",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}"
] | // Tries to delete the given files if unreferenced. | [
"Tries",
"to",
"delete",
"the",
"given",
"files",
"if",
"unreferenced",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/writer.go#L1721-L1725 |
5,539 | balzaczyy/golucene | core/document/field.go | NewFieldFromString | func NewFieldFromString(name, value string, ft *FieldType) *Field {
assert2(name != "", "name cannot be empty")
assert2(value != "", "value cannot be empty")
assert2(ft.stored || ft.indexed,
"it doesn't make sense to have a field that is neither indexed nor stored")
return &Field{_type: ft, _name: name, _data: value, _boost: 1}
} | go | func NewFieldFromString(name, value string, ft *FieldType) *Field {
assert2(name != "", "name cannot be empty")
assert2(value != "", "value cannot be empty")
assert2(ft.stored || ft.indexed,
"it doesn't make sense to have a field that is neither indexed nor stored")
return &Field{_type: ft, _name: name, _data: value, _boost: 1}
} | [
"func",
"NewFieldFromString",
"(",
"name",
",",
"value",
"string",
",",
"ft",
"*",
"FieldType",
")",
"*",
"Field",
"{",
"assert2",
"(",
"name",
"!=",
"\"",
"\"",
",",
"\"",
"\"",
")",
"\n",
"assert2",
"(",
"value",
"!=",
"\"",
"\"",
",",
"\"",
"\"",
")",
"\n",
"assert2",
"(",
"ft",
".",
"stored",
"||",
"ft",
".",
"indexed",
",",
"\"",
"\"",
")",
"\n",
"return",
"&",
"Field",
"{",
"_type",
":",
"ft",
",",
"_name",
":",
"name",
",",
"_data",
":",
"value",
",",
"_boost",
":",
"1",
"}",
"\n",
"}"
] | // Create field with String value | [
"Create",
"field",
"with",
"String",
"value"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/document/field.go#L42-L48 |
5,540 | balzaczyy/golucene | core/codec/blocktree/segmentTermEnum.go | pushFrame | func (e *SegmentTermsEnum) pushFrame(arc *fst.Arc, frameData []byte, length int) (f *segmentTermsEnumFrame, err error) {
// fmt.Println("Pushing frame...")
e.scratchReader.Reset(frameData)
code, err := e.scratchReader.ReadVLong()
if err != nil {
return nil, err
}
fpSeek := int64(uint64(code) >> BTT_OUTPUT_FLAGS_NUM_BITS)
f = e.frame(1 + e.currentFrame.ord)
f.hasTerms = (code & BTT_OUTPUT_FLAG_HAS_TERMS) != 0
f.hasTermsOrig = f.hasTerms
f.isFloor = (code & BTT_OUTPUT_FLAG_IS_FLOOR) != 0
if f.isFloor {
f.setFloorData(e.scratchReader, frameData)
}
e.pushFrameAt(arc, fpSeek, length)
return f, err
} | go | func (e *SegmentTermsEnum) pushFrame(arc *fst.Arc, frameData []byte, length int) (f *segmentTermsEnumFrame, err error) {
// fmt.Println("Pushing frame...")
e.scratchReader.Reset(frameData)
code, err := e.scratchReader.ReadVLong()
if err != nil {
return nil, err
}
fpSeek := int64(uint64(code) >> BTT_OUTPUT_FLAGS_NUM_BITS)
f = e.frame(1 + e.currentFrame.ord)
f.hasTerms = (code & BTT_OUTPUT_FLAG_HAS_TERMS) != 0
f.hasTermsOrig = f.hasTerms
f.isFloor = (code & BTT_OUTPUT_FLAG_IS_FLOOR) != 0
if f.isFloor {
f.setFloorData(e.scratchReader, frameData)
}
e.pushFrameAt(arc, fpSeek, length)
return f, err
} | [
"func",
"(",
"e",
"*",
"SegmentTermsEnum",
")",
"pushFrame",
"(",
"arc",
"*",
"fst",
".",
"Arc",
",",
"frameData",
"[",
"]",
"byte",
",",
"length",
"int",
")",
"(",
"f",
"*",
"segmentTermsEnumFrame",
",",
"err",
"error",
")",
"{",
"// fmt.Println(\"Pushing frame...\")",
"e",
".",
"scratchReader",
".",
"Reset",
"(",
"frameData",
")",
"\n",
"code",
",",
"err",
":=",
"e",
".",
"scratchReader",
".",
"ReadVLong",
"(",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"nil",
",",
"err",
"\n",
"}",
"\n",
"fpSeek",
":=",
"int64",
"(",
"uint64",
"(",
"code",
")",
">>",
"BTT_OUTPUT_FLAGS_NUM_BITS",
")",
"\n",
"f",
"=",
"e",
".",
"frame",
"(",
"1",
"+",
"e",
".",
"currentFrame",
".",
"ord",
")",
"\n",
"f",
".",
"hasTerms",
"=",
"(",
"code",
"&",
"BTT_OUTPUT_FLAG_HAS_TERMS",
")",
"!=",
"0",
"\n",
"f",
".",
"hasTermsOrig",
"=",
"f",
".",
"hasTerms",
"\n",
"f",
".",
"isFloor",
"=",
"(",
"code",
"&",
"BTT_OUTPUT_FLAG_IS_FLOOR",
")",
"!=",
"0",
"\n",
"if",
"f",
".",
"isFloor",
"{",
"f",
".",
"setFloorData",
"(",
"e",
".",
"scratchReader",
",",
"frameData",
")",
"\n",
"}",
"\n",
"e",
".",
"pushFrameAt",
"(",
"arc",
",",
"fpSeek",
",",
"length",
")",
"\n",
"return",
"f",
",",
"err",
"\n",
"}"
] | // Pushes a frame we seek'd to | [
"Pushes",
"a",
"frame",
"we",
"seek",
"d",
"to"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/blocktree/segmentTermEnum.go#L134-L151 |
5,541 | balzaczyy/golucene | core/codec/blocktree/segmentTermEnum.go | pushFrameAt | func (e *SegmentTermsEnum) pushFrameAt(arc *fst.Arc, fp int64, length int) (f *segmentTermsEnumFrame, err error) {
f = e.frame(1 + e.currentFrame.ord)
f.arc = arc
if f.fpOrig == fp && f.nextEnt != -1 {
// fmt.Printf(" push reused frame ord=%v fp=%v isFloor?=%v hasTerms=%v pref=%v nextEnt=%v targetBeforeCurrentLength=%v term.length=%v vs prefix=%v\n",
// f.ord, f.fp, f.isFloor, f.hasTerms, e.term, f.nextEnt, e.targetBeforeCurrentLength, e.term.length, f.prefix)
if f.ord > e.targetBeforeCurrentLength {
f.rewind()
} else {
// fmt.Println(" skip rewind!")
}
if length != f.prefix {
panic("assert fail")
}
} else {
f.nextEnt = -1
f.prefix = length
f.state.TermBlockOrd = 0
f.fpOrig, f.fp = fp, fp
f.lastSubFP = -1
// fmt.Printf(" push new frame ord=%v fp=%v hasTerms=%v isFloor=%v pref=%v\n",
// f.ord, f.fp, f.hasTerms, f.isFloor, e.term)
}
return f, nil
} | go | func (e *SegmentTermsEnum) pushFrameAt(arc *fst.Arc, fp int64, length int) (f *segmentTermsEnumFrame, err error) {
f = e.frame(1 + e.currentFrame.ord)
f.arc = arc
if f.fpOrig == fp && f.nextEnt != -1 {
// fmt.Printf(" push reused frame ord=%v fp=%v isFloor?=%v hasTerms=%v pref=%v nextEnt=%v targetBeforeCurrentLength=%v term.length=%v vs prefix=%v\n",
// f.ord, f.fp, f.isFloor, f.hasTerms, e.term, f.nextEnt, e.targetBeforeCurrentLength, e.term.length, f.prefix)
if f.ord > e.targetBeforeCurrentLength {
f.rewind()
} else {
// fmt.Println(" skip rewind!")
}
if length != f.prefix {
panic("assert fail")
}
} else {
f.nextEnt = -1
f.prefix = length
f.state.TermBlockOrd = 0
f.fpOrig, f.fp = fp, fp
f.lastSubFP = -1
// fmt.Printf(" push new frame ord=%v fp=%v hasTerms=%v isFloor=%v pref=%v\n",
// f.ord, f.fp, f.hasTerms, f.isFloor, e.term)
}
return f, nil
} | [
"func",
"(",
"e",
"*",
"SegmentTermsEnum",
")",
"pushFrameAt",
"(",
"arc",
"*",
"fst",
".",
"Arc",
",",
"fp",
"int64",
",",
"length",
"int",
")",
"(",
"f",
"*",
"segmentTermsEnumFrame",
",",
"err",
"error",
")",
"{",
"f",
"=",
"e",
".",
"frame",
"(",
"1",
"+",
"e",
".",
"currentFrame",
".",
"ord",
")",
"\n",
"f",
".",
"arc",
"=",
"arc",
"\n",
"if",
"f",
".",
"fpOrig",
"==",
"fp",
"&&",
"f",
".",
"nextEnt",
"!=",
"-",
"1",
"{",
"// fmt.Printf(\" push reused frame ord=%v fp=%v isFloor?=%v hasTerms=%v pref=%v nextEnt=%v targetBeforeCurrentLength=%v term.length=%v vs prefix=%v\\n\",",
"// \tf.ord, f.fp, f.isFloor, f.hasTerms, e.term, f.nextEnt, e.targetBeforeCurrentLength, e.term.length, f.prefix)",
"if",
"f",
".",
"ord",
">",
"e",
".",
"targetBeforeCurrentLength",
"{",
"f",
".",
"rewind",
"(",
")",
"\n",
"}",
"else",
"{",
"// fmt.Println(\" skip rewind!\")",
"}",
"\n",
"if",
"length",
"!=",
"f",
".",
"prefix",
"{",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"}",
"else",
"{",
"f",
".",
"nextEnt",
"=",
"-",
"1",
"\n",
"f",
".",
"prefix",
"=",
"length",
"\n",
"f",
".",
"state",
".",
"TermBlockOrd",
"=",
"0",
"\n",
"f",
".",
"fpOrig",
",",
"f",
".",
"fp",
"=",
"fp",
",",
"fp",
"\n",
"f",
".",
"lastSubFP",
"=",
"-",
"1",
"\n",
"// fmt.Printf(\" push new frame ord=%v fp=%v hasTerms=%v isFloor=%v pref=%v\\n\",",
"// \tf.ord, f.fp, f.hasTerms, f.isFloor, e.term)",
"}",
"\n",
"return",
"f",
",",
"nil",
"\n",
"}"
] | // Pushes next'd frame or seek'd frame; we later
// lazy-load the frame only when needed | [
"Pushes",
"next",
"d",
"frame",
"or",
"seek",
"d",
"frame",
";",
"we",
"later",
"lazy",
"-",
"load",
"the",
"frame",
"only",
"when",
"needed"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/blocktree/segmentTermEnum.go#L155-L179 |
5,542 | balzaczyy/golucene | core/codec/spi/codec.go | LoadCodec | func LoadCodec(name string) Codec {
c, ok := allCodecs[name]
if !ok {
fmt.Println("Unknown codec:", name)
fmt.Println("Available codecs:", allCodecs)
assert(ok)
}
return c
} | go | func LoadCodec(name string) Codec {
c, ok := allCodecs[name]
if !ok {
fmt.Println("Unknown codec:", name)
fmt.Println("Available codecs:", allCodecs)
assert(ok)
}
return c
} | [
"func",
"LoadCodec",
"(",
"name",
"string",
")",
"Codec",
"{",
"c",
",",
"ok",
":=",
"allCodecs",
"[",
"name",
"]",
"\n",
"if",
"!",
"ok",
"{",
"fmt",
".",
"Println",
"(",
"\"",
"\"",
",",
"name",
")",
"\n",
"fmt",
".",
"Println",
"(",
"\"",
"\"",
",",
"allCodecs",
")",
"\n",
"assert",
"(",
"ok",
")",
"\n",
"}",
"\n",
"return",
"c",
"\n",
"}"
] | // looks up a codec by name | [
"looks",
"up",
"a",
"codec",
"by",
"name"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/spi/codec.go#L124-L132 |
5,543 | balzaczyy/golucene | core/codec/spi/codec.go | AvailableCodecs | func AvailableCodecs() []string {
ans := make([]string, 0, len(allCodecs))
for name, _ := range allCodecs {
ans = append(ans, name)
}
return ans
} | go | func AvailableCodecs() []string {
ans := make([]string, 0, len(allCodecs))
for name, _ := range allCodecs {
ans = append(ans, name)
}
return ans
} | [
"func",
"AvailableCodecs",
"(",
")",
"[",
"]",
"string",
"{",
"ans",
":=",
"make",
"(",
"[",
"]",
"string",
",",
"0",
",",
"len",
"(",
"allCodecs",
")",
")",
"\n",
"for",
"name",
",",
"_",
":=",
"range",
"allCodecs",
"{",
"ans",
"=",
"append",
"(",
"ans",
",",
"name",
")",
"\n",
"}",
"\n",
"return",
"ans",
"\n",
"}"
] | // returns a list of all available codec names | [
"returns",
"a",
"list",
"of",
"all",
"available",
"codec",
"names"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/spi/codec.go#L135-L141 |
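LoadCodec and AvailableCodecs above both read from a shared allCodecs map; the registration side is not shown in these rows. A self-contained sketch of that lookup-by-name pattern, where the codec struct and the "Lucene410" name are illustrative only:

package main

import "fmt"

type codec struct{ name string }

var allCodecs = map[string]codec{}

func registerCodec(c codec) { allCodecs[c.name] = c }

// loadCodec looks a codec up by name, reporting whether it is registered
// instead of panicking the way the original does via assert.
func loadCodec(name string) (codec, bool) {
    c, ok := allCodecs[name]
    return c, ok
}

// availableCodecs lists every registered codec name.
func availableCodecs() []string {
    names := make([]string, 0, len(allCodecs))
    for name := range allCodecs {
        names = append(names, name)
    }
    return names
}

func main() {
    registerCodec(codec{name: "Lucene410"})
    fmt.Println(availableCodecs()) // [Lucene410]
    if c, ok := loadCodec("Lucene410"); ok {
        fmt.Println("found:", c.name)
    }
}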
5,544 | balzaczyy/golucene | core/util/packed/packed.go | CheckVersion | func CheckVersion(version int32) {
if version < PACKED_VERSION_START {
panic(fmt.Sprintf("Version is too old, should be at least %v (got %v)", PACKED_VERSION_START, version))
} else if version > VERSION_CURRENT {
panic(fmt.Sprintf("Version is too new, should be at most %v (got %v)", VERSION_CURRENT, version))
}
} | go | func CheckVersion(version int32) {
if version < PACKED_VERSION_START {
panic(fmt.Sprintf("Version is too old, should be at least %v (got %v)", PACKED_VERSION_START, version))
} else if version > VERSION_CURRENT {
panic(fmt.Sprintf("Version is too new, should be at most %v (got %v)", VERSION_CURRENT, version))
}
} | [
"func",
"CheckVersion",
"(",
"version",
"int32",
")",
"{",
"if",
"version",
"<",
"PACKED_VERSION_START",
"{",
"panic",
"(",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"PACKED_VERSION_START",
",",
"version",
")",
")",
"\n",
"}",
"else",
"if",
"version",
">",
"VERSION_CURRENT",
"{",
"panic",
"(",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"VERSION_CURRENT",
",",
"version",
")",
")",
"\n",
"}",
"\n",
"}"
] | // Check the validity of a version number | [
"Check",
"the",
"validity",
"of",
"a",
"version",
"number"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/packed/packed.go#L54-L60 |
5,545 | balzaczyy/golucene | core/util/packed/packed.go | IsSupported | func (f PackedFormat) IsSupported(bitsPerValue int) bool {
switch int(f) {
case PACKED_SINGLE_BLOCK:
return is64Supported(bitsPerValue)
}
return bitsPerValue >= 1 && bitsPerValue <= 64
} | go | func (f PackedFormat) IsSupported(bitsPerValue int) bool {
switch int(f) {
case PACKED_SINGLE_BLOCK:
return is64Supported(bitsPerValue)
}
return bitsPerValue >= 1 && bitsPerValue <= 64
} | [
"func",
"(",
"f",
"PackedFormat",
")",
"IsSupported",
"(",
"bitsPerValue",
"int",
")",
"bool",
"{",
"switch",
"int",
"(",
"f",
")",
"{",
"case",
"PACKED_SINGLE_BLOCK",
":",
"return",
"is64Supported",
"(",
"bitsPerValue",
")",
"\n",
"}",
"\n",
"return",
"bitsPerValue",
">=",
"1",
"&&",
"bitsPerValue",
"<=",
"64",
"\n",
"}"
] | /**
* Tests whether the provided number of bits per value is supported by the
* format.
*/ | [
"Tests",
"whether",
"the",
"provided",
"number",
"of",
"bits",
"per",
"value",
"is",
"supported",
"by",
"the",
"format",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/packed/packed.go#L118-L124 |
5,546 | balzaczyy/golucene | core/util/wrapper.go | Set | func (so *SetOnce) Set(obj interface{}) {
so.Do(func() { so.obj = obj })
assert2(so.obj == obj, "The object cannot be set twice!")
} | go | func (so *SetOnce) Set(obj interface{}) {
so.Do(func() { so.obj = obj })
assert2(so.obj == obj, "The object cannot be set twice!")
} | [
"func",
"(",
"so",
"*",
"SetOnce",
")",
"Set",
"(",
"obj",
"interface",
"{",
"}",
")",
"{",
"so",
".",
"Do",
"(",
"func",
"(",
")",
"{",
"so",
".",
"obj",
"=",
"obj",
"}",
")",
"\n",
"assert2",
"(",
"so",
".",
"obj",
"==",
"obj",
",",
"\"",
"\"",
")",
"\n",
"}"
] | // Sets the given object. If the object has already been set, an exception is thrown. | [
"Sets",
"the",
"given",
"object",
".",
"If",
"the",
"object",
"has",
"already",
"been",
"set",
"an",
"exception",
"is",
"thrown",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/wrapper.go#L30-L33 |
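The SetOnce.Set method above relies on an embedded once-semantics Do call: the first value wins, and a later call with a different value trips the assertion. A self-contained sketch of the same behaviour built on sync.Once (the real SetOnce struct definition is not included in this row):

package main

import (
    "fmt"
    "sync"
)

type setOnce struct {
    sync.Once
    obj interface{}
}

// Set stores obj on the first call; a later call with a different value panics.
func (s *setOnce) Set(obj interface{}) {
    s.Do(func() { s.obj = obj })
    if s.obj != obj {
        panic("The object cannot be set twice!")
    }
}

func main() {
    var s setOnce
    s.Set("first")
    s.Set("first") // same value again is a no-op
    fmt.Println(s.obj)

    defer func() { fmt.Println("recovered:", recover()) }()
    s.Set("second") // a different value triggers the panic
}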
5,547 | balzaczyy/golucene | core/index/deleter.go | onCommit | func (p KeepOnlyLastCommitDeletionPolicy) onCommit(commits []IndexCommit) error {
// Note that len(commits) should normally be 2 (if not called by
// onInit above).
for i, limit := 0, len(commits); i < limit-1; i++ {
commits[i].Delete()
}
return nil
} | go | func (p KeepOnlyLastCommitDeletionPolicy) onCommit(commits []IndexCommit) error {
// Note that len(commits) should normally be 2 (if not called by
// onInit above).
for i, limit := 0, len(commits); i < limit-1; i++ {
commits[i].Delete()
}
return nil
} | [
"func",
"(",
"p",
"KeepOnlyLastCommitDeletionPolicy",
")",
"onCommit",
"(",
"commits",
"[",
"]",
"IndexCommit",
")",
"error",
"{",
"// Note that len(commits) should normally be 2 (if not called by",
"// onInit above).",
"for",
"i",
",",
"limit",
":=",
"0",
",",
"len",
"(",
"commits",
")",
";",
"i",
"<",
"limit",
"-",
"1",
";",
"i",
"++",
"{",
"commits",
"[",
"i",
"]",
".",
"Delete",
"(",
")",
"\n",
"}",
"\n",
"return",
"nil",
"\n",
"}"
] | // Deletes all commits except the most recent one. | [
"Deletes",
"all",
"commits",
"except",
"the",
"most",
"recent",
"one",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/deleter.go#L91-L98 |
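The keep-only-last policy above walks every commit except the final one and deletes it. The same loop shape, reduced to plain strings so it runs on its own (here "delete" just means collecting the name):

package main

import "fmt"

// keepOnlyLast returns the commits that the policy above would delete:
// everything except the most recent (last) entry.
func keepOnlyLast(commits []string) (deleted []string) {
    for i, limit := 0, len(commits); i < limit-1; i++ {
        deleted = append(deleted, commits[i])
    }
    return deleted
}

func main() {
    commits := []string{"segments_1", "segments_2", "segments_3"}
    fmt.Println(keepOnlyLast(commits)) // [segments_1 segments_2]; only segments_3 survives
}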
5,548 | balzaczyy/golucene | core/index/readerPool.go | dropAll | func (pool *ReaderPool) dropAll(doSave bool) error {
pool.Lock() // synchronized
defer pool.Unlock()
var priorE error
for len(pool.readerMap) > 0 {
for k, rld := range pool.readerMap {
if doSave {
ok, err := rld.writeLiveDocs(pool.owner.directory)
if err != nil {
return err
}
if ok {
// Make sure we only write del docs and field updates for a live segment:
assert(pool.infoIsLive(rld.info))
// Must checkpoint because we just
// created new _X_N.del and field updates files;
// don't call IW.checkpoint because that also
// increments SIS.version, which we do not want to
// do here: it was done previously (after we
// invoked BDS.applyDeletes), whereas here all we
// did was move the state to disk:
err = pool.owner.checkpointNoSIS()
if err != nil {
return err
}
}
}
// Important to remove as-we-go, not with .clear()
// in the end, in case we hit an exception;
// otherwise we could over-decref if close() is
// called again:
delete(pool.readerMap, k)
// NOTE: it is allowed that these decRefs do not
// actually close the SRs; this happens when a
// near real-time reader is kept open after the
// IndexWriter instance is closed:
err := rld.dropReaders()
if err != nil {
if doSave {
return err
}
if priorE == nil {
priorE = err
}
}
}
}
assert(len(pool.readerMap) == 0)
return priorE
} | go | func (pool *ReaderPool) dropAll(doSave bool) error {
pool.Lock() // synchronized
defer pool.Unlock()
var priorE error
for len(pool.readerMap) > 0 {
for k, rld := range pool.readerMap {
if doSave {
ok, err := rld.writeLiveDocs(pool.owner.directory)
if err != nil {
return err
}
if ok {
// Make sure we only write del docs and field updates for a live segment:
assert(pool.infoIsLive(rld.info))
// Must checkpoint because we just
// created new _X_N.del and field updates files;
// don't call IW.checkpoint because that also
// increments SIS.version, which we do not want to
// do here: it was done previously (after we
// invoked BDS.applyDeletes), whereas here all we
// did was move the state to disk:
err = pool.owner.checkpointNoSIS()
if err != nil {
return err
}
}
}
// Important to remove as-we-go, not with .clear()
// in the end, in case we hit an exception;
// otherwise we could over-decref if close() is
// called again:
delete(pool.readerMap, k)
// NOTE: it is allowed that these decRefs do not
// actually close the SRs; this happens when a
// near real-time reader is kept open after the
// IndexWriter instance is closed:
err := rld.dropReaders()
if err != nil {
if doSave {
return err
}
if priorE == nil {
priorE = err
}
}
}
}
assert(len(pool.readerMap) == 0)
return priorE
} | [
"func",
"(",
"pool",
"*",
"ReaderPool",
")",
"dropAll",
"(",
"doSave",
"bool",
")",
"error",
"{",
"pool",
".",
"Lock",
"(",
")",
"// synchronized",
"\n",
"defer",
"pool",
".",
"Unlock",
"(",
")",
"\n\n",
"var",
"priorE",
"error",
"\n",
"for",
"len",
"(",
"pool",
".",
"readerMap",
")",
">",
"0",
"{",
"for",
"k",
",",
"rld",
":=",
"range",
"pool",
".",
"readerMap",
"{",
"if",
"doSave",
"{",
"ok",
",",
"err",
":=",
"rld",
".",
"writeLiveDocs",
"(",
"pool",
".",
"owner",
".",
"directory",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"if",
"ok",
"{",
"// Make sure we only write del docs and field updates for a live segment:",
"assert",
"(",
"pool",
".",
"infoIsLive",
"(",
"rld",
".",
"info",
")",
")",
"\n",
"// Must checkpoint because we just",
"// created new _X_N.del and field updates files;",
"// don't call IW.checkpoint because that also",
"// increments SIS.version, which we do not want to",
"// do here: it was done previously (after we",
"// invoked BDS.applyDeletes), whereas here all we",
"// did was move the state to disk:",
"err",
"=",
"pool",
".",
"owner",
".",
"checkpointNoSIS",
"(",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"return",
"err",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n\n",
"// Important to remove as-we-go, not with .clear()",
"// in the end, in case we hit an exception;",
"// otherwise we could over-decref if close() is",
"// called again:",
"delete",
"(",
"pool",
".",
"readerMap",
",",
"k",
")",
"\n\n",
"// NOTE: it is allowed that these decRefs do not",
"// actually close the SRs; this happens when a",
"// near real-time reader is kept open after the",
"// IndexWriter instance is closed:",
"err",
":=",
"rld",
".",
"dropReaders",
"(",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"if",
"doSave",
"{",
"return",
"err",
"\n",
"}",
"\n",
"if",
"priorE",
"==",
"nil",
"{",
"priorE",
"=",
"err",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n",
"assert",
"(",
"len",
"(",
"pool",
".",
"readerMap",
")",
"==",
"0",
")",
"\n",
"return",
"priorE",
"\n",
"}"
] | // Remove all our references to readers, and commits any pending changes. | [
"Remove",
"all",
"our",
"references",
"to",
"readers",
"and",
"commits",
"any",
"pending",
"changes",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/readerPool.go#L42-L94 |
5,549 | balzaczyy/golucene | core/store/ram.go | FileExists | func (rd *RAMDirectory) FileExists(name string) bool {
rd.EnsureOpen()
rd.fileMapLock.RLock()
defer rd.fileMapLock.RUnlock()
_, ok := rd.fileMap[name]
return ok
} | go | func (rd *RAMDirectory) FileExists(name string) bool {
rd.EnsureOpen()
rd.fileMapLock.RLock()
defer rd.fileMapLock.RUnlock()
_, ok := rd.fileMap[name]
return ok
} | [
"func",
"(",
"rd",
"*",
"RAMDirectory",
")",
"FileExists",
"(",
"name",
"string",
")",
"bool",
"{",
"rd",
".",
"EnsureOpen",
"(",
")",
"\n",
"rd",
".",
"fileMapLock",
".",
"RLock",
"(",
")",
"\n",
"defer",
"rd",
".",
"fileMapLock",
".",
"RUnlock",
"(",
")",
"\n",
"_",
",",
"ok",
":=",
"rd",
".",
"fileMap",
"[",
"name",
"]",
"\n",
"return",
"ok",
"\n",
"}"
] | // Returns true iff the named file exists in this directory | [
"Returns",
"true",
"iff",
"the",
"named",
"file",
"exists",
"in",
"this",
"directory"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/store/ram.go#L72-L78 |
5,550 | balzaczyy/golucene | core/store/ram.go | OpenInput | func (rd *RAMDirectory) OpenInput(name string, context IOContext) (in IndexInput, err error) {
rd.EnsureOpen()
if file, ok := rd.fileMap[name]; ok {
return newRAMInputStream(name, file)
}
return nil, errors.New(name)
} | go | func (rd *RAMDirectory) OpenInput(name string, context IOContext) (in IndexInput, err error) {
rd.EnsureOpen()
if file, ok := rd.fileMap[name]; ok {
return newRAMInputStream(name, file)
}
return nil, errors.New(name)
} | [
"func",
"(",
"rd",
"*",
"RAMDirectory",
")",
"OpenInput",
"(",
"name",
"string",
",",
"context",
"IOContext",
")",
"(",
"in",
"IndexInput",
",",
"err",
"error",
")",
"{",
"rd",
".",
"EnsureOpen",
"(",
")",
"\n",
"if",
"file",
",",
"ok",
":=",
"rd",
".",
"fileMap",
"[",
"name",
"]",
";",
"ok",
"{",
"return",
"newRAMInputStream",
"(",
"name",
",",
"file",
")",
"\n",
"}",
"\n",
"return",
"nil",
",",
"errors",
".",
"New",
"(",
"name",
")",
"\n",
"}"
] | // Returns a stream reading an existing file. | [
"Returns",
"a",
"stream",
"reading",
"an",
"existing",
"file",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/store/ram.go#L140-L146 |
5,551 | balzaczyy/golucene | core/store/ram.go | Close | func (rd *RAMDirectory) Close() error {
rd.IsOpen = false
rd.fileMapLock.Lock()
defer rd.fileMapLock.Unlock()
rd.fileMap = make(map[string]*RAMFile)
return nil
} | go | func (rd *RAMDirectory) Close() error {
rd.IsOpen = false
rd.fileMapLock.Lock()
defer rd.fileMapLock.Unlock()
rd.fileMap = make(map[string]*RAMFile)
return nil
} | [
"func",
"(",
"rd",
"*",
"RAMDirectory",
")",
"Close",
"(",
")",
"error",
"{",
"rd",
".",
"IsOpen",
"=",
"false",
"\n",
"rd",
".",
"fileMapLock",
".",
"Lock",
"(",
")",
"\n",
"defer",
"rd",
".",
"fileMapLock",
".",
"Unlock",
"(",
")",
"\n",
"rd",
".",
"fileMap",
"=",
"make",
"(",
"map",
"[",
"string",
"]",
"*",
"RAMFile",
")",
"\n",
"return",
"nil",
"\n",
"}"
] | // Closes the store to future operations, releasing associated memory. | [
"Closes",
"the",
"store",
"to",
"future",
"operations",
"releasing",
"associated",
"memroy",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/store/ram.go#L149-L155 |
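The three RAMDirectory rows above (FileExists, OpenInput, Close) all revolve around a mutex-guarded map from file name to an in-memory file. A stripped-down sketch of that shape; the real type carries more state, such as the IsOpen flag checked by EnsureOpen, and OpenInput wraps the file in a RAMInputStream rather than returning raw bytes.

package main

import (
    "errors"
    "fmt"
    "sync"
)

type ramDir struct {
    mu    sync.RWMutex
    files map[string][]byte
}

func newRAMDir() *ramDir { return &ramDir{files: map[string][]byte{}} }

func (d *ramDir) FileExists(name string) bool {
    d.mu.RLock()
    defer d.mu.RUnlock()
    _, ok := d.files[name]
    return ok
}

func (d *ramDir) OpenInput(name string) ([]byte, error) {
    d.mu.RLock()
    defer d.mu.RUnlock()
    if b, ok := d.files[name]; ok {
        return b, nil
    }
    return nil, errors.New(name) // like the original, the error text is just the name
}

// Close drops every file, mirroring RAMDirectory.Close above.
func (d *ramDir) Close() {
    d.mu.Lock()
    defer d.mu.Unlock()
    d.files = map[string][]byte{}
}

func main() {
    d := newRAMDir()
    d.files["segments_1"] = []byte{1, 2, 3}
    fmt.Println(d.FileExists("segments_1")) // true
    d.Close()
    fmt.Println(d.FileExists("segments_1")) // false
}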
5,552 | balzaczyy/golucene | core/index/merge.go | verbose | func (mp *LogMergePolicy) verbose(w *IndexWriter) bool {
return w != nil && w.infoStream.IsEnabled("LMP")
} | go | func (mp *LogMergePolicy) verbose(w *IndexWriter) bool {
return w != nil && w.infoStream.IsEnabled("LMP")
} | [
"func",
"(",
"mp",
"*",
"LogMergePolicy",
")",
"verbose",
"(",
"w",
"*",
"IndexWriter",
")",
"bool",
"{",
"return",
"w",
"!=",
"nil",
"&&",
"w",
".",
"infoStream",
".",
"IsEnabled",
"(",
"\"",
"\"",
")",
"\n",
"}"
] | // Returns true if LMP is enabled in IndexWriter's InfoStream. | [
"Returns",
"true",
"if",
"LMP",
"is",
"enabled",
"in",
"IndexWriter",
"s",
"InfoStream",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/merge.go#L732-L734 |
5,553 | balzaczyy/golucene | core/index/merge.go | message | func (mp *LogMergePolicy) message(message string, w *IndexWriter) {
if mp.verbose(w) {
w.infoStream.Message("LMP", message)
}
} | go | func (mp *LogMergePolicy) message(message string, w *IndexWriter) {
if mp.verbose(w) {
w.infoStream.Message("LMP", message)
}
} | [
"func",
"(",
"mp",
"*",
"LogMergePolicy",
")",
"message",
"(",
"message",
"string",
",",
"w",
"*",
"IndexWriter",
")",
"{",
"if",
"mp",
".",
"verbose",
"(",
"w",
")",
"{",
"w",
".",
"infoStream",
".",
"Message",
"(",
"\"",
"\"",
",",
"message",
")",
"\n",
"}",
"\n",
"}"
] | // Print a debug message to IndexWriter's infoStream. | [
"Print",
"a",
"debug",
"message",
"to",
"IndexWriter",
"s",
"infoStream",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/merge.go#L737-L741 |
5,554 | balzaczyy/golucene | core/search/explanation.go | addDetail | func (exp *ExplanationImpl) addDetail(detail Explanation) {
exp.details = append(exp.details, detail)
} | go | func (exp *ExplanationImpl) addDetail(detail Explanation) {
exp.details = append(exp.details, detail)
} | [
"func",
"(",
"exp",
"*",
"ExplanationImpl",
")",
"addDetail",
"(",
"detail",
"Explanation",
")",
"{",
"exp",
".",
"details",
"=",
"append",
"(",
"exp",
".",
"details",
",",
"detail",
")",
"\n",
"}"
] | // Adds a sub-node to this explanation node | [
"Adds",
"a",
"sub",
"-",
"node",
"to",
"this",
"explanation",
"node"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/search/explanation.go#L54-L56 |
5,555 | balzaczyy/golucene | core/document/fieldType.go | newFieldType | func newFieldType() *FieldType {
return &FieldType{
_tokenized: true,
_indexOptions: model.INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS,
numericPrecisionStep: util.NUMERIC_PRECISION_STEP_DEFAULT,
}
} | go | func newFieldType() *FieldType {
return &FieldType{
_tokenized: true,
_indexOptions: model.INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS,
numericPrecisionStep: util.NUMERIC_PRECISION_STEP_DEFAULT,
}
} | [
"func",
"newFieldType",
"(",
")",
"*",
"FieldType",
"{",
"return",
"&",
"FieldType",
"{",
"_tokenized",
":",
"true",
",",
"_indexOptions",
":",
"model",
".",
"INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS",
",",
"numericPrecisionStep",
":",
"util",
".",
"NUMERIC_PRECISION_STEP_DEFAULT",
",",
"}",
"\n",
"}"
] | // Create a new FieldType with default properties. | [
"Create",
"a",
"new",
"FieldType",
"with",
"default",
"properties",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/document/fieldType.go#L58-L64 |
5,556 | balzaczyy/golucene | core/document/fieldType.go | String | func (ft *FieldType) String() string {
var buf bytes.Buffer
if ft.Stored() {
buf.WriteString("stored")
}
if ft.Indexed() {
if buf.Len() > 0 {
buf.WriteString(",")
}
buf.WriteString("indexed")
if ft.Tokenized() {
buf.WriteString(",tokenized")
}
if ft.StoreTermVectors() {
buf.WriteString(",termVector")
}
if ft.StoreTermVectorOffsets() {
buf.WriteString(",termVectorOffsets")
}
if ft.StoreTermVectorPositions() {
buf.WriteString(",termVectorPosition")
}
if ft.StoreTermVectorPayloads() {
buf.WriteString(",termVectorPayloads")
}
if ft.OmitNorms() {
buf.WriteString(",omitNorms")
}
if ft.IndexOptions() != model.INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS {
fmt.Fprintf(&buf, ",indexOptions=%v", ft.IndexOptions())
}
if ft.numericType != 0 {
fmt.Fprintf(&buf, ",numericType=%v,numericPrecisionStep=%v", ft.numericType, ft.numericPrecisionStep)
}
}
if ft.DocValueType() != 0 {
if buf.Len() > 0 {
buf.WriteString(",")
}
fmt.Fprintf(&buf, "docValueType=%v", ft.DocValueType())
}
return buf.String()
} | go | func (ft *FieldType) String() string {
var buf bytes.Buffer
if ft.Stored() {
buf.WriteString("stored")
}
if ft.Indexed() {
if buf.Len() > 0 {
buf.WriteString(",")
}
buf.WriteString("indexed")
if ft.Tokenized() {
buf.WriteString(",tokenized")
}
if ft.StoreTermVectors() {
buf.WriteString(",termVector")
}
if ft.StoreTermVectorOffsets() {
buf.WriteString(",termVectorOffsets")
}
if ft.StoreTermVectorPositions() {
buf.WriteString(",termVectorPosition")
}
if ft.StoreTermVectorPayloads() {
buf.WriteString(",termVectorPayloads")
}
if ft.OmitNorms() {
buf.WriteString(",omitNorms")
}
if ft.IndexOptions() != model.INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS {
fmt.Fprintf(&buf, ",indexOptions=%v", ft.IndexOptions())
}
if ft.numericType != 0 {
fmt.Fprintf(&buf, ",numericType=%v,numericPrecisionStep=%v", ft.numericType, ft.numericPrecisionStep)
}
}
if ft.DocValueType() != 0 {
if buf.Len() > 0 {
buf.WriteString(",")
}
fmt.Fprintf(&buf, "docValueType=%v", ft.DocValueType())
}
return buf.String()
} | [
"func",
"(",
"ft",
"*",
"FieldType",
")",
"String",
"(",
")",
"string",
"{",
"var",
"buf",
"bytes",
".",
"Buffer",
"\n",
"if",
"ft",
".",
"Stored",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"Indexed",
"(",
")",
"{",
"if",
"buf",
".",
"Len",
"(",
")",
">",
"0",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"if",
"ft",
".",
"Tokenized",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"StoreTermVectors",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"StoreTermVectorOffsets",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"StoreTermVectorPositions",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"StoreTermVectorPayloads",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"OmitNorms",
"(",
")",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"IndexOptions",
"(",
")",
"!=",
"model",
".",
"INDEX_OPT_DOCS_AND_FREQS_AND_POSITIONS",
"{",
"fmt",
".",
"Fprintf",
"(",
"&",
"buf",
",",
"\"",
"\"",
",",
"ft",
".",
"IndexOptions",
"(",
")",
")",
"\n",
"}",
"\n",
"if",
"ft",
".",
"numericType",
"!=",
"0",
"{",
"fmt",
".",
"Fprintf",
"(",
"&",
"buf",
",",
"\"",
"\"",
",",
"ft",
".",
"numericType",
",",
"ft",
".",
"numericPrecisionStep",
")",
"\n",
"}",
"\n",
"}",
"\n",
"if",
"ft",
".",
"DocValueType",
"(",
")",
"!=",
"0",
"{",
"if",
"buf",
".",
"Len",
"(",
")",
">",
"0",
"{",
"buf",
".",
"WriteString",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"fmt",
".",
"Fprintf",
"(",
"&",
"buf",
",",
"\"",
"\"",
",",
"ft",
".",
"DocValueType",
"(",
")",
")",
"\n",
"}",
"\n",
"return",
"buf",
".",
"String",
"(",
")",
"\n",
"}"
] | // Prints a Field for human consumption. | [
"Prints",
"a",
"Field",
"for",
"human",
"consumption",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/document/fieldType.go#L100-L142 |
5,557 | balzaczyy/golucene | core/util/automaton/run.go | charClass | func (ra *RunAutomaton) charClass(c int) int {
return findIndex(c, ra.points)
} | go | func (ra *RunAutomaton) charClass(c int) int {
return findIndex(c, ra.points)
} | [
"func",
"(",
"ra",
"*",
"RunAutomaton",
")",
"charClass",
"(",
"c",
"int",
")",
"int",
"{",
"return",
"findIndex",
"(",
"c",
",",
"ra",
".",
"points",
")",
"\n",
"}"
] | // Gets character class of given codepoint | [
"Gets",
"character",
"class",
"of",
"given",
"codepoint"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/run.go#L26-L28 |
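charClass above delegates to a findIndex helper whose body is not included in this row. From how points is built in the newRunAutomaton row that follows (the sorted start points of the automaton's alphabet partition), findIndex presumably returns the index of the interval containing the codepoint. A hedged, assumed implementation of that lookup as a binary search:

package main

import "fmt"

// findIndexSketch is an assumption, not the repo's findIndex: it returns the
// largest i such that points[i] <= c, i.e. the character class containing c.
func findIndexSketch(c int, points []int) int {
    lo, hi := 0, len(points)-1
    for lo < hi {
        mid := (lo + hi + 1) / 2
        if points[mid] <= c {
            lo = mid
        } else {
            hi = mid - 1
        }
    }
    return lo
}

func main() {
    points := []int{0, 'a', 'z' + 1} // three classes: below 'a', 'a'..'z', above 'z'
    fmt.Println(findIndexSketch('m', points)) // 1
    fmt.Println(findIndexSketch('!', points)) // 0
    fmt.Println(findIndexSketch('~', points)) // 2
}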
5,558 | balzaczyy/golucene | core/util/automaton/run.go | newRunAutomaton | func newRunAutomaton(a *Automaton, maxInterval int, tablesize bool) *RunAutomaton {
a = determinize(a)
size := a.numStates()
if size < 1 {
size = 1
}
points := a.startPoints()
nPoints := len(points)
ans := &RunAutomaton{
maxInterval: maxInterval,
automaton: a,
points: points,
initial: 0,
size: size,
accept: make([]bool, size),
transitions: make([]int, size*nPoints),
}
for i, _ := range ans.transitions {
ans.transitions[i] = -1
}
for n := 0; n < size; n++ {
ans.accept[n] = a.IsAccept(n)
for c, point := range ans.points {
dest := a.step(n, point)
assert(dest == -1 || dest < size)
ans.transitions[n*nPoints+c] = dest
}
}
// Set alphabet table for optimal run performance.
if tablesize {
panic("not implemented yet")
}
return ans
} | go | func newRunAutomaton(a *Automaton, maxInterval int, tablesize bool) *RunAutomaton {
a = determinize(a)
size := a.numStates()
if size < 1 {
size = 1
}
points := a.startPoints()
nPoints := len(points)
ans := &RunAutomaton{
maxInterval: maxInterval,
automaton: a,
points: points,
initial: 0,
size: size,
accept: make([]bool, size),
transitions: make([]int, size*nPoints),
}
for i, _ := range ans.transitions {
ans.transitions[i] = -1
}
for n := 0; n < size; n++ {
ans.accept[n] = a.IsAccept(n)
for c, point := range ans.points {
dest := a.step(n, point)
assert(dest == -1 || dest < size)
ans.transitions[n*nPoints+c] = dest
}
}
// Set alphabet table for optimal run performance.
if tablesize {
panic("not implemented yet")
}
return ans
} | [
"func",
"newRunAutomaton",
"(",
"a",
"*",
"Automaton",
",",
"maxInterval",
"int",
",",
"tablesize",
"bool",
")",
"*",
"RunAutomaton",
"{",
"a",
"=",
"determinize",
"(",
"a",
")",
"\n",
"size",
":=",
"a",
".",
"numStates",
"(",
")",
"\n",
"if",
"size",
"<",
"1",
"{",
"size",
"=",
"1",
"\n",
"}",
"\n",
"points",
":=",
"a",
".",
"startPoints",
"(",
")",
"\n",
"nPoints",
":=",
"len",
"(",
"points",
")",
"\n",
"ans",
":=",
"&",
"RunAutomaton",
"{",
"maxInterval",
":",
"maxInterval",
",",
"automaton",
":",
"a",
",",
"points",
":",
"points",
",",
"initial",
":",
"0",
",",
"size",
":",
"size",
",",
"accept",
":",
"make",
"(",
"[",
"]",
"bool",
",",
"size",
")",
",",
"transitions",
":",
"make",
"(",
"[",
"]",
"int",
",",
"size",
"*",
"nPoints",
")",
",",
"}",
"\n",
"for",
"i",
",",
"_",
":=",
"range",
"ans",
".",
"transitions",
"{",
"ans",
".",
"transitions",
"[",
"i",
"]",
"=",
"-",
"1",
"\n",
"}",
"\n",
"for",
"n",
":=",
"0",
";",
"n",
"<",
"size",
";",
"n",
"++",
"{",
"ans",
".",
"accept",
"[",
"n",
"]",
"=",
"a",
".",
"IsAccept",
"(",
"n",
")",
"\n",
"for",
"c",
",",
"point",
":=",
"range",
"ans",
".",
"points",
"{",
"dest",
":=",
"a",
".",
"step",
"(",
"n",
",",
"point",
")",
"\n",
"assert",
"(",
"dest",
"==",
"-",
"1",
"||",
"dest",
"<",
"size",
")",
"\n",
"ans",
".",
"transitions",
"[",
"n",
"*",
"nPoints",
"+",
"c",
"]",
"=",
"dest",
"\n",
"}",
"\n",
"}",
"\n",
"// Set alphabet table for optimal run performance.",
"if",
"tablesize",
"{",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}",
"\n",
"return",
"ans",
"\n",
"}"
] | // Constructs a new RunAutomaton from a deterministic Automaton. | [
"Constructs",
"a",
"new",
"RunAutomaton",
"from",
"a",
"deterministic",
"Automaton",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/run.go#L31-L64 |
5,559 | balzaczyy/golucene | core/util/automaton/daciukMihov.go | convert | func convert(a *AutomatonBuilder, s *dfsaState, visited map[*dfsaState]int) int {
panic("not implemented yet")
} | go | func convert(a *AutomatonBuilder, s *dfsaState, visited map[*dfsaState]int) int {
panic("not implemented yet")
} | [
"func",
"convert",
"(",
"a",
"*",
"AutomatonBuilder",
",",
"s",
"*",
"dfsaState",
",",
"visited",
"map",
"[",
"*",
"dfsaState",
"]",
"int",
")",
"int",
"{",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}"
] | // Internal recursive traversal for conversion. | [
"Internal",
"recursive",
"traversal",
"for",
"conversion",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/daciukMihov.go#L32-L34 |
5,560 | balzaczyy/golucene | core/index/config.go | setIndexWriter | func (conf *IndexWriterConfig) setIndexWriter(writer *IndexWriter) *IndexWriterConfig {
conf.writer.Set(writer)
return conf
} | go | func (conf *IndexWriterConfig) setIndexWriter(writer *IndexWriter) *IndexWriterConfig {
conf.writer.Set(writer)
return conf
} | [
"func",
"(",
"conf",
"*",
"IndexWriterConfig",
")",
"setIndexWriter",
"(",
"writer",
"*",
"IndexWriter",
")",
"*",
"IndexWriterConfig",
"{",
"conf",
".",
"writer",
".",
"Set",
"(",
"writer",
")",
"\n",
"return",
"conf",
"\n",
"}"
] | // Sets the IndexWriter this config is attached to. | [
"Sets",
"the",
"IndexWriter",
"this",
"config",
"is",
"attached",
"to",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/config.go#L85-L88 |
5,561 | balzaczyy/golucene | core/codec/blocktree/segmentTermEnumFrame.go | prefixMatches | func (f *segmentTermsEnumFrame) prefixMatches(target []byte) bool {
for i := 0; i < f.prefix; i++ {
if target[i] != f.ste.term.At(i) {
return false
}
}
return true
} | go | func (f *segmentTermsEnumFrame) prefixMatches(target []byte) bool {
for i := 0; i < f.prefix; i++ {
if target[i] != f.ste.term.At(i) {
return false
}
}
return true
} | [
"func",
"(",
"f",
"*",
"segmentTermsEnumFrame",
")",
"prefixMatches",
"(",
"target",
"[",
"]",
"byte",
")",
"bool",
"{",
"for",
"i",
":=",
"0",
";",
"i",
"<",
"f",
".",
"prefix",
";",
"i",
"++",
"{",
"if",
"target",
"[",
"i",
"]",
"!=",
"f",
".",
"ste",
".",
"term",
".",
"At",
"(",
"i",
")",
"{",
"return",
"false",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"true",
"\n",
"}"
] | // Used only by assert | [
"Used",
"only",
"by",
"assert"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/codec/blocktree/segmentTermEnumFrame.go#L357-L364 |
5,562 | balzaczyy/golucene | core/util/automaton/sortedIntSet.go | incr | func (sis *SortedIntSet) incr(num int) {
if sis.useTreeMap {
val, ok := sis.dict[num]
if !ok {
sis.dict[num] = 1
} else {
sis.dict[num] = 1 + val
}
return
}
for i, v := range sis.values {
if v == num {
sis.counts[i]++
return
} else if num < v {
// insert here
sis.values = append(sis.values[:i], append([]int{num}, sis.values[i:]...)...)
sis.counts = append(sis.counts[:i], append([]int{1}, sis.counts[i:]...)...)
return
}
}
// append
sis.values = append(sis.values, num)
sis.counts = append(sis.counts, 1)
if len(sis.values) == TREE_MAP_CUTOVER {
sis.useTreeMap = true
for i, v := range sis.values {
sis.dict[v] = sis.counts[i]
}
}
} | go | func (sis *SortedIntSet) incr(num int) {
if sis.useTreeMap {
val, ok := sis.dict[num]
if !ok {
sis.dict[num] = 1
} else {
sis.dict[num] = 1 + val
}
return
}
for i, v := range sis.values {
if v == num {
sis.counts[i]++
return
} else if num < v {
// insert here
sis.values = append(sis.values[:i], append([]int{num}, sis.values[i:]...)...)
sis.counts = append(sis.counts[:i], append([]int{1}, sis.counts[i:]...)...)
return
}
}
// append
sis.values = append(sis.values, num)
sis.counts = append(sis.counts, 1)
if len(sis.values) == TREE_MAP_CUTOVER {
sis.useTreeMap = true
for i, v := range sis.values {
sis.dict[v] = sis.counts[i]
}
}
} | [
"func",
"(",
"sis",
"*",
"SortedIntSet",
")",
"incr",
"(",
"num",
"int",
")",
"{",
"if",
"sis",
".",
"useTreeMap",
"{",
"val",
",",
"ok",
":=",
"sis",
".",
"dict",
"[",
"num",
"]",
"\n",
"if",
"!",
"ok",
"{",
"sis",
".",
"dict",
"[",
"num",
"]",
"=",
"1",
"\n",
"}",
"else",
"{",
"sis",
".",
"dict",
"[",
"num",
"]",
"=",
"1",
"+",
"val",
"\n",
"}",
"\n",
"return",
"\n",
"}",
"\n\n",
"for",
"i",
",",
"v",
":=",
"range",
"sis",
".",
"values",
"{",
"if",
"v",
"==",
"num",
"{",
"sis",
".",
"counts",
"[",
"i",
"]",
"++",
"\n",
"return",
"\n",
"}",
"else",
"if",
"num",
"<",
"v",
"{",
"// insert here",
"sis",
".",
"values",
"=",
"append",
"(",
"sis",
".",
"values",
"[",
":",
"i",
"]",
",",
"append",
"(",
"[",
"]",
"int",
"{",
"num",
"}",
",",
"sis",
".",
"values",
"[",
"i",
":",
"]",
"...",
")",
"...",
")",
"\n",
"sis",
".",
"counts",
"=",
"append",
"(",
"sis",
".",
"counts",
"[",
":",
"i",
"]",
",",
"append",
"(",
"[",
"]",
"int",
"{",
"1",
"}",
",",
"sis",
".",
"counts",
"[",
"i",
":",
"]",
"...",
")",
"...",
")",
"\n",
"return",
"\n",
"}",
"\n",
"}",
"\n\n",
"// append",
"sis",
".",
"values",
"=",
"append",
"(",
"sis",
".",
"values",
",",
"num",
")",
"\n",
"sis",
".",
"counts",
"=",
"append",
"(",
"sis",
".",
"counts",
",",
"1",
")",
"\n\n",
"if",
"len",
"(",
"sis",
".",
"values",
")",
"==",
"TREE_MAP_CUTOVER",
"{",
"sis",
".",
"useTreeMap",
"=",
"true",
"\n",
"for",
"i",
",",
"v",
":=",
"range",
"sis",
".",
"values",
"{",
"sis",
".",
"dict",
"[",
"v",
"]",
"=",
"sis",
".",
"counts",
"[",
"i",
"]",
"\n",
"}",
"\n",
"}",
"\n",
"}"
] | // Adds this state to the set | [
"Adds",
"this",
"state",
"ot",
"the",
"set"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/sortedIntSet.go#L40-L73 |
5,563 | balzaczyy/golucene | core/util/automaton/sortedIntSet.go | decr | func (sis *SortedIntSet) decr(num int) {
if sis.useTreeMap {
count, ok := sis.dict[num]
assert(ok)
if count == 1 {
delete(sis.dict, num)
// Fall back to simple arrays once we touch zero again
if len(sis.dict) == 0 {
sis.useTreeMap = false
sis.values = sis.values[:0] // reuse slice
sis.counts = sis.counts[:0] // reuse slice
}
} else {
sis.dict[num] = count - 1
}
return
}
for i, v := range sis.values {
if v == num {
sis.counts[i]--
if sis.counts[i] == 0 {
limit := len(sis.values) - 1
if i < limit {
sis.values = append(sis.values[:i], sis.values[i+1:]...)
sis.counts = append(sis.counts[:i], sis.counts[i+1:]...)
} else {
sis.values = sis.values[:i]
sis.counts = sis.counts[:i]
}
}
return
}
}
panic("should not be here!")
} | go | func (sis *SortedIntSet) decr(num int) {
if sis.useTreeMap {
count, ok := sis.dict[num]
assert(ok)
if count == 1 {
delete(sis.dict, num)
// Fall back to simple arrays once we touch zero again
if len(sis.dict) == 0 {
sis.useTreeMap = false
sis.values = sis.values[:0] // reuse slice
sis.counts = sis.counts[:0] // reuse slice
}
} else {
sis.dict[num] = count - 1
}
return
}
for i, v := range sis.values {
if v == num {
sis.counts[i]--
if sis.counts[i] == 0 {
limit := len(sis.values) - 1
if i < limit {
sis.values = append(sis.values[:i], sis.values[i+1:]...)
sis.counts = append(sis.counts[:i], sis.counts[i+1:]...)
} else {
sis.values = sis.values[:i]
sis.counts = sis.counts[:i]
}
}
return
}
}
panic("should not be here!")
} | [
"func",
"(",
"sis",
"*",
"SortedIntSet",
")",
"decr",
"(",
"num",
"int",
")",
"{",
"if",
"sis",
".",
"useTreeMap",
"{",
"count",
",",
"ok",
":=",
"sis",
".",
"dict",
"[",
"num",
"]",
"\n",
"assert",
"(",
"ok",
")",
"\n",
"if",
"count",
"==",
"1",
"{",
"delete",
"(",
"sis",
".",
"dict",
",",
"num",
")",
"\n",
"// Fall back to simple arrays once we touch zero again",
"if",
"len",
"(",
"sis",
".",
"dict",
")",
"==",
"0",
"{",
"sis",
".",
"useTreeMap",
"=",
"false",
"\n",
"sis",
".",
"values",
"=",
"sis",
".",
"values",
"[",
":",
"0",
"]",
"// reuse slice",
"\n",
"sis",
".",
"counts",
"=",
"sis",
".",
"counts",
"[",
":",
"0",
"]",
"// reuse slice",
"\n",
"}",
"\n",
"}",
"else",
"{",
"sis",
".",
"dict",
"[",
"num",
"]",
"=",
"count",
"-",
"1",
"\n",
"}",
"\n",
"return",
"\n",
"}",
"\n\n",
"for",
"i",
",",
"v",
":=",
"range",
"sis",
".",
"values",
"{",
"if",
"v",
"==",
"num",
"{",
"sis",
".",
"counts",
"[",
"i",
"]",
"--",
"\n",
"if",
"sis",
".",
"counts",
"[",
"i",
"]",
"==",
"0",
"{",
"limit",
":=",
"len",
"(",
"sis",
".",
"values",
")",
"-",
"1",
"\n",
"if",
"i",
"<",
"limit",
"{",
"sis",
".",
"values",
"=",
"append",
"(",
"sis",
".",
"values",
"[",
":",
"i",
"]",
",",
"sis",
".",
"values",
"[",
"i",
"+",
"1",
":",
"]",
"...",
")",
"\n",
"sis",
".",
"counts",
"=",
"append",
"(",
"sis",
".",
"counts",
"[",
":",
"i",
"]",
",",
"sis",
".",
"counts",
"[",
"i",
"+",
"1",
":",
"]",
"...",
")",
"\n",
"}",
"else",
"{",
"sis",
".",
"values",
"=",
"sis",
".",
"values",
"[",
":",
"i",
"]",
"\n",
"sis",
".",
"counts",
"=",
"sis",
".",
"counts",
"[",
":",
"i",
"]",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"\n",
"}",
"\n",
"}",
"\n\n",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}"
] | // Removes the state from the set, if count decrs to 0 | [
"Removes",
"the",
"state",
"from",
"the",
"set",
"if",
"count",
"decrs",
"to",
"0"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/automaton/sortedIntSet.go#L76-L112 |
5,564 | balzaczyy/golucene | core/util/array.go | newArrayTimSorter | func newArrayTimSorter(arr sort.Interface, maxTempSlots int) *ArrayTimSorter {
ans := &ArrayTimSorter{
TimSorter: newTimSorter(arr, maxTempSlots),
arr: arr,
}
if maxTempSlots > 0 {
ans.tmp = make([]interface{}, maxTempSlots)
}
return ans
} | go | func newArrayTimSorter(arr sort.Interface, maxTempSlots int) *ArrayTimSorter {
ans := &ArrayTimSorter{
TimSorter: newTimSorter(arr, maxTempSlots),
arr: arr,
}
if maxTempSlots > 0 {
ans.tmp = make([]interface{}, maxTempSlots)
}
return ans
} | [
"func",
"newArrayTimSorter",
"(",
"arr",
"sort",
".",
"Interface",
",",
"maxTempSlots",
"int",
")",
"*",
"ArrayTimSorter",
"{",
"ans",
":=",
"&",
"ArrayTimSorter",
"{",
"TimSorter",
":",
"newTimSorter",
"(",
"arr",
",",
"maxTempSlots",
")",
",",
"arr",
":",
"arr",
",",
"}",
"\n",
"if",
"maxTempSlots",
">",
"0",
"{",
"ans",
".",
"tmp",
"=",
"make",
"(",
"[",
"]",
"interface",
"{",
"}",
",",
"maxTempSlots",
")",
"\n",
"}",
"\n",
"return",
"ans",
"\n",
"}"
] | // Create a new ArrayTimSorter | [
"Create",
"a",
"new",
"ArrayTimSorter"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/array.go#L129-L138 |
5,565 | balzaczyy/golucene | core/util/sort.go | newTimSorter | func newTimSorter(arr sort.Interface, maxTempSlots int) *TimSorter {
return &TimSorter{
Sorter: newSorter(arr),
runEnds: make([]int, 1+STACKSIZE),
maxTempSlots: maxTempSlots,
}
} | go | func newTimSorter(arr sort.Interface, maxTempSlots int) *TimSorter {
return &TimSorter{
Sorter: newSorter(arr),
runEnds: make([]int, 1+STACKSIZE),
maxTempSlots: maxTempSlots,
}
} | [
"func",
"newTimSorter",
"(",
"arr",
"sort",
".",
"Interface",
",",
"maxTempSlots",
"int",
")",
"*",
"TimSorter",
"{",
"return",
"&",
"TimSorter",
"{",
"Sorter",
":",
"newSorter",
"(",
"arr",
")",
",",
"runEnds",
":",
"make",
"(",
"[",
"]",
"int",
",",
"1",
"+",
"STACKSIZE",
")",
",",
"maxTempSlots",
":",
"maxTempSlots",
",",
"}",
"\n",
"}"
] | // Create a new TimSorter | [
"Create",
"a",
"new",
"TimSorter"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/sort.go#L249-L255 |
5,566 | balzaczyy/golucene | core/util/sort.go | minRun | func minRun(length int) int {
assert2(length >= MINRUN, fmt.Sprintf("length=%v", length))
n := length
r := 0
for n >= 64 {
r = (r | (n & 1))
n = int(uint(n) >> 1)
}
minRun := n + r
assert(minRun >= MINRUN && minRun <= RUN_THRESHOLD)
return minRun
} | go | func minRun(length int) int {
assert2(length >= MINRUN, fmt.Sprintf("length=%v", length))
n := length
r := 0
for n >= 64 {
r = (r | (n & 1))
n = int(uint(n) >> 1)
}
minRun := n + r
assert(minRun >= MINRUN && minRun <= RUN_THRESHOLD)
return minRun
} | [
"func",
"minRun",
"(",
"length",
"int",
")",
"int",
"{",
"assert2",
"(",
"length",
">=",
"MINRUN",
",",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"length",
")",
")",
"\n",
"n",
":=",
"length",
"\n",
"r",
":=",
"0",
"\n",
"for",
"n",
">=",
"64",
"{",
"r",
"=",
"(",
"r",
"|",
"(",
"n",
"&",
"1",
")",
")",
"\n",
"n",
"=",
"int",
"(",
"uint",
"(",
"n",
")",
">>",
"1",
")",
"\n",
"}",
"\n",
"minRun",
":=",
"n",
"+",
"r",
"\n",
"assert",
"(",
"minRun",
">=",
"MINRUN",
"&&",
"minRun",
"<=",
"RUN_THRESHOLD",
")",
"\n",
"return",
"minRun",
"\n",
"}"
] | // Minimum run length for an array of given length. | [
"Minimum",
"run",
"length",
"for",
"an",
"array",
"of",
"given",
"length",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/sort.go#L258-L269 |
5,567 | balzaczyy/golucene | core/util/sort.go | nextRun | func (sorter *TimSorter) nextRun() int {
runBase := sorter.runEnd(0)
assert2(runBase < sorter.to, fmt.Sprintf("runBase=%v to=%v", runBase, sorter.to))
if runBase == sorter.to-1 {
return 1
}
o := runBase + 2
if sorter.Less(runBase+1, runBase) {
// run must be strictly descending
for o < sorter.to && sorter.Less(o, o-1) {
o++
}
sorter.reverse(runBase, o)
} else {
// run must be non-descending
for o < sorter.to && !sorter.Less(o, o-1) {
o++
}
}
runHi := runBase + sorter.minRun
if sorter.to < runHi {
runHi = sorter.to
}
if o > runHi {
runHi = o
}
sorter.binarySort(runBase, runHi, o)
for i := runBase; i < runHi-1; i++ {
assert(!sorter.Less(i+1, i))
}
return runHi - runBase
} | go | func (sorter *TimSorter) nextRun() int {
runBase := sorter.runEnd(0)
assert2(runBase < sorter.to, fmt.Sprintf("runBase=%v to=%v", runBase, sorter.to))
if runBase == sorter.to-1 {
return 1
}
o := runBase + 2
if sorter.Less(runBase+1, runBase) {
// run must be strictly descending
for o < sorter.to && sorter.Less(o, o-1) {
o++
}
sorter.reverse(runBase, o)
} else {
// run must be non-descending
for o < sorter.to && !sorter.Less(o, o-1) {
o++
}
}
runHi := runBase + sorter.minRun
if sorter.to < runHi {
runHi = sorter.to
}
if o > runHi {
runHi = o
}
sorter.binarySort(runBase, runHi, o)
for i := runBase; i < runHi-1; i++ {
assert(!sorter.Less(i+1, i))
}
return runHi - runBase
} | [
"func",
"(",
"sorter",
"*",
"TimSorter",
")",
"nextRun",
"(",
")",
"int",
"{",
"runBase",
":=",
"sorter",
".",
"runEnd",
"(",
"0",
")",
"\n",
"assert2",
"(",
"runBase",
"<",
"sorter",
".",
"to",
",",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"runBase",
",",
"sorter",
".",
"to",
")",
")",
"\n",
"if",
"runBase",
"==",
"sorter",
".",
"to",
"-",
"1",
"{",
"return",
"1",
"\n",
"}",
"\n",
"o",
":=",
"runBase",
"+",
"2",
"\n",
"if",
"sorter",
".",
"Less",
"(",
"runBase",
"+",
"1",
",",
"runBase",
")",
"{",
"// run must be strictly descending",
"for",
"o",
"<",
"sorter",
".",
"to",
"&&",
"sorter",
".",
"Less",
"(",
"o",
",",
"o",
"-",
"1",
")",
"{",
"o",
"++",
"\n",
"}",
"\n",
"sorter",
".",
"reverse",
"(",
"runBase",
",",
"o",
")",
"\n",
"}",
"else",
"{",
"// run must be non-descending",
"for",
"o",
"<",
"sorter",
".",
"to",
"&&",
"!",
"sorter",
".",
"Less",
"(",
"o",
",",
"o",
"-",
"1",
")",
"{",
"o",
"++",
"\n",
"}",
"\n",
"}",
"\n",
"runHi",
":=",
"runBase",
"+",
"sorter",
".",
"minRun",
"\n",
"if",
"sorter",
".",
"to",
"<",
"runHi",
"{",
"runHi",
"=",
"sorter",
".",
"to",
"\n",
"}",
"\n",
"if",
"o",
">",
"runHi",
"{",
"runHi",
"=",
"o",
"\n",
"}",
"\n",
"sorter",
".",
"binarySort",
"(",
"runBase",
",",
"runHi",
",",
"o",
")",
"\n",
"for",
"i",
":=",
"runBase",
";",
"i",
"<",
"runHi",
"-",
"1",
";",
"i",
"++",
"{",
"assert",
"(",
"!",
"sorter",
".",
"Less",
"(",
"i",
"+",
"1",
",",
"i",
")",
")",
"\n",
"}",
"\n",
"return",
"runHi",
"-",
"runBase",
"\n",
"}"
] | // Compute the length of the next run, make the run sorted and return its length | [
"Compute",
"the",
"length",
"of",
"the",
"next",
"run",
"make",
"the",
"run",
"sorted",
"and",
"return",
"its",
"length"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/util/sort.go#L281-L312 |
5,568 | balzaczyy/golucene | core/index/concurrentMergeScheduler.go | SetMaxMergesAndRoutines | func (cms *ConcurrentMergeScheduler) SetMaxMergesAndRoutines(maxMergeCount, maxRoutineCount int) {
assert2(maxRoutineCount >= 1, "maxRoutineCount should be at least 1")
assert2(maxMergeCount >= 1, "maxMergeCount should be at least 1")
assert2(maxRoutineCount <= maxMergeCount, fmt.Sprintf(
"maxRoutineCount should be <= maxMergeCount (= %v)", maxMergeCount))
oldCount := cms.maxRoutineCount
cms.maxRoutineCount = maxRoutineCount
cms.maxMergeCount = maxMergeCount
cms.Lock()
defer cms.Unlock()
for i := oldCount; i < maxRoutineCount; i++ {
go cms.worker(i)
}
} | go | func (cms *ConcurrentMergeScheduler) SetMaxMergesAndRoutines(maxMergeCount, maxRoutineCount int) {
assert2(maxRoutineCount >= 1, "maxRoutineCount should be at least 1")
assert2(maxMergeCount >= 1, "maxMergeCount should be at least 1")
assert2(maxRoutineCount <= maxMergeCount, fmt.Sprintf(
"maxRoutineCount should be <= maxMergeCount (= %v)", maxMergeCount))
oldCount := cms.maxRoutineCount
cms.maxRoutineCount = maxRoutineCount
cms.maxMergeCount = maxMergeCount
cms.Lock()
defer cms.Unlock()
for i := oldCount; i < maxRoutineCount; i++ {
go cms.worker(i)
}
} | [
"func",
"(",
"cms",
"*",
"ConcurrentMergeScheduler",
")",
"SetMaxMergesAndRoutines",
"(",
"maxMergeCount",
",",
"maxRoutineCount",
"int",
")",
"{",
"assert2",
"(",
"maxRoutineCount",
">=",
"1",
",",
"\"",
"\"",
")",
"\n",
"assert2",
"(",
"maxMergeCount",
">=",
"1",
",",
"\"",
"\"",
")",
"\n",
"assert2",
"(",
"maxRoutineCount",
"<=",
"maxMergeCount",
",",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"maxMergeCount",
")",
")",
"\n\n",
"oldCount",
":=",
"cms",
".",
"maxRoutineCount",
"\n",
"cms",
".",
"maxRoutineCount",
"=",
"maxRoutineCount",
"\n",
"cms",
".",
"maxMergeCount",
"=",
"maxMergeCount",
"\n\n",
"cms",
".",
"Lock",
"(",
")",
"\n",
"defer",
"cms",
".",
"Unlock",
"(",
")",
"\n",
"for",
"i",
":=",
"oldCount",
";",
"i",
"<",
"maxRoutineCount",
";",
"i",
"++",
"{",
"go",
"cms",
".",
"worker",
"(",
"i",
")",
"\n",
"}",
"\n",
"}"
] | // Sets the maximum number of merge goroutines and simultaneous
// merges allowed. | [
"Sets",
"the",
"maximum",
"number",
"of",
"merge",
"goroutines",
"and",
"simultaneous",
"merges",
"allowed",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/concurrentMergeScheduler.go#L135-L150 |
5,569 | balzaczyy/golucene | core/index/segments.go | NewSegmentReader | func NewSegmentReader(si *SegmentCommitInfo,
termInfosIndexDivisor int, context store.IOContext) (r *SegmentReader, err error) {
r = &SegmentReader{}
r.AtomicReaderImpl = newAtomicReader(r)
r.ARFieldsReader = r
r.si = si
if r.fieldInfos, err = ReadFieldInfos(si); err != nil {
return nil, err
}
// log.Print("Obtaining SegmentCoreReaders...")
if r.core, err = newSegmentCoreReaders(r, si.Info.Dir, si, context, termInfosIndexDivisor); err != nil {
return nil, err
}
// r.segDocValues = newSegmentDocValues()
var success = false
defer func() {
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if !success {
// log.Printf("Failed to initialize SegmentReader.")
r.core.decRef()
}
}()
codec := si.Info.Codec().(Codec)
if si.HasDeletions() {
panic("not supported yet")
} else {
assert(si.DelCount() == 0)
}
r.numDocs = si.Info.DocCount() - si.DelCount()
if r.fieldInfos.HasDocValues {
r.initDocValuesProducers(codec)
}
success = true
return r, nil
} | go | func NewSegmentReader(si *SegmentCommitInfo,
termInfosIndexDivisor int, context store.IOContext) (r *SegmentReader, err error) {
r = &SegmentReader{}
r.AtomicReaderImpl = newAtomicReader(r)
r.ARFieldsReader = r
r.si = si
if r.fieldInfos, err = ReadFieldInfos(si); err != nil {
return nil, err
}
// log.Print("Obtaining SegmentCoreReaders...")
if r.core, err = newSegmentCoreReaders(r, si.Info.Dir, si, context, termInfosIndexDivisor); err != nil {
return nil, err
}
// r.segDocValues = newSegmentDocValues()
var success = false
defer func() {
// With lock-less commits, it's entirely possible (and
// fine) to hit a FileNotFound exception above. In
// this case, we want to explicitly close any subset
// of things that were opened so that we don't have to
// wait for a GC to do so.
if !success {
// log.Printf("Failed to initialize SegmentReader.")
r.core.decRef()
}
}()
codec := si.Info.Codec().(Codec)
if si.HasDeletions() {
panic("not supported yet")
} else {
assert(si.DelCount() == 0)
}
r.numDocs = si.Info.DocCount() - si.DelCount()
if r.fieldInfos.HasDocValues {
r.initDocValuesProducers(codec)
}
success = true
return r, nil
} | [
"func",
"NewSegmentReader",
"(",
"si",
"*",
"SegmentCommitInfo",
",",
"termInfosIndexDivisor",
"int",
",",
"context",
"store",
".",
"IOContext",
")",
"(",
"r",
"*",
"SegmentReader",
",",
"err",
"error",
")",
"{",
"r",
"=",
"&",
"SegmentReader",
"{",
"}",
"\n",
"r",
".",
"AtomicReaderImpl",
"=",
"newAtomicReader",
"(",
"r",
")",
"\n",
"r",
".",
"ARFieldsReader",
"=",
"r",
"\n\n",
"r",
".",
"si",
"=",
"si",
"\n",
"if",
"r",
".",
"fieldInfos",
",",
"err",
"=",
"ReadFieldInfos",
"(",
"si",
")",
";",
"err",
"!=",
"nil",
"{",
"return",
"nil",
",",
"err",
"\n",
"}",
"\n",
"// log.Print(\"Obtaining SegmentCoreReaders...\")",
"if",
"r",
".",
"core",
",",
"err",
"=",
"newSegmentCoreReaders",
"(",
"r",
",",
"si",
".",
"Info",
".",
"Dir",
",",
"si",
",",
"context",
",",
"termInfosIndexDivisor",
")",
";",
"err",
"!=",
"nil",
"{",
"return",
"nil",
",",
"err",
"\n",
"}",
"\n",
"// r.segDocValues = newSegmentDocValues()",
"var",
"success",
"=",
"false",
"\n",
"defer",
"func",
"(",
")",
"{",
"// With lock-less commits, it's entirely possible (and",
"// fine) to hit a FileNotFound exception above. In",
"// this case, we want to explicitly close any subset",
"// of things that were opened so that we don't have to",
"// wait for a GC to do so.",
"if",
"!",
"success",
"{",
"// log.Printf(\"Failed to initialize SegmentReader.\")",
"r",
".",
"core",
".",
"decRef",
"(",
")",
"\n",
"}",
"\n",
"}",
"(",
")",
"\n\n",
"codec",
":=",
"si",
".",
"Info",
".",
"Codec",
"(",
")",
".",
"(",
"Codec",
")",
"\n",
"if",
"si",
".",
"HasDeletions",
"(",
")",
"{",
"panic",
"(",
"\"",
"\"",
")",
"\n",
"}",
"else",
"{",
"assert",
"(",
"si",
".",
"DelCount",
"(",
")",
"==",
"0",
")",
"\n",
"}",
"\n",
"r",
".",
"numDocs",
"=",
"si",
".",
"Info",
".",
"DocCount",
"(",
")",
"-",
"si",
".",
"DelCount",
"(",
")",
"\n\n",
"if",
"r",
".",
"fieldInfos",
".",
"HasDocValues",
"{",
"r",
".",
"initDocValuesProducers",
"(",
"codec",
")",
"\n",
"}",
"\n",
"success",
"=",
"true",
"\n",
"return",
"r",
",",
"nil",
"\n",
"}"
] | /**
* Constructs a new SegmentReader with a new core.
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
// TODO: why is this public? | [
"Constructs",
"a",
"new",
"SegmentReader",
"with",
"a",
"new",
"core",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/segments.go#L45-L88 |
5,570 | balzaczyy/golucene | core/index/segments.go | String | func (r *SegmentReader) String() string {
// SegmentInfo.toString takes dir and number of
// *pending* deletions; so we reverse compute that here:
return r.si.StringOf(r.si.Info.Dir, r.si.Info.DocCount()-r.numDocs-r.si.DelCount())
} | go | func (r *SegmentReader) String() string {
// SegmentInfo.toString takes dir and number of
// *pending* deletions; so we reverse compute that here:
return r.si.StringOf(r.si.Info.Dir, r.si.Info.DocCount()-r.numDocs-r.si.DelCount())
} | [
"func",
"(",
"r",
"*",
"SegmentReader",
")",
"String",
"(",
")",
"string",
"{",
"// SegmentInfo.toString takes dir and number of",
"// *pending* deletions; so we reverse compute that here:",
"return",
"r",
".",
"si",
".",
"StringOf",
"(",
"r",
".",
"si",
".",
"Info",
".",
"Dir",
",",
"r",
".",
"si",
".",
"Info",
".",
"DocCount",
"(",
")",
"-",
"r",
".",
"numDocs",
"-",
"r",
".",
"si",
".",
"DelCount",
"(",
")",
")",
"\n",
"}"
] | // SegmentReader.java L179 | [
"SegmentReader",
".",
"java",
"L179"
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/index/segments.go#L198-L202 |
5,571 | balzaczyy/golucene | core/store/fs.go | DeleteFile | func (d *FSDirectory) DeleteFile(name string) (err error) {
d.EnsureOpen()
if err = os.Remove(filepath.Join(d.path, name)); err == nil {
d.staleFilesLock.Lock()
defer d.staleFilesLock.Unlock()
delete(d.staleFiles, name)
}
return
} | go | func (d *FSDirectory) DeleteFile(name string) (err error) {
d.EnsureOpen()
if err = os.Remove(filepath.Join(d.path, name)); err == nil {
d.staleFilesLock.Lock()
defer d.staleFilesLock.Unlock()
delete(d.staleFiles, name)
}
return
} | [
"func",
"(",
"d",
"*",
"FSDirectory",
")",
"DeleteFile",
"(",
"name",
"string",
")",
"(",
"err",
"error",
")",
"{",
"d",
".",
"EnsureOpen",
"(",
")",
"\n",
"if",
"err",
"=",
"os",
".",
"Remove",
"(",
"filepath",
".",
"Join",
"(",
"d",
".",
"path",
",",
"name",
")",
")",
";",
"err",
"==",
"nil",
"{",
"d",
".",
"staleFilesLock",
".",
"Lock",
"(",
")",
"\n",
"defer",
"d",
".",
"staleFilesLock",
".",
"Unlock",
"(",
")",
"\n",
"delete",
"(",
"d",
".",
"staleFiles",
",",
"name",
")",
"\n",
"}",
"\n",
"return",
"\n",
"}"
] | // Removes an existing file in the directory. | [
"Removes",
"an",
"existing",
"file",
"in",
"the",
"directory",
"."
] | d0be9ee89713717392b5b089e18a5a6fb6a63562 | https://github.com/balzaczyy/golucene/blob/d0be9ee89713717392b5b089e18a5a6fb6a63562/core/store/fs.go#L129-L137 |
5,572 | kr/fs | walk.go | WalkFS | func WalkFS(root string, fs FileSystem) *Walker {
info, err := fs.Lstat(root)
return &Walker{
fs: fs,
stack: []item{{root, info, err}},
}
} | go | func WalkFS(root string, fs FileSystem) *Walker {
info, err := fs.Lstat(root)
return &Walker{
fs: fs,
stack: []item{{root, info, err}},
}
} | [
"func",
"WalkFS",
"(",
"root",
"string",
",",
"fs",
"FileSystem",
")",
"*",
"Walker",
"{",
"info",
",",
"err",
":=",
"fs",
".",
"Lstat",
"(",
"root",
")",
"\n",
"return",
"&",
"Walker",
"{",
"fs",
":",
"fs",
",",
"stack",
":",
"[",
"]",
"item",
"{",
"{",
"root",
",",
"info",
",",
"err",
"}",
"}",
",",
"}",
"\n",
"}"
] | // WalkFS returns a new Walker rooted at root on the FileSystem fs. | [
"WalkFS",
"returns",
"a",
"new",
"Walker",
"rooted",
"at",
"root",
"on",
"the",
"FileSystem",
"fs",
"."
] | 1455def202f6e05b95cc7bfc7e8ae67ae5141eba | https://github.com/kr/fs/blob/1455def202f6e05b95cc7bfc7e8ae67ae5141eba/walk.go#L34-L40 |
5,573 | kr/fs | walk.go | Step | func (w *Walker) Step() bool {
if w.descend && w.cur.err == nil && w.cur.info.IsDir() {
list, err := w.fs.ReadDir(w.cur.path)
if err != nil {
w.cur.err = err
w.stack = append(w.stack, w.cur)
} else {
for i := len(list) - 1; i >= 0; i-- {
path := w.fs.Join(w.cur.path, list[i].Name())
w.stack = append(w.stack, item{path, list[i], nil})
}
}
}
if len(w.stack) == 0 {
return false
}
i := len(w.stack) - 1
w.cur = w.stack[i]
w.stack = w.stack[:i]
w.descend = true
return true
} | go | func (w *Walker) Step() bool {
if w.descend && w.cur.err == nil && w.cur.info.IsDir() {
list, err := w.fs.ReadDir(w.cur.path)
if err != nil {
w.cur.err = err
w.stack = append(w.stack, w.cur)
} else {
for i := len(list) - 1; i >= 0; i-- {
path := w.fs.Join(w.cur.path, list[i].Name())
w.stack = append(w.stack, item{path, list[i], nil})
}
}
}
if len(w.stack) == 0 {
return false
}
i := len(w.stack) - 1
w.cur = w.stack[i]
w.stack = w.stack[:i]
w.descend = true
return true
} | [
"func",
"(",
"w",
"*",
"Walker",
")",
"Step",
"(",
")",
"bool",
"{",
"if",
"w",
".",
"descend",
"&&",
"w",
".",
"cur",
".",
"err",
"==",
"nil",
"&&",
"w",
".",
"cur",
".",
"info",
".",
"IsDir",
"(",
")",
"{",
"list",
",",
"err",
":=",
"w",
".",
"fs",
".",
"ReadDir",
"(",
"w",
".",
"cur",
".",
"path",
")",
"\n",
"if",
"err",
"!=",
"nil",
"{",
"w",
".",
"cur",
".",
"err",
"=",
"err",
"\n",
"w",
".",
"stack",
"=",
"append",
"(",
"w",
".",
"stack",
",",
"w",
".",
"cur",
")",
"\n",
"}",
"else",
"{",
"for",
"i",
":=",
"len",
"(",
"list",
")",
"-",
"1",
";",
"i",
">=",
"0",
";",
"i",
"--",
"{",
"path",
":=",
"w",
".",
"fs",
".",
"Join",
"(",
"w",
".",
"cur",
".",
"path",
",",
"list",
"[",
"i",
"]",
".",
"Name",
"(",
")",
")",
"\n",
"w",
".",
"stack",
"=",
"append",
"(",
"w",
".",
"stack",
",",
"item",
"{",
"path",
",",
"list",
"[",
"i",
"]",
",",
"nil",
"}",
")",
"\n",
"}",
"\n",
"}",
"\n",
"}",
"\n\n",
"if",
"len",
"(",
"w",
".",
"stack",
")",
"==",
"0",
"{",
"return",
"false",
"\n",
"}",
"\n",
"i",
":=",
"len",
"(",
"w",
".",
"stack",
")",
"-",
"1",
"\n",
"w",
".",
"cur",
"=",
"w",
".",
"stack",
"[",
"i",
"]",
"\n",
"w",
".",
"stack",
"=",
"w",
".",
"stack",
"[",
":",
"i",
"]",
"\n",
"w",
".",
"descend",
"=",
"true",
"\n",
"return",
"true",
"\n",
"}"
] | // Step advances the Walker to the next file or directory,
// which will then be available through the Path, Stat,
// and Err methods.
// It returns false when the walk stops at the end of the tree. | [
"Step",
"advances",
"the",
"Walker",
"to",
"the",
"next",
"file",
"or",
"directory",
"which",
"will",
"then",
"be",
"available",
"through",
"the",
"Path",
"Stat",
"and",
"Err",
"methods",
".",
"It",
"returns",
"false",
"when",
"the",
"walk",
"stops",
"at",
"the",
"end",
"of",
"the",
"tree",
"."
] | 1455def202f6e05b95cc7bfc7e8ae67ae5141eba | https://github.com/kr/fs/blob/1455def202f6e05b95cc7bfc7e8ae67ae5141eba/walk.go#L46-L68 |
5,574 | dotabuff/manta | entity.go | newEntity | func newEntity(index, serial int32, class *class) *Entity {
return &Entity{
index: index,
serial: serial,
class: class,
active: true,
state: newFieldState(),
fpCache: make(map[string]*fieldPath),
fpNoop: make(map[string]bool),
}
} | go | func newEntity(index, serial int32, class *class) *Entity {
return &Entity{
index: index,
serial: serial,
class: class,
active: true,
state: newFieldState(),
fpCache: make(map[string]*fieldPath),
fpNoop: make(map[string]bool),
}
} | [
"func",
"newEntity",
"(",
"index",
",",
"serial",
"int32",
",",
"class",
"*",
"class",
")",
"*",
"Entity",
"{",
"return",
"&",
"Entity",
"{",
"index",
":",
"index",
",",
"serial",
":",
"serial",
",",
"class",
":",
"class",
",",
"active",
":",
"true",
",",
"state",
":",
"newFieldState",
"(",
")",
",",
"fpCache",
":",
"make",
"(",
"map",
"[",
"string",
"]",
"*",
"fieldPath",
")",
",",
"fpNoop",
":",
"make",
"(",
"map",
"[",
"string",
"]",
"bool",
")",
",",
"}",
"\n",
"}"
] | // newEntity returns a new entity for the given index, serial and class | [
"newEntity",
"returns",
"a",
"new",
"entity",
"for",
"the",
"given",
"index",
"serial",
"and",
"class"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L62-L72 |
5,575 | dotabuff/manta | entity.go | String | func (e *Entity) String() string {
return fmt.Sprintf("%d <%s>", e.index, e.class.name)
} | go | func (e *Entity) String() string {
return fmt.Sprintf("%d <%s>", e.index, e.class.name)
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"String",
"(",
")",
"string",
"{",
"return",
"fmt",
".",
"Sprintf",
"(",
"\"",
"\"",
",",
"e",
".",
"index",
",",
"e",
".",
"class",
".",
"name",
")",
"\n",
"}"
] | // String returns a human identifiable string for the Entity | [
"String",
"returns",
"a",
"human",
"identifiable",
"string",
"for",
"the",
"Entity"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L75-L77 |
5,576 | dotabuff/manta | entity.go | Map | func (e *Entity) Map() map[string]interface{} {
values := make(map[string]interface{})
for _, fp := range e.class.getFieldPaths(newFieldPath(), e.state) {
values[e.class.getNameForFieldPath(fp)] = e.state.get(fp)
}
return values
} | go | func (e *Entity) Map() map[string]interface{} {
values := make(map[string]interface{})
for _, fp := range e.class.getFieldPaths(newFieldPath(), e.state) {
values[e.class.getNameForFieldPath(fp)] = e.state.get(fp)
}
return values
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"Map",
"(",
")",
"map",
"[",
"string",
"]",
"interface",
"{",
"}",
"{",
"values",
":=",
"make",
"(",
"map",
"[",
"string",
"]",
"interface",
"{",
"}",
")",
"\n",
"for",
"_",
",",
"fp",
":=",
"range",
"e",
".",
"class",
".",
"getFieldPaths",
"(",
"newFieldPath",
"(",
")",
",",
"e",
".",
"state",
")",
"{",
"values",
"[",
"e",
".",
"class",
".",
"getNameForFieldPath",
"(",
"fp",
")",
"]",
"=",
"e",
".",
"state",
".",
"get",
"(",
"fp",
")",
"\n",
"}",
"\n",
"return",
"values",
"\n",
"}"
] | // Map returns a map of current entity state as key-value pairs | [
"Map",
"returns",
"a",
"map",
"of",
"current",
"entity",
"state",
"as",
"key",
"-",
"value",
"pairs"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L80-L86 |
5,577 | dotabuff/manta | entity.go | Get | func (e *Entity) Get(name string) interface{} {
if fp, ok := e.fpCache[name]; ok {
return e.state.get(fp)
}
if e.fpNoop[name] {
return nil
}
fp := newFieldPath()
if !e.class.getFieldPathForName(fp, name) {
e.fpNoop[name] = true
fp.release()
return nil
}
e.fpCache[name] = fp
return e.state.get(fp)
} | go | func (e *Entity) Get(name string) interface{} {
if fp, ok := e.fpCache[name]; ok {
return e.state.get(fp)
}
if e.fpNoop[name] {
return nil
}
fp := newFieldPath()
if !e.class.getFieldPathForName(fp, name) {
e.fpNoop[name] = true
fp.release()
return nil
}
e.fpCache[name] = fp
return e.state.get(fp)
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"Get",
"(",
"name",
"string",
")",
"interface",
"{",
"}",
"{",
"if",
"fp",
",",
"ok",
":=",
"e",
".",
"fpCache",
"[",
"name",
"]",
";",
"ok",
"{",
"return",
"e",
".",
"state",
".",
"get",
"(",
"fp",
")",
"\n",
"}",
"\n",
"if",
"e",
".",
"fpNoop",
"[",
"name",
"]",
"{",
"return",
"nil",
"\n",
"}",
"\n\n",
"fp",
":=",
"newFieldPath",
"(",
")",
"\n",
"if",
"!",
"e",
".",
"class",
".",
"getFieldPathForName",
"(",
"fp",
",",
"name",
")",
"{",
"e",
".",
"fpNoop",
"[",
"name",
"]",
"=",
"true",
"\n",
"fp",
".",
"release",
"(",
")",
"\n",
"return",
"nil",
"\n",
"}",
"\n",
"e",
".",
"fpCache",
"[",
"name",
"]",
"=",
"fp",
"\n\n",
"return",
"e",
".",
"state",
".",
"get",
"(",
"fp",
")",
"\n",
"}"
] | // Get returns the current value of the Entity state for the given key | [
"Get",
"returns",
"the",
"current",
"value",
"of",
"the",
"Entity",
"state",
"for",
"the",
"given",
"key"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L94-L111 |
5,578 | dotabuff/manta | entity.go | Exists | func (e *Entity) Exists(name string) bool {
return e.Get(name) != nil
} | go | func (e *Entity) Exists(name string) bool {
return e.Get(name) != nil
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"Exists",
"(",
"name",
"string",
")",
"bool",
"{",
"return",
"e",
".",
"Get",
"(",
"name",
")",
"!=",
"nil",
"\n",
"}"
] | // Exists returns true if the given key exists in the Entity state | [
"Exists",
"returns",
"true",
"if",
"the",
"given",
"key",
"exists",
"in",
"the",
"Entity",
"state"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L114-L116 |
5,579 | dotabuff/manta | entity.go | GetInt32 | func (e *Entity) GetInt32(name string) (int32, bool) {
x, ok := e.Get(name).(int32)
return x, ok
} | go | func (e *Entity) GetInt32(name string) (int32, bool) {
x, ok := e.Get(name).(int32)
return x, ok
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetInt32",
"(",
"name",
"string",
")",
"(",
"int32",
",",
"bool",
")",
"{",
"x",
",",
"ok",
":=",
"e",
".",
"Get",
"(",
"name",
")",
".",
"(",
"int32",
")",
"\n",
"return",
"x",
",",
"ok",
"\n",
"}"
] | // GetInt32 gets given key as an int32 | [
"GetInt32",
"gets",
"given",
"key",
"as",
"an",
"int32"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L119-L122 |
5,580 | dotabuff/manta | entity.go | GetUint32 | func (e *Entity) GetUint32(name string) (uint32, bool) {
if v := e.Get(name); v != nil {
switch x := v.(type) {
case uint32:
return x, true
case uint64:
return uint32(x), true
}
}
return 0, false
} | go | func (e *Entity) GetUint32(name string) (uint32, bool) {
if v := e.Get(name); v != nil {
switch x := v.(type) {
case uint32:
return x, true
case uint64:
return uint32(x), true
}
}
return 0, false
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetUint32",
"(",
"name",
"string",
")",
"(",
"uint32",
",",
"bool",
")",
"{",
"if",
"v",
":=",
"e",
".",
"Get",
"(",
"name",
")",
";",
"v",
"!=",
"nil",
"{",
"switch",
"x",
":=",
"v",
".",
"(",
"type",
")",
"{",
"case",
"uint32",
":",
"return",
"x",
",",
"true",
"\n",
"case",
"uint64",
":",
"return",
"uint32",
"(",
"x",
")",
",",
"true",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"0",
",",
"false",
"\n",
"}"
] | // GetUint32 gets given key as a uint32 | [
"GetUint32",
"gets",
"given",
"key",
"as",
"a",
"uint32"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L125-L135 |
5,581 | dotabuff/manta | entity.go | GetUint64 | func (e *Entity) GetUint64(name string) (uint64, bool) {
x, ok := e.Get(name).(uint64)
return x, ok
} | go | func (e *Entity) GetUint64(name string) (uint64, bool) {
x, ok := e.Get(name).(uint64)
return x, ok
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetUint64",
"(",
"name",
"string",
")",
"(",
"uint64",
",",
"bool",
")",
"{",
"x",
",",
"ok",
":=",
"e",
".",
"Get",
"(",
"name",
")",
".",
"(",
"uint64",
")",
"\n",
"return",
"x",
",",
"ok",
"\n",
"}"
] | // GetUint64 gets given key as a uint64 | [
"GetUint64",
"gets",
"given",
"key",
"as",
"a",
"uint64"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L138-L141 |
5,582 | dotabuff/manta | entity.go | GetFloat32 | func (e *Entity) GetFloat32(name string) (float32, bool) {
x, ok := e.Get(name).(float32)
return x, ok
} | go | func (e *Entity) GetFloat32(name string) (float32, bool) {
x, ok := e.Get(name).(float32)
return x, ok
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetFloat32",
"(",
"name",
"string",
")",
"(",
"float32",
",",
"bool",
")",
"{",
"x",
",",
"ok",
":=",
"e",
".",
"Get",
"(",
"name",
")",
".",
"(",
"float32",
")",
"\n",
"return",
"x",
",",
"ok",
"\n",
"}"
] | // GetFloat32 gets given key as an float32 | [
"GetFloat32",
"gets",
"given",
"key",
"as",
"an",
"float32"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L144-L147 |
5,583 | dotabuff/manta | entity.go | GetString | func (e *Entity) GetString(name string) (string, bool) {
x, ok := e.Get(name).(string)
return x, ok
} | go | func (e *Entity) GetString(name string) (string, bool) {
x, ok := e.Get(name).(string)
return x, ok
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetString",
"(",
"name",
"string",
")",
"(",
"string",
",",
"bool",
")",
"{",
"x",
",",
"ok",
":=",
"e",
".",
"Get",
"(",
"name",
")",
".",
"(",
"string",
")",
"\n",
"return",
"x",
",",
"ok",
"\n",
"}"
] | // GetString gets given key as a string | [
"GetString",
"gets",
"given",
"key",
"as",
"a",
"string"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L150-L153 |
5,584 | dotabuff/manta | entity.go | GetBool | func (e *Entity) GetBool(name string) (bool, bool) {
x, ok := e.Get(name).(bool)
return x, ok
} | go | func (e *Entity) GetBool(name string) (bool, bool) {
x, ok := e.Get(name).(bool)
return x, ok
} | [
"func",
"(",
"e",
"*",
"Entity",
")",
"GetBool",
"(",
"name",
"string",
")",
"(",
"bool",
",",
"bool",
")",
"{",
"x",
",",
"ok",
":=",
"e",
".",
"Get",
"(",
"name",
")",
".",
"(",
"bool",
")",
"\n",
"return",
"x",
",",
"ok",
"\n",
"}"
] | // GetBool gets given key as a bool | [
"GetBool",
"gets",
"given",
"key",
"as",
"a",
"bool"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L156-L159 |
5,585 | dotabuff/manta | entity.go | FindEntityByHandle | func (p *Parser) FindEntityByHandle(handle uint64) *Entity {
idx := handle2idx(handle)
e := p.FindEntity(idx)
if e != nil && e.GetSerial() != serialForHandle(handle) {
return nil
}
return e
} | go | func (p *Parser) FindEntityByHandle(handle uint64) *Entity {
idx := handle2idx(handle)
e := p.FindEntity(idx)
if e != nil && e.GetSerial() != serialForHandle(handle) {
return nil
}
return e
} | [
"func",
"(",
"p",
"*",
"Parser",
")",
"FindEntityByHandle",
"(",
"handle",
"uint64",
")",
"*",
"Entity",
"{",
"idx",
":=",
"handle2idx",
"(",
"handle",
")",
"\n",
"e",
":=",
"p",
".",
"FindEntity",
"(",
"idx",
")",
"\n",
"if",
"e",
"!=",
"nil",
"&&",
"e",
".",
"GetSerial",
"(",
")",
"!=",
"serialForHandle",
"(",
"handle",
")",
"{",
"return",
"nil",
"\n",
"}",
"\n",
"return",
"e",
"\n",
"}"
] | // FindEntityByHandle finds a given Entity by handle | [
"FindEntityByHandle",
"finds",
"a",
"given",
"Entity",
"by",
"handle"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L201-L208 |
5,586 | dotabuff/manta | entity.go | FilterEntity | func (p *Parser) FilterEntity(fb func(*Entity) bool) []*Entity {
entities := make([]*Entity, 0, 0)
for _, et := range p.entities {
if fb(et) {
entities = append(entities, et)
}
}
return entities
} | go | func (p *Parser) FilterEntity(fb func(*Entity) bool) []*Entity {
entities := make([]*Entity, 0, 0)
for _, et := range p.entities {
if fb(et) {
entities = append(entities, et)
}
}
return entities
} | [
"func",
"(",
"p",
"*",
"Parser",
")",
"FilterEntity",
"(",
"fb",
"func",
"(",
"*",
"Entity",
")",
"bool",
")",
"[",
"]",
"*",
"Entity",
"{",
"entities",
":=",
"make",
"(",
"[",
"]",
"*",
"Entity",
",",
"0",
",",
"0",
")",
"\n",
"for",
"_",
",",
"et",
":=",
"range",
"p",
".",
"entities",
"{",
"if",
"fb",
"(",
"et",
")",
"{",
"entities",
"=",
"append",
"(",
"entities",
",",
"et",
")",
"\n",
"}",
"\n",
"}",
"\n",
"return",
"entities",
"\n",
"}"
] | // FilterEntity finds entities by callback | [
"FilterEntity",
"finds",
"entities",
"by",
"callback"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L211-L219 |
5,587 | dotabuff/manta | entity.go | OnEntity | func (p *Parser) OnEntity(h EntityHandler) {
p.entityHandlers = append(p.entityHandlers, h)
} | go | func (p *Parser) OnEntity(h EntityHandler) {
p.entityHandlers = append(p.entityHandlers, h)
} | [
"func",
"(",
"p",
"*",
"Parser",
")",
"OnEntity",
"(",
"h",
"EntityHandler",
")",
"{",
"p",
".",
"entityHandlers",
"=",
"append",
"(",
"p",
".",
"entityHandlers",
",",
"h",
")",
"\n",
"}"
] | // OnEntity registers an EntityHandler that will be called when an entity
// is created, updated, deleted, etc. | [
"OnEntity",
"registers",
"an",
"EntityHandler",
"that",
"will",
"be",
"called",
"when",
"an",
"entity",
"is",
"created",
"updated",
"deleted",
"etc",
"."
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/entity.go#L319-L321 |
5,588 | dotabuff/manta | huffman.go | Less | func (th treeHeap) Less(i int, j int) bool {
if th[i].Weight() == th[j].Weight() {
return th[i].Value() >= th[j].Value()
} else {
return th[i].Weight() < th[j].Weight()
}
} | go | func (th treeHeap) Less(i int, j int) bool {
if th[i].Weight() == th[j].Weight() {
return th[i].Value() >= th[j].Value()
} else {
return th[i].Weight() < th[j].Weight()
}
} | [
"func",
"(",
"th",
"treeHeap",
")",
"Less",
"(",
"i",
"int",
",",
"j",
"int",
")",
"bool",
"{",
"if",
"th",
"[",
"i",
"]",
".",
"Weight",
"(",
")",
"==",
"th",
"[",
"j",
"]",
".",
"Weight",
"(",
")",
"{",
"return",
"th",
"[",
"i",
"]",
".",
"Value",
"(",
")",
">=",
"th",
"[",
"j",
"]",
".",
"Value",
"(",
")",
"\n",
"}",
"else",
"{",
"return",
"th",
"[",
"i",
"]",
".",
"Weight",
"(",
")",
"<",
"th",
"[",
"j",
"]",
".",
"Weight",
"(",
")",
"\n",
"}",
"\n",
"}"
] | // Weight compare function | [
"Weight",
"compare",
"function"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L87-L93 |
5,589 | dotabuff/manta | huffman.go | Pop | func (th *treeHeap) Pop() (popped interface{}) {
popped = (*th)[len(*th)-1]
*th = (*th)[:len(*th)-1]
return
} | go | func (th *treeHeap) Pop() (popped interface{}) {
popped = (*th)[len(*th)-1]
*th = (*th)[:len(*th)-1]
return
} | [
"func",
"(",
"th",
"*",
"treeHeap",
")",
"Pop",
"(",
")",
"(",
"popped",
"interface",
"{",
"}",
")",
"{",
"popped",
"=",
"(",
"*",
"th",
")",
"[",
"len",
"(",
"*",
"th",
")",
"-",
"1",
"]",
"\n",
"*",
"th",
"=",
"(",
"*",
"th",
")",
"[",
":",
"len",
"(",
"*",
"th",
")",
"-",
"1",
"]",
"\n",
"return",
"\n",
"}"
] | // Remove item, required for heap | [
"Remove",
"item",
"required",
"for",
"heap"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L101-L105 |
5,590 | dotabuff/manta | huffman.go | Swap | func (th treeHeap) Swap(i, j int) {
th[i], th[j] = th[j], th[i]
} | go | func (th treeHeap) Swap(i, j int) {
th[i], th[j] = th[j], th[i]
} | [
"func",
"(",
"th",
"treeHeap",
")",
"Swap",
"(",
"i",
",",
"j",
"int",
")",
"{",
"th",
"[",
"i",
"]",
",",
"th",
"[",
"j",
"]",
"=",
"th",
"[",
"j",
"]",
",",
"th",
"[",
"i",
"]",
"\n",
"}"
] | // Swap two items, required for heap | [
"Swap",
"two",
"items",
"required",
"for",
"heap"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L108-L110 |
5,591 | dotabuff/manta | huffman.go | buildHuffmanTree | func buildHuffmanTree(symFreqs []int) huffmanTree {
var trees treeHeap
for v, w := range symFreqs {
if w == 0 {
w = 1
}
trees = append(trees, &huffmanLeaf{w, v})
}
n := 40
heap.Init(&trees)
for trees.Len() > 1 {
a := heap.Pop(&trees).(huffmanTree)
b := heap.Pop(&trees).(huffmanTree)
heap.Push(&trees, &huffmanNode{a.Weight() + b.Weight(), n, a, b})
n++
}
return heap.Pop(&trees).(huffmanTree)
} | go | func buildHuffmanTree(symFreqs []int) huffmanTree {
var trees treeHeap
for v, w := range symFreqs {
if w == 0 {
w = 1
}
trees = append(trees, &huffmanLeaf{w, v})
}
n := 40
heap.Init(&trees)
for trees.Len() > 1 {
a := heap.Pop(&trees).(huffmanTree)
b := heap.Pop(&trees).(huffmanTree)
heap.Push(&trees, &huffmanNode{a.Weight() + b.Weight(), n, a, b})
n++
}
return heap.Pop(&trees).(huffmanTree)
} | [
"func",
"buildHuffmanTree",
"(",
"symFreqs",
"[",
"]",
"int",
")",
"huffmanTree",
"{",
"var",
"trees",
"treeHeap",
"\n",
"for",
"v",
",",
"w",
":=",
"range",
"symFreqs",
"{",
"if",
"w",
"==",
"0",
"{",
"w",
"=",
"1",
"\n",
"}",
"\n\n",
"trees",
"=",
"append",
"(",
"trees",
",",
"&",
"huffmanLeaf",
"{",
"w",
",",
"v",
"}",
")",
"\n",
"}",
"\n\n",
"n",
":=",
"40",
"\n\n",
"heap",
".",
"Init",
"(",
"&",
"trees",
")",
"\n",
"for",
"trees",
".",
"Len",
"(",
")",
">",
"1",
"{",
"a",
":=",
"heap",
".",
"Pop",
"(",
"&",
"trees",
")",
".",
"(",
"huffmanTree",
")",
"\n",
"b",
":=",
"heap",
".",
"Pop",
"(",
"&",
"trees",
")",
".",
"(",
"huffmanTree",
")",
"\n\n",
"heap",
".",
"Push",
"(",
"&",
"trees",
",",
"&",
"huffmanNode",
"{",
"a",
".",
"Weight",
"(",
")",
"+",
"b",
".",
"Weight",
"(",
")",
",",
"n",
",",
"a",
",",
"b",
"}",
")",
"\n",
"n",
"++",
"\n",
"}",
"\n\n",
"return",
"heap",
".",
"Pop",
"(",
"&",
"trees",
")",
".",
"(",
"huffmanTree",
")",
"\n",
"}"
] | // Construct a tree from a map of weight -> item | [
"Construct",
"a",
"tree",
"from",
"a",
"map",
"of",
"weight",
"-",
">",
"item"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L113-L135 |
5,592 | dotabuff/manta | huffman.go | swapNodes | func swapNodes(tree huffmanTree, path uint32, len uint32) {
for len > 0 {
// get current bit
len--
one := path & 1
path = path >> 1
// check if we are correct
if tree.IsLeaf() {
_panicf("Touching leaf in node swap, %d left in path", len)
}
// switch on the type
if one == 1 {
tree = tree.Right()
} else {
tree = tree.Left()
}
}
node := tree.(*huffmanNode)
node.left, node.right = node.right, node.left
} | go | func swapNodes(tree huffmanTree, path uint32, len uint32) {
for len > 0 {
// get current bit
len--
one := path & 1
path = path >> 1
// check if we are correct
if tree.IsLeaf() {
_panicf("Touching leaf in node swap, %d left in path", len)
}
// switch on the type
if one == 1 {
tree = tree.Right()
} else {
tree = tree.Left()
}
}
node := tree.(*huffmanNode)
node.left, node.right = node.right, node.left
} | [
"func",
"swapNodes",
"(",
"tree",
"huffmanTree",
",",
"path",
"uint32",
",",
"len",
"uint32",
")",
"{",
"for",
"len",
">",
"0",
"{",
"// get current bit",
"len",
"--",
"\n",
"one",
":=",
"path",
"&",
"1",
"\n",
"path",
"=",
"path",
">>",
"1",
"\n\n",
"// check if we are correct",
"if",
"tree",
".",
"IsLeaf",
"(",
")",
"{",
"_panicf",
"(",
"\"",
"\"",
",",
"len",
")",
"\n",
"}",
"\n\n",
"// switch on the type",
"if",
"one",
"==",
"1",
"{",
"tree",
"=",
"tree",
".",
"Right",
"(",
")",
"\n",
"}",
"else",
"{",
"tree",
"=",
"tree",
".",
"Left",
"(",
")",
"\n",
"}",
"\n",
"}",
"\n\n",
"node",
":=",
"tree",
".",
"(",
"*",
"huffmanNode",
")",
"\n",
"node",
".",
"left",
",",
"node",
".",
"right",
"=",
"node",
".",
"right",
",",
"node",
".",
"left",
"\n",
"}"
] | // Swap two nodes based on the given path | [
"Swap",
"two",
"nodes",
"based",
"on",
"the",
"given",
"path"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L138-L160 |
5,593 | dotabuff/manta | huffman.go | printCodes | func printCodes(tree huffmanTree, prefix []byte) {
if tree == nil {
return
}
if tree.IsLeaf() {
node := tree.(*huffmanLeaf)
fmt.Printf("%v\t%d\t%d\t%s\n", node.Value(), node.Weight(), len(prefix), string(prefix))
} else {
prefix = append(prefix, '0')
printCodes(tree.Left(), prefix)
prefix = prefix[:len(prefix)-1]
prefix = append(prefix, '1')
printCodes(tree.Right(), prefix)
prefix = prefix[:len(prefix)-1]
}
} | go | func printCodes(tree huffmanTree, prefix []byte) {
if tree == nil {
return
}
if tree.IsLeaf() {
node := tree.(*huffmanLeaf)
fmt.Printf("%v\t%d\t%d\t%s\n", node.Value(), node.Weight(), len(prefix), string(prefix))
} else {
prefix = append(prefix, '0')
printCodes(tree.Left(), prefix)
prefix = prefix[:len(prefix)-1]
prefix = append(prefix, '1')
printCodes(tree.Right(), prefix)
prefix = prefix[:len(prefix)-1]
}
} | [
"func",
"printCodes",
"(",
"tree",
"huffmanTree",
",",
"prefix",
"[",
"]",
"byte",
")",
"{",
"if",
"tree",
"==",
"nil",
"{",
"return",
"\n",
"}",
"\n\n",
"if",
"tree",
".",
"IsLeaf",
"(",
")",
"{",
"node",
":=",
"tree",
".",
"(",
"*",
"huffmanLeaf",
")",
"\n",
"fmt",
".",
"Printf",
"(",
"\"",
"\\t",
"\\t",
"\\t",
"\\n",
"\"",
",",
"node",
".",
"Value",
"(",
")",
",",
"node",
".",
"Weight",
"(",
")",
",",
"len",
"(",
"prefix",
")",
",",
"string",
"(",
"prefix",
")",
")",
"\n",
"}",
"else",
"{",
"prefix",
"=",
"append",
"(",
"prefix",
",",
"'0'",
")",
"\n",
"printCodes",
"(",
"tree",
".",
"Left",
"(",
")",
",",
"prefix",
")",
"\n",
"prefix",
"=",
"prefix",
"[",
":",
"len",
"(",
"prefix",
")",
"-",
"1",
"]",
"\n\n",
"prefix",
"=",
"append",
"(",
"prefix",
",",
"'1'",
")",
"\n",
"printCodes",
"(",
"tree",
".",
"Right",
"(",
")",
",",
"prefix",
")",
"\n",
"prefix",
"=",
"prefix",
"[",
":",
"len",
"(",
"prefix",
")",
"-",
"1",
"]",
"\n",
"}",
"\n",
"}"
] | // Print computed tree order | [
"Print",
"computed",
"tree",
"order"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/huffman.go#L163-L180 |
5,594 | dotabuff/manta | field_path.go | pop | func (fp *fieldPath) pop(n int) {
for i := 0; i < n; i++ {
fp.path[fp.last] = 0
fp.last--
}
} | go | func (fp *fieldPath) pop(n int) {
for i := 0; i < n; i++ {
fp.path[fp.last] = 0
fp.last--
}
} | [
"func",
"(",
"fp",
"*",
"fieldPath",
")",
"pop",
"(",
"n",
"int",
")",
"{",
"for",
"i",
":=",
"0",
";",
"i",
"<",
"n",
";",
"i",
"++",
"{",
"fp",
".",
"path",
"[",
"fp",
".",
"last",
"]",
"=",
"0",
"\n",
"fp",
".",
"last",
"--",
"\n",
"}",
"\n",
"}"
] | // pop reduces the last element by n, zeroing values in the popped path | [
"pop",
"reduces",
"the",
"last",
"element",
"by",
"n",
"zeroing",
"values",
"in",
"the",
"popped",
"path"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L252-L257 |
5,595 | dotabuff/manta | field_path.go | copy | func (fp *fieldPath) copy() *fieldPath {
x := fpPool.Get().(*fieldPath)
copy(x.path, fp.path)
x.last = fp.last
x.done = fp.done
return x
} | go | func (fp *fieldPath) copy() *fieldPath {
x := fpPool.Get().(*fieldPath)
copy(x.path, fp.path)
x.last = fp.last
x.done = fp.done
return x
} | [
"func",
"(",
"fp",
"*",
"fieldPath",
")",
"copy",
"(",
")",
"*",
"fieldPath",
"{",
"x",
":=",
"fpPool",
".",
"Get",
"(",
")",
".",
"(",
"*",
"fieldPath",
")",
"\n",
"copy",
"(",
"x",
".",
"path",
",",
"fp",
".",
"path",
")",
"\n",
"x",
".",
"last",
"=",
"fp",
".",
"last",
"\n",
"x",
".",
"done",
"=",
"fp",
".",
"done",
"\n",
"return",
"x",
"\n",
"}"
] | // copy returns a copy of the fieldPath | [
"copy",
"returns",
"a",
"copy",
"of",
"the",
"fieldPath"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L260-L266 |
5,596 | dotabuff/manta | field_path.go | String | func (fp *fieldPath) String() string {
ss := make([]string, fp.last+1)
for i := 0; i <= fp.last; i++ {
ss[i] = strconv.Itoa(fp.path[i])
}
return strings.Join(ss, "/")
} | go | func (fp *fieldPath) String() string {
ss := make([]string, fp.last+1)
for i := 0; i <= fp.last; i++ {
ss[i] = strconv.Itoa(fp.path[i])
}
return strings.Join(ss, "/")
} | [
"func",
"(",
"fp",
"*",
"fieldPath",
")",
"String",
"(",
")",
"string",
"{",
"ss",
":=",
"make",
"(",
"[",
"]",
"string",
",",
"fp",
".",
"last",
"+",
"1",
")",
"\n",
"for",
"i",
":=",
"0",
";",
"i",
"<=",
"fp",
".",
"last",
";",
"i",
"++",
"{",
"ss",
"[",
"i",
"]",
"=",
"strconv",
".",
"Itoa",
"(",
"fp",
".",
"path",
"[",
"i",
"]",
")",
"\n",
"}",
"\n",
"return",
"strings",
".",
"Join",
"(",
"ss",
",",
"\"",
"\"",
")",
"\n",
"}"
] | // String returns a string representing the fieldPath | [
"String",
"returns",
"a",
"string",
"representing",
"the",
"fieldPath"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L269-L275 |
5,597 | dotabuff/manta | field_path.go | reset | func (fp *fieldPath) reset() {
copy(fp.path, fpReset)
fp.last = 0
fp.done = false
} | go | func (fp *fieldPath) reset() {
copy(fp.path, fpReset)
fp.last = 0
fp.done = false
} | [
"func",
"(",
"fp",
"*",
"fieldPath",
")",
"reset",
"(",
")",
"{",
"copy",
"(",
"fp",
".",
"path",
",",
"fpReset",
")",
"\n",
"fp",
".",
"last",
"=",
"0",
"\n",
"fp",
".",
"done",
"=",
"false",
"\n",
"}"
] | // reset resets the fieldPath to the empty value | [
"reset",
"resets",
"the",
"fieldPath",
"to",
"the",
"empty",
"value"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L297-L301 |
5,598 | dotabuff/manta | field_path.go | readFieldPaths | func readFieldPaths(r *reader) []*fieldPath {
fp := newFieldPath()
node, next := huffTree, huffTree
paths := []*fieldPath{}
for !fp.done {
if r.readBits(1) == 1 {
next = node.Right()
} else {
next = node.Left()
}
if next.IsLeaf() {
node = huffTree
fieldPathTable[next.Value()].fn(r, fp)
if !fp.done {
paths = append(paths, fp.copy())
}
} else {
node = next
}
}
fp.release()
return paths
} | go | func readFieldPaths(r *reader) []*fieldPath {
fp := newFieldPath()
node, next := huffTree, huffTree
paths := []*fieldPath{}
for !fp.done {
if r.readBits(1) == 1 {
next = node.Right()
} else {
next = node.Left()
}
if next.IsLeaf() {
node = huffTree
fieldPathTable[next.Value()].fn(r, fp)
if !fp.done {
paths = append(paths, fp.copy())
}
} else {
node = next
}
}
fp.release()
return paths
} | [
"func",
"readFieldPaths",
"(",
"r",
"*",
"reader",
")",
"[",
"]",
"*",
"fieldPath",
"{",
"fp",
":=",
"newFieldPath",
"(",
")",
"\n\n",
"node",
",",
"next",
":=",
"huffTree",
",",
"huffTree",
"\n\n",
"paths",
":=",
"[",
"]",
"*",
"fieldPath",
"{",
"}",
"\n\n",
"for",
"!",
"fp",
".",
"done",
"{",
"if",
"r",
".",
"readBits",
"(",
"1",
")",
"==",
"1",
"{",
"next",
"=",
"node",
".",
"Right",
"(",
")",
"\n",
"}",
"else",
"{",
"next",
"=",
"node",
".",
"Left",
"(",
")",
"\n",
"}",
"\n\n",
"if",
"next",
".",
"IsLeaf",
"(",
")",
"{",
"node",
"=",
"huffTree",
"\n",
"fieldPathTable",
"[",
"next",
".",
"Value",
"(",
")",
"]",
".",
"fn",
"(",
"r",
",",
"fp",
")",
"\n",
"if",
"!",
"fp",
".",
"done",
"{",
"paths",
"=",
"append",
"(",
"paths",
",",
"fp",
".",
"copy",
"(",
")",
")",
"\n",
"}",
"\n",
"}",
"else",
"{",
"node",
"=",
"next",
"\n",
"}",
"\n",
"}",
"\n\n",
"fp",
".",
"release",
"(",
")",
"\n\n",
"return",
"paths",
"\n",
"}"
] | // readFieldPaths reads a new slice of fieldPath values from the given reader | [
"readFieldPaths",
"reads",
"a",
"new",
"slice",
"of",
"fieldPath",
"values",
"from",
"the",
"given",
"reader"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L309-L337 |
5,599 | dotabuff/manta | field_path.go | newHuffmanTree | func newHuffmanTree() huffmanTree {
freqs := make([]int, len(fieldPathTable))
for i, op := range fieldPathTable {
freqs[i] = op.weight
}
return buildHuffmanTree(freqs)
} | go | func newHuffmanTree() huffmanTree {
freqs := make([]int, len(fieldPathTable))
for i, op := range fieldPathTable {
freqs[i] = op.weight
}
return buildHuffmanTree(freqs)
} | [
"func",
"newHuffmanTree",
"(",
")",
"huffmanTree",
"{",
"freqs",
":=",
"make",
"(",
"[",
"]",
"int",
",",
"len",
"(",
"fieldPathTable",
")",
")",
"\n",
"for",
"i",
",",
"op",
":=",
"range",
"fieldPathTable",
"{",
"freqs",
"[",
"i",
"]",
"=",
"op",
".",
"weight",
"\n",
"}",
"\n",
"return",
"buildHuffmanTree",
"(",
"freqs",
")",
"\n",
"}"
] | // newHuffmanTree creates a new huffmanTree from the field path table | [
"newHuffmanTree",
"creates",
"a",
"new",
"huffmanTree",
"from",
"the",
"field",
"path",
"table"
] | f51563b05e0e19c9ac19196e26455af1782ed3f0 | https://github.com/dotabuff/manta/blob/f51563b05e0e19c9ac19196e26455af1782ed3f0/field_path.go#L340-L346 |