Dataset columns:
query: string, length 7 to 3.85k
document: string, length 11 to 430k
metadata: dict
negatives: sequence, length 0 to 101
negative_scores: sequence, length 0 to 101
document_score: string, length 3 to 10
document_rank: string, 102 distinct values
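For orientation, here is a minimal Go sketch of how a single record with these columns could be decoded. The struct, its JSON keys, and the tiny sample are assumptions inferred from the column list above, not an official schema: negative_scores is modeled as float64 values and metadata as a free-form object, since only the column types are given.

package main

import (
	"encoding/json"
	"fmt"
)

// Record mirrors the columns listed above (field names and JSON tags are assumptions).
type Record struct {
	Query          string         `json:"query"`
	Document       string         `json:"document"`
	Metadata       map[string]any `json:"metadata"`
	Negatives      []string       `json:"negatives"`
	NegativeScores []float64      `json:"negative_scores"`
	DocumentScore  string         `json:"document_score"`
	DocumentRank   string         `json:"document_rank"`
}

func main() {
	// A tiny, made-up record in the same shape as the sample below.
	raw := `{"query":"q","document":"d","metadata":{},"negatives":["n1","n2"],"negative_scores":[0.3,0.1],"document_score":"0.9","document_rank":"1"}`

	var r Record
	if err := json.Unmarshal([]byte(raw), &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Query, len(r.Negatives), r.DocumentRank)
}

One full record from the dataset follows.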
TestPinLsIndirect verifies that indirect nodes are listed by pin ls even if a parent node is directly pinned
func (tp *TestSuite) TestPinLsIndirect(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	api, err := tp.makeAPI(ctx)
	if err != nil {
		t.Fatal(err)
	}

	leaf, parent, grandparent := getThreeChainedNodes(t, ctx, api, "foo")

	err = api.Pin().Add(ctx, path.IpldPath(grandparent.Cid()))
	if err != nil {
		t.Fatal(err)
	}

	err = api.Pin().Add(ctx, path.IpldPath(parent.Cid()), opt.Pin.Recursive(false))
	if err != nil {
		t.Fatal(err)
	}

	assertPinTypes(t, ctx, api, []cidContainer{grandparent}, []cidContainer{parent}, []cidContainer{leaf})
}
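The helpers getThreeChainedNodes and assertPinTypes are not included in this record. A hedged sketch of what assertPinTypes might do, assuming it lives in the same test package (so accPins, cidContainer, iface, opt, and cid are already in scope) and reusing the accPins / Pin().Ls pattern visible in the negatives below; the real helper may differ:

// assertPinTypes (hypothetical sketch): for each pin type, list the pins of
// that type and require that exactly the expected CIDs are reported.
func assertPinTypes(t *testing.T, ctx context.Context, api iface.CoreAPI, recursive, direct, indirect []cidContainer) {
	t.Helper()

	check := func(want []cidContainer, lsOpt opt.PinLsOption, typeStr string) {
		pins, err := accPins(api.Pin().Ls(ctx, lsOpt))
		if err != nil {
			t.Fatal(err)
		}

		got := cid.NewSet()
		for _, p := range pins {
			got.Add(p.Path().Cid())
		}

		if got.Len() != len(want) {
			t.Fatalf("expected %d %s pins, got %d", len(want), typeStr, got.Len())
		}
		for _, c := range want {
			if !got.Has(c.Cid()) {
				t.Fatalf("%s is not listed as a %s pin", c.Cid(), typeStr)
			}
		}
	}

	check(recursive, opt.Pin.Ls.Recursive(), "recursive")
	check(direct, opt.Pin.Ls.Direct(), "direct")
	check(indirect, opt.Pin.Ls.Indirect(), "indirect")
}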
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (tp *TestSuite) TestPinLsPrecedence(t *testing.T) {\n\t// Testing precedence of recursive, direct and indirect pins\n\t// Results should be recursive > indirect, direct > indirect, and recursive > direct\n\n\tt.Run(\"TestPinLsPredenceRecursiveIndirect\", tp.TestPinLsPredenceRecursiveIndirect)\n\tt.Run(\"TestPinLsPrecedenceDirectIndirect\", tp.TestPinLsPrecedenceDirectIndirect)\n\tt.Run(\"TestPinLsPrecedenceRecursiveDirect\", tp.TestPinLsPrecedenceRecursiveDirect)\n}", "func assertPinLsAllConsistency(t *testing.T, ctx context.Context, api iface.CoreAPI) {\n\tt.Helper()\n\tallPins, err := accPins(api.Pin().Ls(ctx))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttype pinTypeProps struct {\n\t\t*cid.Set\n\t\topt.PinLsOption\n\t}\n\n\tall, recursive, direct, indirect := cid.NewSet(), cid.NewSet(), cid.NewSet(), cid.NewSet()\n\ttypeMap := map[string]*pinTypeProps{\n\t\t\"recursive\": {recursive, opt.Pin.Ls.Recursive()},\n\t\t\"direct\": {direct, opt.Pin.Ls.Direct()},\n\t\t\"indirect\": {indirect, opt.Pin.Ls.Indirect()},\n\t}\n\n\tfor _, p := range allPins {\n\t\tif !all.Visit(p.Path().Cid()) {\n\t\t\tt.Fatalf(\"pin ls returned the same cid multiple times\")\n\t\t}\n\n\t\ttypeStr := p.Type()\n\t\tif typeSet, ok := typeMap[p.Type()]; ok {\n\t\t\ttypeSet.Add(p.Path().Cid())\n\t\t} else {\n\t\t\tt.Fatalf(\"unknown pin type: %s\", typeStr)\n\t\t}\n\t}\n\n\tfor typeStr, pinProps := range typeMap {\n\t\tpins, err := accPins(api.Pin().Ls(ctx, pinProps.PinLsOption))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\tif expected, actual := len(pins), pinProps.Set.Len(); expected != actual {\n\t\t\tt.Fatalf(\"pin ls all has %d pins of type %s, but pin ls for the type has %d\", expected, typeStr, actual)\n\t\t}\n\n\t\tfor _, p := range pins {\n\t\t\tif pinType := p.Type(); pinType != typeStr {\n\t\t\t\tt.Fatalf(\"returned wrong pin type: expected %s, got %s\", typeStr, pinType)\n\t\t\t}\n\n\t\t\tif c := p.Path().Cid(); !pinProps.Has(c) {\n\t\t\t\tt.Fatalf(\"%s expected to be in pin ls all as type %s\", c.String(), typeStr)\n\t\t\t}\n\t\t}\n\t}\n}", "func (p *pinner) Pin(ctx context.Context, node *mdag.Node, recurse bool) error {\n\tp.lock.Lock()\n\tdefer p.lock.Unlock()\n\tk, err := node.Key()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif recurse {\n\t\tif p.recursePin.HasKey(k) {\n\t\t\treturn nil\n\t\t}\n\n\t\tif p.directPin.HasKey(k) {\n\t\t\tp.directPin.RemoveBlock(k)\n\t\t}\n\n\t\terr := p.pinLinks(ctx, node)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tp.recursePin.AddBlock(k)\n\t} else {\n\t\t_, err := p.dserv.Get(ctx, k)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif p.recursePin.HasKey(k) {\n\t\t\treturn fmt.Errorf(\"%s already pinned recursively\", k.B58String())\n\t\t}\n\n\t\tp.directPin.AddBlock(k)\n\t}\n\treturn nil\n}", "func (pinType) Indirect() PinLsOption {\n\treturn Pin.pinType(\"indirect\")\n}", "func testCheckDDCloudAddressListMatches(name string, expected compute.IPAddressList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address 
list not found with Id '%s'\", addressListID)\n\t\t}\n\n\t\tif addressList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has name '%s' (expected '%s')\", addressListID, addressList.Name, expected.Name)\n\t\t}\n\n\t\tif addressList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has description '%s' (expected '%s')\", addressListID, addressList.Description, expected.Description)\n\t\t}\n\n\t\tif len(addressList.Addresses) != len(expected.Addresses) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d addresses or address-ranges (expected '%d')\", addressListID, len(addressList.Addresses), len(expected.Addresses))\n\t\t}\n\n\t\terr = compareAddressListEntries(expected, *addressList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(addressList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d child lists (expected '%d')\", addressListID, len(addressList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range addressList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := addressList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: address list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\taddressListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func (s *MovesSuite) TestPawnTakesPinned() {\n\tmoves := s.validateMovesByFEN(\n\t\t\"8/8/8/8/8/K3P2q/8/8 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n\n\tmoves = s.validateMovesByFEN(\n\t\t\"8/8/8/8/5q2/8/3P4/2K5 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n\n\t// can take if it clears the pin\n\tmoves = s.validateMovesByFEN(\n\t\t\"8/8/8/8/5q2/4P3/8/2K5 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{\n\t\t\tengine.TT(\"f4\"),\n\t\t},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 1, len(moves))\n}", "func (p *pinner) Unpin(ctx context.Context, k util.Key, recursive bool) error {\n\tp.lock.Lock()\n\tdefer p.lock.Unlock()\n\tif p.recursePin.HasKey(k) {\n\t\tif recursive {\n\t\t\tp.recursePin.RemoveBlock(k)\n\t\t\tnode, err := p.dserv.Get(ctx, k)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn p.unpinLinks(ctx, node)\n\t\t} else {\n\t\t\treturn fmt.Errorf(\"%s is pinned recursively\", k)\n\t\t}\n\t} else if p.directPin.HasKey(k) {\n\t\tp.directPin.RemoveBlock(k)\n\t\treturn nil\n\t} else if p.indirPin.HasKey(k) {\n\t\treturn fmt.Errorf(\"%s is pinned indirectly. 
indirect pins cannot be removed directly\", k)\n\t} else {\n\t\treturn fmt.Errorf(\"%s is not pinned\", k)\n\t}\n}", "func (s *MovesSuite) TestPawnMovePinned() {\n\tmoves := s.validateMovesByFEN(\n\t\t\"8/8/8/8/8/K3P2q/8/8 w - - 0 1\",\n\t\tengine.T(4, 2),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n}", "func TestBuildPeerManagerNodeList(t *testing.T) {\n\tg := &Gateway{\n\t\tnodes: map[modules.NetAddress]*node{\n\t\t\t\"foo\": {NetAddress: \"foo\", WasOutboundPeer: true},\n\t\t\t\"bar\": {NetAddress: \"bar\", WasOutboundPeer: false},\n\t\t\t\"baz\": {NetAddress: \"baz\", WasOutboundPeer: true},\n\t\t\t\"quux\": {NetAddress: \"quux\", WasOutboundPeer: false},\n\t\t},\n\t}\n\tnodelist := g.buildPeerManagerNodeList()\n\t// all outbound nodes should be at the front of the list\n\tvar i int\n\tfor i < len(nodelist) && g.nodes[nodelist[i]].WasOutboundPeer {\n\t\ti++\n\t}\n\tfor i < len(nodelist) && !g.nodes[nodelist[i]].WasOutboundPeer {\n\t\ti++\n\t}\n\tif i != len(nodelist) {\n\t\tt.Fatal(\"bad nodelist:\", nodelist)\n\t}\n}", "func TestLocateInventory(t *testing.T) {\n\t// Construct a synthetic block chain with a block index consisting of\n\t// the following structure.\n\t// \tgenesis -> 1 -> 2 -> ... -> 15 -> 16 -> 17 -> 18\n\t// \t \\-> 16a -> 17a\n\ttip := tstTip\n\tchain, teardownFunc, err := newFakeChain(&chaincfg.MainNetParams)\n\tif err != nil || chain == nil {\n\t\tt.Errorf(\"newFakeChain error %v\", err)\n\t}\n\tdefer teardownFunc()\n\n\t//the node in branch1Nodes is also included in branch0Nodes\n\tbranch0Nodes := chainedNodes(chain.bestChain.Genesis(), 18, 0)\n\tbranch1Nodes := chainedNodes(branch0Nodes[14], 2, 1)\n\tfor _, node := range branch0Nodes {\n\t\tchain.index.AddNode(node)\n\t}\n\tfor _, node := range branch1Nodes {\n\t\tchain.index.AddNode(node)\n\t}\n\tchain.bestChain.SetTip(tip(branch0Nodes))\n\n\t// Create chain views for different branches of the overall chain to\n\t// simulate a local and remote node on different parts of the chain.\n\tlocalView := newChainView(tip(branch0Nodes))\n\tremoteView := newChainView(tip(branch1Nodes))\n\n\t// Create a chain view for a completely unrelated block chain to\n\t// simulate a remote node on a totally different chain.\n\tunrelatedBranchNodes := chainedNodes(nil, 5, 2)\n\tunrelatedView := newChainView(tip(unrelatedBranchNodes))\n\n\ttests := []struct {\n\t\tname string\n\t\tlocator BlockLocator // locator for requested inventory\n\t\thashStop common.Hash // stop hash for locator\n\t\tmaxAllowed uint32 // max to locate, 0 = protos const\n\t\theaders []protos.BlockHeader // expected located headers\n\t\thashes []common.Hash // expected located hashes\n\t}{\n\t\t//test0:\n\t\t{\n\t\t\t// Empty block locators and unknown stop hash. 
No\n\t\t\t// inventory should be located.\n\t\t\tname: \"no locators, no stop\",\n\t\t\tlocator: nil,\n\t\t\thashStop: common.Hash{},\n\t\t\theaders: nil,\n\t\t\thashes: nil,\n\t\t},\n\t\t//test1:\n\t\t{\n\t\t\t// Empty block locators and stop hash in side chain.\n\t\t\t// The expected result is the requested block.\n\t\t\tname: \"no locators, stop in side\",\n\t\t\tlocator: nil,\n\t\t\thashStop: tip(branch1Nodes).hash,\n\t\t\theaders: nodeHeaders(branch1Nodes, 1),\n\t\t\thashes: nodeHashes(branch1Nodes, 1),\n\t\t},\n\t\t//test2:\n\t\t{\n\t\t\t// Empty block locators and stop hash in main chain.\n\t\t\t// The expected result is the requested block.\n\t\t\tname: \"no locators, stop in main\",\n\t\t\tlocator: nil,\n\t\t\thashStop: branch0Nodes[12].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 12),\n\t\t\thashes: nodeHashes(branch0Nodes, 12),\n\t\t},\n\t\t//test3:\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash local node doesn't know about. The\n\t\t\t// expected result is the blocks after the fork point in\n\t\t\t// the main chain and the stop hash has no effect.\n\t\t\tname: \"remote side chain, unknown stop\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: common.Hash{0x01},\n\t\t\theaders: nodeHeaders(branch0Nodes, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 15, 16, 17),\n\t\t},\n\t\t//test4:\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash in side chain. The expected result is the\n\t\t\t// blocks after the fork point in the main chain and the\n\t\t\t// stop hash has no effect.\n\t\t\tname: \"remote side chain, stop in side\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: tip(branch1Nodes).hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 15, 16, 17),\n\t\t},\n\t\t//test5:\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash in main chain, but before fork point. The\n\t\t\t// expected result is the blocks after the fork point in\n\t\t\t// the main chain and the stop hash has no effect.\n\t\t\tname: \"remote side chain, stop in main before\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: branch0Nodes[13].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 15, 16, 17),\n\t\t},\n\t\t//test6:\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash in main chain, but exactly at the fork\n\t\t\t// point. 
The expected result is the blocks after the\n\t\t\t// fork point in the main chain and the stop hash has no\n\t\t\t// effect.\n\t\t\tname: \"remote side chain, stop in main exact\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: branch0Nodes[14].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 15, 16, 17),\n\t\t},\n\t\t//test7:-----------\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash in main chain just after the fork point.\n\t\t\t// The expected result is the blocks after the fork\n\t\t\t// point in the main chain up to and including the stop\n\t\t\t// hash.\n\t\t\tname: \"remote side chain, stop in main after\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: branch0Nodes[15].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 15),\n\t\t\thashes: nodeHashes(branch0Nodes, 15),\n\t\t},\n\t\t//test8:------------\n\t\t{\n\t\t\t// Locators based on remote being on side chain and a\n\t\t\t// stop hash in main chain some time after the fork\n\t\t\t// point. The expected result is the blocks after the\n\t\t\t// fork point in the main chain up to and including the\n\t\t\t// stop hash.\n\t\t\tname: \"remote side chain, stop in main after more\",\n\t\t\tlocator: remoteView.BlockLocator(nil),\n\t\t\thashStop: branch0Nodes[16].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 15, 16),\n\t\t\thashes: nodeHashes(branch0Nodes, 15, 16),\n\t\t},\n\t\t//test9:\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash local node doesn't know about.\n\t\t\t// The expected result is the blocks after the known\n\t\t\t// point in the main chain and the stop hash has no\n\t\t\t// effect.\n\t\t\tname: \"remote main chain past, unknown stop\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: common.Hash{0x01},\n\t\t\theaders: nodeHeaders(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test10:\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash in a side chain. The expected\n\t\t\t// result is the blocks after the known point in the\n\t\t\t// main chain and the stop hash has no effect.\n\t\t\tname: \"remote main chain past, stop in side\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: tip(branch1Nodes).hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test11:\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash in the main chain before that\n\t\t\t// point. The expected result is the blocks after the\n\t\t\t// known point in the main chain and the stop hash has\n\t\t\t// no effect.\n\t\t\tname: \"remote main chain past, stop in main before\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: branch0Nodes[11].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test12:\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash in the main chain exactly at that\n\t\t\t// point. 
The expected result is the blocks after the\n\t\t\t// known point in the main chain and the stop hash has\n\t\t\t// no effect.\n\t\t\tname: \"remote main chain past, stop in main exact\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: branch0Nodes[12].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test13:\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash in the main chain just after\n\t\t\t// that point. The expected result is the blocks after\n\t\t\t// the known point in the main chain and the stop hash\n\t\t\t// has no effect.\n\t\t\tname: \"remote main chain past, stop in main after\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: branch0Nodes[13].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 13),\n\t\t\thashes: nodeHashes(branch0Nodes, 13),\n\t\t},\n\t\t//test14:-------------\n\t\t{\n\t\t\t// Locators based on remote being on main chain in the\n\t\t\t// past and a stop hash in the main chain some time\n\t\t\t// after that point. The expected result is the blocks\n\t\t\t// after the known point in the main chain and the stop\n\t\t\t// hash has no effect.\n\t\t\tname: \"remote main chain past, stop in main after more\",\n\t\t\tlocator: localView.BlockLocator(branch0Nodes[12]),\n\t\t\thashStop: branch0Nodes[15].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 13, 14, 15),\n\t\t\thashes: nodeHashes(branch0Nodes, 13, 14, 15),\n\t\t},\n\t\t//test15:\n\t\t{\n\t\t\t// Locators based on remote being at exactly the same\n\t\t\t// point in the main chain and a stop hash local node\n\t\t\t// doesn't know about. The expected result is no\n\t\t\t// located inventory.\n\t\t\tname: \"remote main chain same, unknown stop\",\n\t\t\tlocator: localView.BlockLocator(nil),\n\t\t\thashStop: common.Hash{0x01},\n\t\t\theaders: nil,\n\t\t\thashes: nil,\n\t\t},\n\t\t//test16:\n\t\t{\n\t\t\t// Locators based on remote being at exactly the same\n\t\t\t// point in the main chain and a stop hash at exactly\n\t\t\t// the same point. The expected result is no located\n\t\t\t// inventory.\n\t\t\tname: \"remote main chain same, stop same point\",\n\t\t\tlocator: localView.BlockLocator(nil),\n\t\t\thashStop: tip(branch0Nodes).hash,\n\t\t\theaders: nil,\n\t\t\thashes: nil,\n\t\t},\n\t\t//test17:\n\t\t{\n\t\t\t// Locators from remote that don't include any blocks\n\t\t\t// the local node knows. This would happen if the\n\t\t\t// remote node is on a completely separate chain that\n\t\t\t// isn't rooted with the same genesis block. 
The\n\t\t\t// expected result is the blocks after the genesis\n\t\t\t// block.\n\t\t\tname: \"remote unrelated chain\",\n\t\t\tlocator: unrelatedView.BlockLocator(nil),\n\t\t\thashStop: common.Hash{},\n\t\t\theaders: nodeHeaders(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test18:\n\t\t{\n\t\t\t// Locators from remote for second block in main chain\n\t\t\t// and no stop hash, but with an overridden max limit.\n\t\t\t// The expected result is the blocks after the second\n\t\t\t// block limited by the max.\n\t\t\tname: \"remote genesis\",\n\t\t\tlocator: locatorHashes(branch0Nodes, 0),\n\t\t\thashStop: common.Hash{},\n\t\t\tmaxAllowed: 3,\n\t\t\theaders: nodeHeaders(branch0Nodes, 1, 2, 3),\n\t\t\thashes: nodeHashes(branch0Nodes, 1, 2, 3),\n\t\t},\n\t\t//test19:\n\t\t{\n\t\t\t// Poorly formed locator.\n\t\t\t//\n\t\t\t// Locator from remote that only includes a single\n\t\t\t// block on a side chain the local node knows. The\n\t\t\t// expected result is the blocks after the genesis\n\t\t\t// block since even though the block is known, it is on\n\t\t\t// a side chain and there are no more locators to find\n\t\t\t// the fork point.\n\t\t\tname: \"weak locator, single known side block\",\n\t\t\tlocator: locatorHashes(branch1Nodes, 1),\n\t\t\thashStop: common.Hash{},\n\t\t\theaders: nodeHeaders(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test20:\n\t\t{\n\t\t\t// Poorly formed locator.\n\t\t\t//\n\t\t\t// Locator from remote that only includes multiple\n\t\t\t// blocks on a side chain the local node knows however\n\t\t\t// none in the main chain. The expected result is the\n\t\t\t// blocks after the genesis block since even though the\n\t\t\t// blocks are known, they are all on a side chain and\n\t\t\t// there are no more locators to find the fork point.\n\t\t\tname: \"weak locator, multiple known side blocks\",\n\t\t\tlocator: locatorHashes(branch1Nodes, 1),\n\t\t\thashStop: common.Hash{},\n\t\t\theaders: nodeHeaders(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t\thashes: nodeHashes(branch0Nodes, 0, 1, 2, 3, 4, 5, 6,\n\t\t\t\t7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17),\n\t\t},\n\t\t//test21:\n\t\t{\n\t\t\t// Poorly formed locator.\n\t\t\t//\n\t\t\t// Locator from remote that only includes multiple\n\t\t\t// blocks on a side chain the local node knows however\n\t\t\t// none in the main chain but includes a stop hash in\n\t\t\t// the main chain. 
The expected result is the blocks\n\t\t\t// after the genesis block up to the stop hash since\n\t\t\t// even though the blocks are known, they are all on a\n\t\t\t// side chain and there are no more locators to find the\n\t\t\t// fork point.\n\t\t\tname: \"weak locator, multiple known side blocks, stop in main\",\n\t\t\tlocator: locatorHashes(branch1Nodes, 1),\n\t\t\thashStop: branch0Nodes[5].hash,\n\t\t\theaders: nodeHeaders(branch0Nodes, 0, 1, 2, 3, 4, 5),\n\t\t\thashes: nodeHashes(branch0Nodes, 0, 1, 2, 3, 4, 5),\n\t\t},\n\t}\n\tfor i, test := range tests {\n\t\tt.Logf(\"==========test case %d==========\", i)\n\t\t// Ensure the expected headers are located.\n\t\tvar headers []protos.BlockHeader\n\t\tif test.maxAllowed != 0 {\n\t\t\t// Need to use the unexported function to override the\n\t\t\t// max allowed for headers.\n\t\t\tchain.chainLock.RLock()\n\t\t\theaders = chain.locateHeaders(test.locator, &test.hashStop, test.maxAllowed)\n\t\t\tchain.chainLock.RUnlock()\n\t\t} else {\n\t\t\theaders = chain.LocateHeaders(test.locator, &test.hashStop)\n\t\t}\n\t\tif !reflect.DeepEqual(headers, test.headers) {\n\t\t\tt.Errorf(\"%s: unxpected headers -- got %v, want %v\", test.name, headers, test.headers)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Ensure the expected block hashes are located.\n\t\tmaxAllowed := uint32(protos.MaxBlocksPerMsg)\n\t\tif test.maxAllowed != 0 {\n\t\t\tmaxAllowed = test.maxAllowed\n\t\t}\n\t\thashes := chain.LocateBlocks(test.locator, &test.hashStop, maxAllowed)\n\t\tif !reflect.DeepEqual(hashes, test.hashes) {\n\t\t\tt.Errorf(\"%s: unxpected hashes -- got %v, want %v\", test.name, hashes, test.hashes)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (p *pinner) IsPinned(key util.Key) bool {\n\tp.lock.RLock()\n\tdefer p.lock.RUnlock()\n\treturn p.recursePin.HasKey(key) ||\n\t\tp.directPin.HasKey(key) ||\n\t\tp.indirPin.HasKey(key) ||\n\t\tp.isInternalPin(key)\n}", "func TestPWLOnlyParent(t *testing.T) {\n\tdir, err := ioutil.TempDir(\"\", tstprefix)\n\tif err != nil {\n\t\tt.Fatalf(\"error creating tempdir: %v\", err)\n\t}\n\tdefer os.RemoveAll(dir)\n\tds := NewTestStore()\n\n\timj := `\n\t\t{\n\t\t \"acKind\": \"ImageManifest\",\n\t\t \"acVersion\": \"0.1.1\",\n\t\t \"name\": \"example.com/test01\",\n\t\t \"pathWhitelist\" : [ \"/a/file01.txt\", \"/a/file02.txt\", \"/b/link01.txt\", \"/c/\", \"/d/\" ]\n\t\t}\n\t`\n\n\tentries := []*testTarEntry{\n\t\t{\n\t\t\tcontents: imj,\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"manifest\",\n\t\t\t\tSize: int64(len(imj)),\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/a/file01.txt\",\n\t\t\t\tSize: 5,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/a/file02.txt\",\n\t\t\t\tSize: 5,\n\t\t\t},\n\t\t},\n\t\t// This should not appear in rendered aci\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/a/file03.txt\",\n\t\t\t\tSize: 5,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/b/link01.txt\",\n\t\t\t\tLinkname: \"file01.txt\",\n\t\t\t\tTypeflag: tar.TypeSymlink,\n\t\t\t},\n\t\t},\n\t\t// The file \"rootfs/c/file01.txt\" should not appear but a new file \"rootfs/c/file02.txt\" provided by the upper image should appear.\n\t\t// The directory should be left with its permissions\n\t\t{\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/c\",\n\t\t\t\tTypeflag: tar.TypeDir,\n\t\t\t\tMode: 0700,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: 
&tar.Header{\n\t\t\t\tName: \"rootfs/c/file01.txt\",\n\t\t\t\tSize: 5,\n\t\t\t\tMode: 0700,\n\t\t\t},\n\t\t},\n\t\t// The file \"rootfs/d/file01.txt\" should not appear but the directory should be left and also its permissions\n\t\t{\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/d\",\n\t\t\t\tTypeflag: tar.TypeDir,\n\t\t\t\tMode: 0700,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/d/file01.txt\",\n\t\t\t\tSize: 5,\n\t\t\t\tMode: 0700,\n\t\t\t},\n\t\t},\n\t\t// The file and the directory should not appear\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/e/file01.txt\",\n\t\t\t\tSize: 5,\n\t\t\t\tMode: 0700,\n\t\t\t},\n\t\t},\n\t}\n\n\tkey1, err := newTestACI(entries, dir, ds)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\tim, err := createImageManifest(imj)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\timage1 := Image{Im: im, Key: key1, Level: 1}\n\n\timj = `\n\t\t{\n\t\t \"acKind\": \"ImageManifest\",\n\t\t \"acVersion\": \"0.1.1\",\n\t\t \"name\": \"example.com/test02\"\n\t\t}\n\t`\n\n\tk1, _ := types.NewHash(key1)\n\timj, err = addDependencies(imj,\n\t\ttypes.Dependency{\n\t\t\tImageName: \"example.com/test01\",\n\t\t\tImageID: k1},\n\t)\n\n\tentries = []*testTarEntry{\n\t\t{\n\t\t\tcontents: imj,\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"manifest\",\n\t\t\t\tSize: int64(len(imj)),\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tcontents: \"hellohello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/b/file01.txt\",\n\t\t\t\tSize: 10,\n\t\t\t},\n\t\t},\n\t\t// New file\n\t\t{\n\t\t\tcontents: \"hello\",\n\t\t\theader: &tar.Header{\n\t\t\t\tName: \"rootfs/c/file02.txt\",\n\t\t\t\tSize: 5,\n\t\t\t},\n\t\t},\n\t}\n\n\texpectedFiles := []*fileInfo{\n\t\t&fileInfo{path: \"manifest\", typeflag: tar.TypeReg},\n\t\t&fileInfo{path: \"rootfs/a/file01.txt\", typeflag: tar.TypeReg, size: 5},\n\t\t&fileInfo{path: \"rootfs/a/file02.txt\", typeflag: tar.TypeReg, size: 5},\n\t\t&fileInfo{path: \"rootfs/b/link01.txt\", typeflag: tar.TypeSymlink},\n\t\t&fileInfo{path: \"rootfs/b/file01.txt\", typeflag: tar.TypeReg, size: 10},\n\t\t&fileInfo{path: \"rootfs/c\", typeflag: tar.TypeDir, mode: 0700},\n\t\t&fileInfo{path: \"rootfs/c/file02.txt\", typeflag: tar.TypeReg, size: 5},\n\t\t&fileInfo{path: \"rootfs/d\", typeflag: tar.TypeDir, mode: 0700},\n\t}\n\n\tkey2, err := newTestACI(entries, dir, ds)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\tim, err = createImageManifest(imj)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\timage2 := Image{Im: im, Key: key2, Level: 0}\n\n\timages := Images{image2, image1}\n\terr = checkRenderACIFromList(images, expectedFiles, ds)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n\terr = checkRenderACI(\"example.com/test02\", expectedFiles, ds)\n\tif err != nil {\n\t\tt.Fatalf(\"unexpected error: %v\", err)\n\t}\n}", "func TestWalkInBinaryTree_0(t *testing.T) {\n\troot := int32(1)\n\tparentRoot := int32(-1)\n\n\tindexes := [][]int32{\n\t\t{2, 3},\n\t\t{-1, -1},\n\t\t{-1, -1},\n\t}\n\n\texpected := []int32{\n\t\t2, 1, 3,\n\t}\n\n\tactual := walkInBinaryTree(root, parentRoot, indexes)\n\tassert.Equal(t, expected, actual)\n}", "func TestOpenSiaDir(t *testing.T) {\n\tif testing.Short() && !build.VLONG {\n\t\tt.SkipNow()\n\t}\n\tt.Parallel()\n\t// Create filesystem.\n\troot := filepath.Join(testDir(t.Name()), \"fs-root\")\n\tfs := newTestFileSystem(root)\n\t// Create 
dir /foo\n\tsp := newSiaPath(\"foo\")\n\tif err := fs.NewSiaDir(sp, modules.DefaultDirPerm); err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Open the newly created dir.\n\tfoo, err := fs.OpenSiaDir(sp)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer func() {\n\t\tif err := foo.Close(); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\t// Create dir /sub/foo. This time don't use NewSiaDir but OpenSiaDir with\n\t// the create flag set to `true`.\n\tsp = newSiaPath(\"sub/foo\")\n\tsd, err := fs.OpenSiaDirCustom(sp, true)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer func() {\n\t\tif err := sd.Close(); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\t// Confirm the integrity of the root node.\n\tif err := fs.checkNode(0, 2, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Open the root node manually and confirm that they are the same.\n\trootSD, err := fs.OpenSiaDir(modules.RootSiaPath())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := fs.checkNode(len(rootSD.threads), len(rootSD.directories), len(rootSD.files)); err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Confirm the integrity of the /sub node.\n\tsubNode, exists := fs.directories[\"sub\"]\n\tif !exists {\n\t\tt.Fatal(\"expected root to contain the 'sub' node\")\n\t}\n\tif *subNode.name != \"sub\" {\n\t\tt.Fatalf(\"subNode name should be 'sub' but was %v\", *subNode.name)\n\t}\n\tif path := filepath.Join(*subNode.parent.path, *subNode.name); path != *subNode.path {\n\t\tt.Fatalf(\"subNode path should be %v but was %v\", path, *subNode.path)\n\t}\n\tif err := subNode.checkNode(0, 1, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Confirm the integrity of the /sub/foo node.\n\tfooNode, exists := subNode.directories[\"foo\"]\n\tif !exists {\n\t\tt.Fatal(\"expected /sub to contain /sub/foo\")\n\t}\n\tif *fooNode.name != \"foo\" {\n\t\tt.Fatalf(\"fooNode name should be 'foo' but was %v\", *fooNode.name)\n\t}\n\tif path := filepath.Join(*fooNode.parent.path, *fooNode.name); path != *fooNode.path {\n\t\tt.Fatalf(\"fooNode path should be %v but was %v\", path, *fooNode.path)\n\t}\n\tif err := fooNode.checkNode(1, 0, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Open the newly created dir again.\n\tsd2, err := fs.OpenSiaDir(sp)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer func() {\n\t\tif err := sd2.Close(); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\t// They should have different UIDs.\n\tif sd.threadUID == 0 {\n\t\tt.Fatal(\"threaduid shouldn't be 0\")\n\t}\n\tif sd2.threadUID == 0 {\n\t\tt.Fatal(\"threaduid shouldn't be 0\")\n\t}\n\tif sd.threadUID == sd2.threadUID {\n\t\tt.Fatal(\"sd and sd2 should have different threaduids\")\n\t}\n\tif len(sd.threads) != 2 || len(sd2.threads) != 2 {\n\t\tt.Fatal(\"sd and sd2 should both have 2 threads registered\")\n\t}\n\t_, exists1 := sd.threads[sd.threadUID]\n\t_, exists2 := sd.threads[sd2.threadUID]\n\t_, exists3 := sd2.threads[sd.threadUID]\n\t_, exists4 := sd2.threads[sd2.threadUID]\n\tif exists := exists1 && exists2 && exists3 && exists4; !exists {\n\t\tt.Fatal(\"sd and sd1's threads don't contain the right uids\")\n\t}\n\t// Open /sub manually and make sure that subDir and sdSub are consistent.\n\tsdSub, err := fs.OpenSiaDir(newSiaPath(\"sub\"))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer func() {\n\t\tif err := sdSub.Close(); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\tif err := subNode.checkNode(1, 1, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := sdSub.checkNode(1, 1, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "func testCheckDDCloudAddressListExists(name string, 
exists bool) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif exists && addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address list not found with Id '%s'\", addressListID)\n\t\t} else if !exists && addressList != nil {\n\t\t\treturn fmt.Errorf(\"bad: address list still exists with Id '%s'\", addressListID)\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func TestAppendNeighborCandidates(t *testing.T) {\n\tlocalNode, _ := start(RandomID(), 0, \"\") // ID{0, 0, 0, 0}\n\tif localNode == nil {\n\t\treturn\n\t}\n\n\tremNodes := createRandRemoteNodes(200)\n\tneighbors := make([]RemoteNode, 0)\n\tfor i, node := range remNodes {\n\t\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{node})\n\t\tneighbors = localNode.SortListByCloseness(neighbors)\n\t\tassertSortedDuplicateFreeNodeList(t, neighbors, localNode)\n\t\tassert.Equal(t, i+1, len(neighbors))\n\t}\n}", "func (api *PinAPI) pinLsAll(ctx context.Context, typeStr string) <-chan coreiface.Pin {\n\tout := make(chan coreiface.Pin, 1)\n\n\temittedSet := cid.NewSet()\n\n\tAddToResultKeys := func(c cid.Cid, typeStr string) error {\n\t\tif emittedSet.Visit(c) {\n\t\t\tselect {\n\t\t\tcase out <- &pinInfo{\n\t\t\t\tpinType: typeStr,\n\t\t\t\tpath: path.IpldPath(c),\n\t\t\t}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn ctx.Err()\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tgo func() {\n\t\tdefer close(out)\n\n\t\tvar rkeys []cid.Cid\n\t\tvar err error\n\t\tif typeStr == \"recursive\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"recursive\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"direct\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"direct\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" {\n\t\t\t// We need to first visit the direct pins that have priority\n\t\t\t// without emitting them\n\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t}\n\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" || typeStr == \"all\" {\n\t\t\twalkingSet := cid.NewSet()\n\t\t\tfor _, k := range rkeys {\n\t\t\t\terr = merkledag.Walk(\n\t\t\t\t\tctx, 
merkledag.GetLinksWithDAG(api.dag), k,\n\t\t\t\t\tfunc(c cid.Cid) bool {\n\t\t\t\t\t\tif !walkingSet.Visit(c) {\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif emittedSet.Has(c) {\n\t\t\t\t\t\t\treturn true // skipped\n\t\t\t\t\t\t}\n\t\t\t\t\t\terr := AddToResultKeys(c, \"indirect\")\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn true\n\t\t\t\t\t},\n\t\t\t\t\tmerkledag.SkipRoot(), merkledag.Concurrent(),\n\t\t\t\t)\n\t\t\t\tif err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn out\n}", "func TestFindMapPaths(t *testing.T) {\n\tms := compileModules(t, map[string]string{\n\t\t\"a-module\": `\n\t\t\tmodule a-module {\n\t\t\t\tprefix \"m\";\n\t\t\t\tnamespace \"urn:m\";\n\n\t\t\t\tcontainer a-container {\n\t\t\t\t\tleaf field-a {\n\t\t\t\t\t\ttype string;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tcontainer b-container {\n\t\t\t\t\tcontainer config {\n\t\t\t\t\t\tleaf field-b {\n\t\t\t\t\t\t\ttype string;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tcontainer state {\n\t\t\t\t\t\tleaf field-b {\n\t\t\t\t\t\t\ttype string;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tcontainer c-container {\n\t\t\t\t\t\tleaf field-d {\n\t\t\t\t\t\t\ttype string;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t`,\n\t\t\"d-module\": `\n\t\t\tmodule d-module {\n\t\t\t\tprefix \"n\";\n\t\t\t\tnamespace \"urn:n\";\n\n\t\t\t\timport a-module { prefix \"a\"; }\n\n\t\t\t\taugment \"/a:b-container/config\" {\n\t\t\t\t\tleaf field-c { type string; }\n\t\t\t\t}\n\n\t\t\t\taugment \"/a:b-container/state\" {\n\t\t\t\t\tleaf field-c { type string; }\n\t\t\t\t}\n\n\t\t\t\tcontainer d-container {\n\t\t\t\t\tlist d-list {\n\t\t\t\t\t\tkey d-key;\n\n\t\t\t\t\t\tleaf d-key {\n\t\t\t\t\t\t\ttype leafref {\n\t\t\t\t\t\t\t\tpath \"../config/d-key\";\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tcontainer config {\n\t\t\t\t\t\t\tleaf d-key {\n\t\t\t\t\t\t\t\ttype string;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tcontainer state {\n\t\t\t\t\t\t\tleaf d-key {\n\t\t\t\t\t\t\t\ttype string;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t`,\n\t})\n\n\ttests := []struct {\n\t\tname string\n\t\tinStruct *Directory\n\t\tinField string\n\t\tinCompressPaths bool\n\t\tinShadowSchemaPaths bool\n\t\tinAbsolutePaths bool\n\t\twantPaths [][]string\n\t\twantModules [][]string\n\t\twantErr bool\n\t}{{\n\t\tname: \"first-level container with path compression off\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"AContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"a-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-a\": findEntry(t, ms, \"a-module\", \"a-container/field-a\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-a\",\n\t\twantPaths: [][]string{{\"field-a\"}},\n\t\twantModules: [][]string{{\"a-module\"}},\n\t}, {\n\t\tname: \"invalid parent path - shorter than directory path\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"AContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"a-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-a\": findEntry(t, ms, \"a-module\", \"a-container\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-a\",\n\t\twantErr: true,\n\t}, {\n\t\tname: \"first-level container with path compression on\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"BContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"b-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-b\": findEntry(t, ms, \"a-module\", 
\"b-container/config/field-b\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"field-b\": findEntry(t, ms, \"a-module\", \"b-container/state/field-b\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-b\",\n\t\tinCompressPaths: true,\n\t\twantPaths: [][]string{{\"config\", \"field-b\"}},\n\t\twantModules: [][]string{{\"a-module\", \"a-module\"}},\n\t}, {\n\t\tname: \"first-level container with path compression on and ignoreShadowSchemaPaths on\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"BContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"b-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-b\": findEntry(t, ms, \"a-module\", \"b-container/config/field-b\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"field-b\": findEntry(t, ms, \"a-module\", \"b-container/state/field-b\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-b\",\n\t\tinCompressPaths: true,\n\t\tinShadowSchemaPaths: true,\n\t\twantPaths: [][]string{{\"state\", \"field-b\"}},\n\t\twantModules: [][]string{{\"a-module\", \"a-module\"}},\n\t}, {\n\t\tname: \"augmented first-level container with path compression on\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"BContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"b-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-c\": findEntry(t, ms, \"a-module\", \"b-container/config/field-c\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"field-c\": findEntry(t, ms, \"a-module\", \"b-container/state/field-c\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-c\",\n\t\tinCompressPaths: true,\n\t\twantPaths: [][]string{{\"config\", \"field-c\"}},\n\t\twantModules: [][]string{{\"a-module\", \"d-module\"}},\n\t}, {\n\t\tname: \"augmented first-level container with inShadowSchemaPaths=true\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"BContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"b-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-c\": findEntry(t, ms, \"a-module\", \"b-container/config/field-c\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"field-c\": findEntry(t, ms, \"a-module\", \"b-container/state/field-c\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-c\",\n\t\tinCompressPaths: true,\n\t\tinShadowSchemaPaths: true,\n\t\twantPaths: [][]string{{\"state\", \"field-c\"}},\n\t\twantModules: [][]string{{\"a-module\", \"d-module\"}},\n\t}, {\n\t\tname: \"container with absolute paths on\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"BContainer\",\n\t\t\tPath: []string{\"\", \"a-module\", \"b-container\", \"c-container\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"field-d\": findEntry(t, ms, \"a-module\", \"b-container/c-container/field-d\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"field-d\",\n\t\tinAbsolutePaths: true,\n\t\twantPaths: [][]string{{\"\", \"b-container\", \"c-container\", \"field-d\"}},\n\t\twantModules: [][]string{{\"\", \"a-module\", \"a-module\", \"a-module\"}},\n\t}, {\n\t\tname: \"top-level module\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"CContainer\",\n\t\t\tPath: []string{\"\"},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"top\": findEntry(t, ms, \"a-module\", \"\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"top\",\n\t\twantPaths: [][]string{{\"a-module\"}},\n\t\twantModules: [][]string{{\"a-module\"}},\n\t}, {\n\t\tname: \"list with leafref key\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"DList\",\n\t\t\tPath: []string{\"\", \"d-module\", \"d-container\", \"d-list\"},\n\t\t\tListAttr: &YangListAttr{\n\t\t\t\tKeyElems: 
[]*yang.Entry{\n\t\t\t\t\tfindEntry(t, ms, \"d-module\", \"d-container/d-list/config/d-key\"),\n\t\t\t\t},\n\t\t\t},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"d-key\": findEntry(t, ms, \"d-module\", \"d-container/d-list/config/d-key\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"d-key\": findEntry(t, ms, \"d-module\", \"d-container/d-list/state/d-key\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"d-key\",\n\t\tinCompressPaths: true,\n\t\twantPaths: [][]string{\n\t\t\t{\"config\", \"d-key\"},\n\t\t\t{\"d-key\"},\n\t\t},\n\t\twantModules: [][]string{\n\t\t\t{\"d-module\", \"d-module\"},\n\t\t\t{\"d-module\"},\n\t\t},\n\t}, {\n\t\tname: \"list with leafref key with shadowSchemaPaths=true\",\n\t\tinStruct: &Directory{\n\t\t\tName: \"DList\",\n\t\t\tPath: []string{\"\", \"d-module\", \"d-container\", \"d-list\"},\n\t\t\tListAttr: &YangListAttr{\n\t\t\t\tKeyElems: []*yang.Entry{\n\t\t\t\t\tfindEntry(t, ms, \"d-module\", \"d-container/d-list/config/d-key\"),\n\t\t\t\t},\n\t\t\t},\n\t\t\tFields: map[string]*yang.Entry{\n\t\t\t\t\"d-key\": findEntry(t, ms, \"d-module\", \"d-container/d-list/config/d-key\"),\n\t\t\t},\n\t\t\tShadowedFields: map[string]*yang.Entry{\n\t\t\t\t\"d-key\": findEntry(t, ms, \"d-module\", \"d-container/d-list/state/d-key\"),\n\t\t\t},\n\t\t},\n\t\tinField: \"d-key\",\n\t\tinCompressPaths: true,\n\t\tinShadowSchemaPaths: true,\n\t\twantPaths: [][]string{\n\t\t\t{\"state\", \"d-key\"},\n\t\t\t{\"d-key\"},\n\t\t},\n\t\twantModules: [][]string{\n\t\t\t{\"d-module\", \"d-module\"},\n\t\t\t{\"d-module\"},\n\t\t},\n\t}}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgotPaths, gotModules, err := findMapPaths(tt.inStruct, tt.inField, tt.inCompressPaths, tt.inShadowSchemaPaths, tt.inAbsolutePaths)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"%s: YANGCodeGenerator.findMapPaths(%v, %v): compress: %v, shadowSchemaPaths: %v, wantErr: %v, gotPaths error: %v\",\n\t\t\t\t\ttt.name, tt.inStruct, tt.inField, tt.inCompressPaths, tt.inShadowSchemaPaths, tt.wantErr, err)\n\t\t\t}\n\t\t\tif tt.wantErr {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif diff := cmp.Diff(tt.wantPaths, gotPaths); diff != \"\" {\n\t\t\t\tt.Errorf(\"%s: YANGCodeGenerator.findMapPaths(%v, %v): compress: %v, shadowSchemaPaths: %v, (-want, +gotPaths):\\n%s\", tt.name, tt.inStruct, tt.inField, tt.inCompressPaths, tt.inShadowSchemaPaths, diff)\n\t\t\t}\n\n\t\t\tif diff := cmp.Diff(tt.wantModules, gotModules); diff != \"\" {\n\t\t\t\tt.Errorf(\"%s: YANGCodeGenerator.findMapPaths(%v, %v): compress: %v, shadowSchemaPaths: %v, (-want, +gotModules):\\n%s\", tt.name, tt.inStruct, tt.inField, tt.inCompressPaths, tt.inShadowSchemaPaths, diff)\n\t\t\t}\n\t\t})\n\t}\n}", "func TestWalkInBinaryTree_1(t *testing.T) {\n\troot := int32(1)\n\tparentRoot := int32(-1)\n\n\tindexes := [][]int32{\n\t\t{2, 3},\n\t\t{4, 5},\n\t\t{-1, -1},\n\t\t{-1, -1},\n\t\t{-1, -1},\n\t}\n\n\texpected := []int32{\n\t\t4, 2, 5, 1, 3,\n\t}\n\n\tactual := walkInBinaryTree(root, parentRoot, indexes)\n\tassert.Equal(t, expected, actual)\n}", "func TestNeighbors(t *testing.T) {\n\ttest := &Test{\n\t\tsetupCmds: []Cmd{\n\t\t\t{\"ip netns add nb-vm1\", true},\n\t\t\t{\"ip link add nb-vm1-eth0 type veth peer name eth0 netns nb-vm1\", true},\n\t\t\t{\"ip link set nb-vm1-eth0 up\", true},\n\t\t\t{\"ip netns exec nb-vm1 ip link set eth0 up\", true},\n\t\t\t{\"ip netns exec nb-vm1 ip addr add 192.168.33.33/24 dev eth0\", true},\n\t\t\t{\"sleep 10\", true},\n\t\t\t{\"sudo ip netns exec nb-vm1 ip neighbour add 
192.168.33.252 dev eth0 lladdr a6:d1:a0:51:03:49\", true},\n\t\t},\n\n\t\ttearDownCmds: []Cmd{\n\t\t\t{\"ip link del nb-vm1-eth0\", true},\n\t\t\t{\"ip netns del nb-vm1\", true},\n\t\t},\n\n\t\tmode: OneShot,\n\n\t\tchecks: []CheckFunction{\n\t\t\tfunc(c *CheckContext) error {\n\t\t\t\tprefix := c.gremlin\n\n\t\t\t\tnode, err := c.gh.GetNode(prefix.V().Has(\"IPV4\", \"192.168.33.33/24\"))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"Failed to find a node with IP 192.168.33.33/24\")\n\t\t\t\t}\n\n\t\t\t\tneighbors, ok := node.Metadata[\"Neighbors\"].(*topology.Neighbors)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"Wrong metadata type for Neighbors: %+v\", node.Metadata[\"Neighbors\"])\n\t\t\t\t}\n\n\t\t\t\tvar found bool\n\t\t\t\tfor _, nb := range *neighbors {\n\t\t\t\t\tif nb.MAC == \"a6:d1:a0:51:03:49\" {\n\t\t\t\t\t\tfound = true\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif !found {\n\t\t\t\t\treturn errors.New(\"unable to find neighbor entry with MAC: a6:d1:a0:51:03:49\")\n\t\t\t\t}\n\n\t\t\t\treturn nil\n\t\t\t},\n\t\t},\n\t}\n\tRunTest(t, test)\n}", "func TestNDPNeighborSolicit(t *testing.T) {\n\tb := []byte{\n\t\t0, 0, 0, 0,\n\t\t1, 2, 3, 4,\n\t\t5, 6, 7, 8,\n\t\t9, 10, 11, 12,\n\t\t13, 14, 15, 16,\n\t}\n\n\t// Test getting the Target Address.\n\tns := NDPNeighborSolicit(b)\n\taddr := testutil.MustParse6(\"102:304:506:708:90a:b0c:d0e:f10\")\n\tif got := ns.TargetAddress(); got != addr {\n\t\tt.Errorf(\"got ns.TargetAddress = %s, want %s\", got, addr)\n\t}\n\n\t// Test updating the Target Address.\n\taddr2 := testutil.MustParse6(\"1112:1314:1516:1718:191a:1b1c:1d1e:1f11\")\n\tns.SetTargetAddress(addr2)\n\tif got := ns.TargetAddress(); got != addr2 {\n\t\tt.Errorf(\"got ns.TargetAddress = %s, want %s\", got, addr2)\n\t}\n\t// Make sure the address got updated in the backing buffer.\n\tif got := tcpip.AddrFrom16Slice(b[ndpNSTargetAddessOffset:][:IPv6AddressSize]); got != addr2 {\n\t\tt.Errorf(\"got targetaddress buffer = %s, want %s\", got, addr2)\n\t}\n}", "func verifyPodLocation(pod *v1.Pod, nodeList *v1.NodeList, zoneValue string, regionValue string) error {\n\tfor _, node := range nodeList.Items {\n\t\tif pod.Spec.NodeName == node.Name {\n\t\t\tfor labelKey, labelValue := range node.Labels {\n\t\t\t\tif labelKey == zoneKey && zoneValue != \"\" {\n\t\t\t\t\tgomega.Expect(zoneValue).To(gomega.Equal(labelValue), fmt.Sprintf(\"Pod %s is not running on Node located in zone %v\", pod.Name, zoneValue))\n\t\t\t\t}\n\t\t\t\tif labelKey == regionKey && regionValue != \"\" {\n\t\t\t\t\tgomega.Expect(regionValue).To(gomega.Equal(labelValue), fmt.Sprintf(\"Pod %s is not running on Node located in region %v\", pod.Name, regionValue))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func TestDirectConnectionFlags(t *testing.T) {\n // Test with the equivalent of a single IP address in the -d arg: -d 1.2.3.4\n gDirects = \"1.2.3.4\"\n dirFuncs := buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n ipv4 := \"1.2.3.4\"\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n\n // now make sure an address that should be proxied still works\n ipv4 = \"4.5.6.7\"\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n\n\n // Test with the equivalent of a multiple IP addresses in the -d arg: -d 1.2.3.4,2.3.4.5\n gDirects = 
\"1.2.3.4,2.3.4.5\"\n dirFuncs = buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n addrsToTest := []string{\"1.2.3.4\", \"2.3.4.5\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n }\n\n // now make sure an address that should be proxied still works\n ipv4 = \"4.5.6.7\"\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n\n\n // Test with the equivalent of multiple IP address specs in the -d arg: -d 1.2.3.0/24,2.3.4.0/25,4.4.4.4\"\n gDirects = \"1.2.3.0/24,2.3.4.0/25,4.4.4.4\"\n dirFuncs = buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n addrsToTest = []string{\"1.2.3.4\", \"1.2.3.254\", \"2.3.4.5\", \"4.4.4.4\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n }\n\n // now make sure an address that should be proxied still works\n addrsToTest = []string{\"4.5.6.7\", \"2.3.4.254\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n }\n}", "func TestRouteTable(t *testing.T) {\n\ttest := &Test{\n\t\tsetupCmds: []Cmd{\n\t\t\t{\"ovs-vsctl add-br br-rt\", true},\n\t\t\t{\"ip netns add rt-vm1\", true},\n\t\t\t{\"ip link add rt-vm1-eth0 type veth peer name rt-eth-src netns rt-vm1\", true},\n\t\t\t{\"ip link set rt-vm1-eth0 up\", true},\n\t\t\t{\"ip netns exec rt-vm1 ip link set rt-eth-src up\", true},\n\t\t\t{\"ip netns exec rt-vm1 ip address add 124.65.91.42/24 dev rt-eth-src\", true},\n\t\t\t{\"ovs-vsctl add-port br-rt rt-vm1-eth0\", true},\n\t\t\t{\"ip netns add rt-vm2\", true},\n\t\t\t{\"ip link add rt-vm2-eth0 type veth peer name rt-eth-dst netns rt-vm2\", true},\n\t\t\t{\"ip link set rt-vm2-eth0 up\", true},\n\t\t\t{\"ip netns exec rt-vm2 ip link set rt-eth-dst up\", true},\n\t\t\t{\"ip netns exec rt-vm2 ip address add 124.65.92.43/24 dev rt-eth-dst\", true},\n\t\t\t{\"ovs-vsctl add-port br-rt rt-vm2-eth0\", true},\n\t\t},\n\n\t\ttearDownCmds: []Cmd{\n\t\t\t{\"ovs-vsctl del-br br-rt\", true},\n\t\t\t{\"ip link del rt-vm1-eth0\", true},\n\t\t\t{\"ip netns del rt-vm1\", true},\n\t\t\t{\"ip link del rt-vm2-eth0\", true},\n\t\t\t{\"ip netns del rt-vm2\", true},\n\t\t},\n\n\t\tmode: OneShot,\n\n\t\tchecks: []CheckFunction{\n\t\t\tfunc(c *CheckContext) error {\n\t\t\t\tprefix := c.gremlin\n\n\t\t\t\tnode, err := c.gh.GetNode(prefix.V().Has(\"IPV4\", \"124.65.91.42/24\"))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"Failed to find a node with IP 124.65.91.42/24\")\n\t\t\t\t}\n\n\t\t\t\troutingTables, ok := node.Metadata[\"RoutingTables\"].(*topology.RoutingTables)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"Wrong metadata type for RoutingTables: %+v\", node.Metadata[\"RoutingTables\"])\n\t\t\t\t}\n\t\t\t\tnoOfRoutingTable := len(*routingTables)\n\n\t\t\t\texecCmds(t,\n\t\t\t\t\tCmd{Cmd: \"ip netns exec rt-vm1 ip route add 124.65.92.0/24 via 124.65.91.42 table 2\", Check: true},\n\t\t\t\t\tCmd{Cmd: \"sleep 5\", Check: false},\n\t\t\t\t)\n\n\t\t\t\tnode, err = c.gh.GetNode(prefix.V().Has(\"IPV4\", \"124.65.91.42/24\"))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"Failed to 
find a node with IP 124.65.91.42/24\")\n\t\t\t\t}\n\n\t\t\t\troutingTables, ok = node.Metadata[\"RoutingTables\"].(*topology.RoutingTables)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"Wrong metadata type for RoutingTables: %+v\", node.Metadata[\"RoutingTables\"])\n\t\t\t\t}\n\t\t\t\tnewNoOfRoutingTable := len(*routingTables)\n\n\t\t\t\texecCmds(t,\n\t\t\t\t\tCmd{Cmd: \"ip netns exec rt-vm1 ip route del 124.65.92.0/24 via 124.65.91.42 table 2\", Check: true},\n\t\t\t\t\tCmd{Cmd: \"sleep 5\", Check: false},\n\t\t\t\t)\n\t\t\t\tif newNoOfRoutingTable <= noOfRoutingTable {\n\t\t\t\t\treturn fmt.Errorf(\"Failed to add Route\")\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t},\n\t\t},\n\t}\n\tRunTest(t, test)\n}", "func TestNewSkipList(t *testing.T) {\n\tassert := assert.New(t)\n\tsk := skiplist.New(IntNodeComparator, 8)\n\tdump(\"TestNewSkipList.before.dot\", sk)\n\n\t// Check index column\n\th := 1\n\tfor p := sk.IndexHead; p.Down != nil; p, h = p.Down, h+1 {\n\t\tassert.Equal(sk.DataHead, p.Root, \"Index heads point to the data head\")\n\t\tassert.Equal(sk.IndexTail, linkedlist.LoadState(p).Next, \"Index heads linked to the shared tail\")\n\t}\n\tassert.Equal(7, h, \"Index column height\")\n\n\tassert.Equal(sk.DataTail, sk.IndexTail.Root, \"Index tail ppoints to the data tail\")\n\tassert.Equal(sk.DataTail, linkedlist.LoadState(sk.DataHead).Next, \"Data heads linked to the data tail\")\n}", "func (n packedNumber) isIndirect() uint64 {\n\tif uint32(n)&storageField == storageFieldIndirect {\n\t\treturn 1\n\t}\n\treturn 0\n}", "func TestAccAddressListComplexCreate(t *testing.T) {\n\tresource.Test(t, resource.TestCase{\n\t\tProviders: testAccProviders,\n\t\tCheckDestroy: resource.ComposeTestCheckFunc(\n\t\t\ttestCheckDDCloudAddressListDestroy,\n\t\t\ttestCheckDDCloudNetworkDomainDestroy,\n\t\t),\n\t\tSteps: []resource.TestStep{\n\t\t\tresource.TestStep{\n\t\t\t\tConfig: testAccDDCloudAddressListComplex(\"acc_test_list\", \"af_terraform_list\"),\n\t\t\t\tCheck: resource.ComposeTestCheckFunc(\n\t\t\t\t\ttestCheckDDCloudAddressListExists(\"acc_test_list\", true),\n\t\t\t\t\ttestCheckDDCloudAddressListMatches(\"acc_test_list\", compute.IPAddressList{\n\t\t\t\t\t\tName: \"af_terraform_list\",\n\t\t\t\t\t\tDescription: \"Adam's Terraform test address list (do not delete).\",\n\t\t\t\t\t\tAddresses: []compute.IPAddressListEntry{\n\t\t\t\t\t\t\tcompute.IPAddressListEntry{\n\t\t\t\t\t\t\t\tBegin: \"192.168.1.10\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tcompute.IPAddressListEntry{\n\t\t\t\t\t\t\t\tBegin: \"192.168.1.20\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tcompute.IPAddressListEntry{\n\t\t\t\t\t\t\t\tBegin: \"192.168.2.10\",\n\t\t\t\t\t\t\t\tEnd: stringToPtr(\"192.168.2.12\"),\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t})\n}", "func TestGossipLoopbackInfoPropagation(t *testing.T) {\n\tdefer leaktest.AfterTest(t)()\n\tskip.WithIssue(t, 34494)\n\tstopper := stop.NewStopper()\n\tdefer stopper.Stop(context.Background())\n\n\t// Shared cluster ID by all gossipers (this ensures that the gossipers\n\t// don't talk to servers from unrelated tests by accident).\n\tclusterID := uuid.MakeV4()\n\n\tlocal := startGossip(clusterID, 1, stopper, t, metric.NewRegistry())\n\tremote := startGossip(clusterID, 2, stopper, t, metric.NewRegistry())\n\tremote.mu.Lock()\n\trAddr := remote.mu.is.NodeAddr\n\tremote.mu.Unlock()\n\tlocal.manage()\n\tremote.manage()\n\n\t// Add a gossip info for \"foo\" on remote, that was generated by local. 
This\n\t// simulates what happens if local was to gossip an info, and later restart\n\t// and never gossip that info again.\n\tfunc() {\n\t\tlocal.mu.Lock()\n\t\tdefer local.mu.Unlock()\n\t\tremote.mu.Lock()\n\t\tdefer remote.mu.Unlock()\n\t\t// NB: replacing local.mu.is.newInfo with remote.mu.is.newInfo allows \"foo\"\n\t\t// to be propagated.\n\t\tif err := remote.mu.is.addInfo(\"foo\", local.mu.is.newInfo(nil, 0)); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\n\t// Add an info to local so that it has a highwater timestamp that is newer\n\t// than the info we added to remote. NB: commenting out this line allows\n\t// \"foo\" to be propagated.\n\tif err := local.AddInfo(\"bar\", nil, 0); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Start a client connection to the remote node.\n\tlocal.mu.Lock()\n\tlocal.startClientLocked(&rAddr)\n\tlocal.mu.Unlock()\n\n\tgetInfo := func(g *Gossip, key string) *Info {\n\t\tg.mu.RLock()\n\t\tdefer g.mu.RUnlock()\n\t\treturn g.mu.is.Infos[key]\n\t}\n\n\ttestutils.SucceedsSoon(t, func() error {\n\t\tif getInfo(remote, \"bar\") == nil {\n\t\t\treturn fmt.Errorf(\"bar not propagated\")\n\t\t}\n\t\tif getInfo(local, \"foo\") == nil {\n\t\t\treturn fmt.Errorf(\"foo not propagated\")\n\t\t}\n\t\treturn nil\n\t})\n}", "func testCheckDDCloudAddressListDestroy(state *terraform.State) error {\n\tfor _, res := range state.RootModule().Resources {\n\t\tif res.Type != \"ddcloud_address_list\" {\n\t\t\tcontinue\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\tif addressList != nil {\n\t\t\treturn fmt.Errorf(\"address list '%s' still exists\", addressListID)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (s *Service) unpinBranch(ctx context.Context, p path.Resolved, key []byte) (context.Context, error) {\n\tn, _, err := s.resolveNodeAtPath(ctx, p, key)\n\tif err != nil {\n\t\treturn ctx, err\n\t}\n\tfor _, l := range n.Links() {\n\t\tif l.Name == \"\" {\n\t\t\tcontinue // Data nodes will never be pinned directly\n\t\t}\n\t\tlp := path.IpfsPath(l.Cid)\n\t\tctx, err = s.unpinPath(ctx, lp)\n\t\tif err != nil {\n\t\t\treturn ctx, err\n\t\t}\n\t\tctx, err = s.unpinBranch(ctx, lp, key)\n\t\tif err != nil {\n\t\t\treturn ctx, err\n\t\t}\n\t}\n\treturn ctx, nil\n}", "func repinMap(bpffsPath string, name string, spec *ebpf.MapSpec) error {\n\tfile := filepath.Join(bpffsPath, name)\n\tpinned, err := ebpf.LoadPinnedMap(file, nil)\n\n\t// Given map was not pinned, nothing to do.\n\tif errors.Is(err, unix.ENOENT) {\n\t\treturn nil\n\t}\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"map not found at path %s: %v\", name, err)\n\t}\n\n\tif pinned.Type() == spec.Type &&\n\t\tpinned.KeySize() == spec.KeySize &&\n\t\tpinned.ValueSize() == spec.ValueSize &&\n\t\tpinned.Flags() == spec.Flags &&\n\t\tpinned.MaxEntries() == spec.MaxEntries {\n\t\treturn nil\n\t}\n\n\tdest := file + bpffsPending\n\n\tlog.WithFields(logrus.Fields{\n\t\tlogfields.BPFMapName: name,\n\t\tlogfields.BPFMapPath: file,\n\t}).Infof(\"New version of map has different properties, re-pinning with '%s' suffix\", bpffsPending)\n\n\t// Atomically re-pin the map to the its new path.\n\tif err := pinned.Pin(dest); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func locallyInside(a, b *node) bool {\n\tif area(a.prev, a, a.next) < 0.0 {\n\t\treturn area(a, b, a.next) >= 0.0 && area(a, a.prev, b) >= 0.0\n\t}\n\treturn area(a, b, a.prev) < 0.0 || area(a, 
a.next, b) < 0.0\n}", "func testAddrIndexOperations(t *testing.T, db database.Db, newestBlock *dcrutil.Block, newestSha *chainhash.Hash, newestBlockIdx int64) {\n\t// Metadata about the current addr index state should be unset.\n\tsha, height, err := db.FetchAddrIndexTip()\n\tif err != database.ErrAddrIndexDoesNotExist {\n\t\tt.Fatalf(\"Address index metadata shouldn't be in db, hasn't been built up yet.\")\n\t}\n\n\tvar zeroHash chainhash.Hash\n\tif !sha.IsEqual(&zeroHash) {\n\t\tt.Fatalf(\"AddrIndexTip wrong hash got: %s, want %s\", sha, &zeroHash)\n\n\t}\n\n\tif height != -1 {\n\t\tt.Fatalf(\"Addrindex not built up, yet a block index tip has been set to: %d.\", height)\n\t}\n\n\t// Test enforcement of constraints for \"limit\" and \"skip\"\n\tvar fakeAddr dcrutil.Address\n\t_, _, err = db.FetchTxsForAddr(fakeAddr, -1, 0, false)\n\tif err == nil {\n\t\tt.Fatalf(\"Negative value for skip passed, should return an error\")\n\t}\n\n\t_, _, err = db.FetchTxsForAddr(fakeAddr, 0, -1, false)\n\tif err == nil {\n\t\tt.Fatalf(\"Negative value for limit passed, should return an error\")\n\t}\n\n\t// Simple test to index outputs(s) of the first tx.\n\ttestIndex := make(database.BlockAddrIndex, database.AddrIndexKeySize)\n\ttestTx, err := newestBlock.Tx(0)\n\tif err != nil {\n\t\tt.Fatalf(\"Block has no transactions, unable to test addr \"+\n\t\t\t\"indexing, err %v\", err)\n\t}\n\n\t// Extract the dest addr from the tx.\n\t_, testAddrs, _, err := txscript.ExtractPkScriptAddrs(testTx.MsgTx().TxOut[0].Version, testTx.MsgTx().TxOut[0].PkScript, &chaincfg.MainNetParams)\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to decode tx output, err %v\", err)\n\t}\n\n\t// Extract the hash160 from the output script.\n\tvar hash160Bytes [ripemd160.Size]byte\n\ttestHash160 := testAddrs[0].(*dcrutil.AddressScriptHash).Hash160()\n\tcopy(hash160Bytes[:], testHash160[:])\n\n\t// Create a fake index.\n\tblktxLoc, _, _ := newestBlock.TxLoc()\n\ttestIndex = []*database.TxAddrIndex{\n\t\t&database.TxAddrIndex{\n\t\t\tHash160: hash160Bytes,\n\t\t\tHeight: uint32(newestBlockIdx),\n\t\t\tTxOffset: uint32(blktxLoc[0].TxStart),\n\t\t\tTxLen: uint32(blktxLoc[0].TxLen),\n\t\t},\n\t}\n\n\t// Insert our test addr index into the DB.\n\terr = db.UpdateAddrIndexForBlock(newestSha, newestBlockIdx, testIndex)\n\tif err != nil {\n\t\tt.Fatalf(\"UpdateAddrIndexForBlock: failed to index\"+\n\t\t\t\" addrs for block #%d (%s) \"+\n\t\t\t\"err %v\", newestBlockIdx, newestSha, err)\n\t}\n\n\t// Chain Tip of address should've been updated.\n\tassertAddrIndexTipIsUpdated(db, t, newestSha, newestBlockIdx)\n\n\t// Check index retrieval.\n\ttxReplies, _, err := db.FetchTxsForAddr(testAddrs[0], 0, 1000, false)\n\tif err != nil {\n\t\tt.Fatalf(\"FetchTxsForAddr failed to correctly fetch txs for an \"+\n\t\t\t\"address, err %v\", err)\n\t}\n\t// Should have one reply.\n\tif len(txReplies) != 1 {\n\t\tt.Fatalf(\"Failed to properly index tx by address.\")\n\t}\n\n\t// Our test tx and indexed tx should have the same sha.\n\tindexedTx := txReplies[0]\n\tif !bytes.Equal(indexedTx.Sha.Bytes(), testTx.Sha().Bytes()) {\n\t\tt.Fatalf(\"Failed to fetch proper indexed tx. 
Expected sha %v, \"+\n\t\t\t\"fetched %v\", testTx.Sha(), indexedTx.Sha)\n\t}\n\n\t// Shut down DB.\n\tdb.Sync()\n\tdb.Close()\n\n\t// Re-Open, tip still should be updated to current height and sha.\n\tdb, err = database.OpenDB(\"leveldb\", \"tstdbopmode\")\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to re-open created db, err %v\", err)\n\t}\n\tassertAddrIndexTipIsUpdated(db, t, newestSha, newestBlockIdx)\n\n\t// Delete the entire index.\n\terr = db.PurgeAddrIndex()\n\tif err != nil {\n\t\tt.Fatalf(\"Couldn't delete address index, err %v\", err)\n\t}\n\n\t// Former index should no longer exist.\n\ttxReplies, _, err = db.FetchTxsForAddr(testAddrs[0], 0, 1000, false)\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to fetch transactions for address: %v\", err)\n\t}\n\tif len(txReplies) != 0 {\n\t\tt.Fatalf(\"Address index was not successfully deleted. \"+\n\t\t\t\"Should have 0 tx's indexed, %v were returned.\",\n\t\t\tlen(txReplies))\n\t}\n\n\t// Tip should be blanked out.\n\tif _, _, err := db.FetchAddrIndexTip(); err != database.ErrAddrIndexDoesNotExist {\n\t\tt.Fatalf(\"Address index was not fully deleted.\")\n\t}\n\n}", "func TestReaddirOverlayFrozen(t *testing.T) {\n\tctx := contexttest.Context(t)\n\n\t// Create an overlay with two directories, each with two files.\n\tupper := newTestRamfsDir(ctx, []dirContent{{name: \"upper-file1\"}, {name: \"upper-file2\"}}, nil)\n\tlower := newTestRamfsDir(ctx, []dirContent{{name: \"lower-file1\"}, {name: \"lower-file2\"}}, nil)\n\toverlayInode := fs.NewTestOverlayDir(ctx, upper, lower, false)\n\n\t// Set that overlay as the root.\n\troot := fs.NewDirent(ctx, overlayInode, \"root\")\n\tctx = &rootContext{\n\t\tContext: ctx,\n\t\troot: root,\n\t}\n\n\t// Check that calling Readdir on the root now returns all 4 files (2\n\t// from each layer in the overlay).\n\trootFile, err := root.Inode.GetFile(ctx, root, fs.FileFlags{Read: true})\n\tif err != nil {\n\t\tt.Fatalf(\"root.Inode.GetFile failed: %v\", err)\n\t}\n\tdefer rootFile.DecRef()\n\tser := &fs.CollectEntriesSerializer{}\n\tif err := rootFile.Readdir(ctx, ser); err != nil {\n\t\tt.Fatalf(\"rootFile.Readdir failed: %v\", err)\n\t}\n\tif got, want := ser.Order, []string{\".\", \"..\", \"lower-file1\", \"lower-file2\", \"upper-file1\", \"upper-file2\"}; !reflect.DeepEqual(got, want) {\n\t\tt.Errorf(\"Readdir got names %v, want %v\", got, want)\n\t}\n\n\t// Readdir should have been called on upper and lower.\n\tupperDir := upper.InodeOperations.(*dir)\n\tlowerDir := lower.InodeOperations.(*dir)\n\tif !upperDir.ReaddirCalled {\n\t\tt.Errorf(\"upperDir.ReaddirCalled got %v, want true\", upperDir.ReaddirCalled)\n\t}\n\tif !lowerDir.ReaddirCalled {\n\t\tt.Errorf(\"lowerDir.ReaddirCalled got %v, want true\", lowerDir.ReaddirCalled)\n\t}\n\n\t// Reset.\n\tupperDir.ReaddirCalled = false\n\tlowerDir.ReaddirCalled = false\n\n\t// Take references on \"upper-file1\" and \"lower-file1\", pinning them in\n\t// the dirent tree.\n\tfor _, name := range []string{\"upper-file1\", \"lower-file1\"} {\n\t\tif _, err := root.Walk(ctx, root, name); err != nil {\n\t\t\tt.Fatalf(\"root.Walk(%q) failed: %v\", name, err)\n\t\t}\n\t\t// Don't drop a reference on the returned dirent so that it\n\t\t// will stay in the tree.\n\t}\n\n\t// Freeze the dirent tree.\n\troot.Freeze()\n\n\t// Seek back to the beginning of the file.\n\tif _, err := rootFile.Seek(ctx, fs.SeekSet, 0); err != nil {\n\t\tt.Fatalf(\"error seeking to beginning of directory: %v\", err)\n\t}\n\n\t// Calling Readdir on the root now will return only the pinned\n\t// 
children.\n\tser = &fs.CollectEntriesSerializer{}\n\tif err := rootFile.Readdir(ctx, ser); err != nil {\n\t\tt.Fatalf(\"rootFile.Readdir failed: %v\", err)\n\t}\n\tif got, want := ser.Order, []string{\".\", \"..\", \"lower-file1\", \"upper-file1\"}; !reflect.DeepEqual(got, want) {\n\t\tt.Errorf(\"Readdir got names %v, want %v\", got, want)\n\t}\n\n\t// Readdir should NOT have been called on upper or lower.\n\tif upperDir.ReaddirCalled {\n\t\tt.Errorf(\"upperDir.ReaddirCalled got %v, want false\", upperDir.ReaddirCalled)\n\t}\n\tif lowerDir.ReaddirCalled {\n\t\tt.Errorf(\"lowerDir.ReaddirCalled got %v, want false\", lowerDir.ReaddirCalled)\n\t}\n}", "func Test_DoIterativeFindNodeSucc(t *testing.T) {\n\tN := len(instance)\n\tfor i := 0; i < N/k+1; i++ {\n\t\tfrom := (rand.Int() % (N - 1)) + 1\n\t\tto := (rand.Int() % (N - 1)) + 1\n\t\tfor to == from {\n\t\t\tto = (rand.Int() % (N - 1)) + 1\n\t\t}\n\t\tfmt.Printf(\n\t\t\t\"Looking for: %s\\nFrom: %s\\n\",\n\t\t\tinstance[from].NodeID.AsString(),\n\t\t\tinstance[to].NodeID.AsString())\n\t\tresult := instance[from].DoIterativeFindNode(instance[to].NodeID)\n\t\tassertContains(\n\t\t\tresult,\n\t\t\tinstance[to].NodeID.AsString(),\n\t\t\tfmt.Sprintf(\"Cannot find node %d from node %d\", to, from),\n\t\t\tt)\n\t}\n}", "func ListPinnedPullRequests(ctx *context.APIContext) {\n\t// swagger:operation GET /repos/{owner}/{repo}/pulls/pinned repository repoListPinnedPullRequests\n\t// ---\n\t// summary: List a repo's pinned pull requests\n\t// produces:\n\t// - application/json\n\t// parameters:\n\t// - name: owner\n\t// in: path\n\t// description: owner of the repo\n\t// type: string\n\t// required: true\n\t// - name: repo\n\t// in: path\n\t// description: name of the repo\n\t// type: string\n\t// required: true\n\t// responses:\n\t// \"200\":\n\t// \"$ref\": \"#/responses/PullRequestList\"\n\tissues, err := issues_model.GetPinnedIssues(ctx, ctx.Repo.Repository.ID, true)\n\tif err != nil {\n\t\tctx.Error(http.StatusInternalServerError, \"LoadPinnedPullRequests\", err)\n\t\treturn\n\t}\n\n\tapiPrs := make([]*api.PullRequest, len(issues))\n\tfor i, currentIssue := range issues {\n\t\tpr, err := currentIssue.GetPullRequest()\n\t\tif err != nil {\n\t\t\tctx.Error(http.StatusInternalServerError, \"GetPullRequest\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif err = pr.LoadIssue(ctx); err != nil {\n\t\t\tctx.Error(http.StatusInternalServerError, \"LoadIssue\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif err = pr.LoadAttributes(ctx); err != nil {\n\t\t\tctx.Error(http.StatusInternalServerError, \"LoadAttributes\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif err = pr.LoadBaseRepo(ctx); err != nil {\n\t\t\tctx.Error(http.StatusInternalServerError, \"LoadBaseRepo\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif err = pr.LoadHeadRepo(ctx); err != nil {\n\t\t\tctx.Error(http.StatusInternalServerError, \"LoadHeadRepo\", err)\n\t\t\treturn\n\t\t}\n\n\t\tapiPrs[i] = convert.ToAPIPullRequest(ctx, pr, ctx.Doer)\n\t}\n\n\tctx.JSON(http.StatusOK, &apiPrs)\n}", "func TestHiddenWithPK1(t *testing.T) {\n\tdefer testutils.AfterTest(t)()\n\ttestutils.EnsureNoLeak(t)\n\tctx := context.Background()\n\n\ttae := testutil.InitTestDB(ctx, ModuleName, t, nil)\n\tdefer tae.Close()\n\tschema := catalog.MockSchemaAll(13, 2)\n\tschema.BlockMaxRows = 10\n\tschema.SegmentMaxBlocks = 2\n\tbat := catalog.MockBatch(schema, int(schema.BlockMaxRows*4))\n\tdefer bat.Close()\n\tbats := bat.Split(10)\n\n\ttxn, _, rel := testutil.CreateRelationNoCommit(t, tae, testutil.DefaultTestDB, schema, true)\n\terr := 
rel.Append(context.Background(), bats[0])\n\t{\n\t\toffsets := make([]uint32, 0)\n\t\tit := rel.MakeBlockIt()\n\t\tfor it.Valid() {\n\t\t\tblk := it.GetBlock()\n\t\t\tview, err := blk.GetColumnDataById(context.Background(), schema.PhyAddrKey.Idx)\n\t\t\tassert.NoError(t, err)\n\t\t\tdefer view.Close()\n\t\t\tfp := blk.Fingerprint()\n\t\t\t_ = view.GetData().Foreach(func(v any, _ bool, _ int) (err error) {\n\t\t\t\trid := v.(types.Rowid)\n\t\t\t\tbid, offset := rid.Decode()\n\t\t\t\tt.Logf(\"bid=%s,offset=%d\", bid.String(), offset)\n\t\t\t\tassert.Equal(t, fp.BlockID, bid)\n\t\t\t\toffsets = append(offsets, offset)\n\t\t\t\treturn\n\t\t\t}, nil)\n\t\t\tit.Next()\n\t\t}\n\t\t// sort.Slice(offsets, func(i, j int) bool { return offsets[i] < offsets[j] })\n\t\t// assert.Equal(t, []uint32{0, 1, 2, 3}, offsets)\n\t}\n\tassert.NoError(t, err)\n\tassert.NoError(t, txn.Commit(context.Background()))\n\n\ttxn, rel = testutil.GetDefaultRelation(t, tae, schema.Name)\n\t{\n\t\tblk := testutil.GetOneBlock(rel)\n\t\tview, err := blk.GetColumnDataByName(context.Background(), catalog.PhyAddrColumnName)\n\t\tassert.NoError(t, err)\n\t\tdefer view.Close()\n\t\toffsets := make([]uint32, 0)\n\t\tfp := blk.Fingerprint()\n\t\tt.Log(fp.String())\n\t\t_ = view.GetData().Foreach(func(v any, _ bool, _ int) (err error) {\n\t\t\trid := v.(types.Rowid)\n\t\t\tbid, offset := rid.Decode()\n\t\t\tt.Logf(\",bid=%s,offset=%d\", bid, offset)\n\t\t\tassert.Equal(t, fp.BlockID, bid)\n\t\t\toffsets = append(offsets, offset)\n\t\t\treturn\n\t\t}, nil)\n\t\tsort.Slice(offsets, func(i, j int) bool { return offsets[i] < offsets[j] })\n\t\tassert.Equal(t, []uint32{0, 1, 2, 3}, offsets)\n\t}\n\n\tassert.NoError(t, err)\n\tassert.NoError(t, txn.Commit(context.Background()))\n\n\ttxn, rel = testutil.GetDefaultRelation(t, tae, schema.Name)\n\terr = rel.Append(context.Background(), bats[1])\n\tassert.NoError(t, err)\n\terr = rel.Append(context.Background(), bats[2])\n\tassert.NoError(t, err)\n\terr = rel.Append(context.Background(), bats[3])\n\tassert.NoError(t, err)\n\terr = rel.Append(context.Background(), bats[4])\n\tassert.NoError(t, err)\n\terr = rel.Append(context.Background(), bats[5])\n\tassert.NoError(t, err)\n\tassert.NoError(t, txn.Commit(context.Background()))\n\n\ttestutil.CompactBlocks(t, 0, tae, \"db\", schema, false)\n\n\ttxn, rel = testutil.GetDefaultRelation(t, tae, schema.Name)\n\tvar segMeta *catalog.SegmentEntry\n\t{\n\t\tit := rel.MakeBlockIt()\n\t\tfor it.Valid() {\n\t\t\tblk := it.GetBlock()\n\t\t\tview, err := blk.GetColumnDataByName(context.Background(), catalog.PhyAddrColumnName)\n\t\t\tassert.NoError(t, err)\n\t\t\tdefer view.Close()\n\t\t\toffsets := make([]uint32, 0)\n\t\t\tmeta := blk.GetMeta().(*catalog.BlockEntry)\n\t\t\tt.Log(meta.String())\n\t\t\t_ = view.GetData().Foreach(func(v any, _ bool, _ int) (err error) {\n\t\t\t\trid := v.(types.Rowid)\n\t\t\t\tbid, offset := rid.Decode()\n\t\t\t\t// t.Logf(\"sid=%d,bid=%d,offset=%d\", sid, bid, offset)\n\t\t\t\tassert.Equal(t, meta.ID, bid)\n\t\t\t\toffsets = append(offsets, offset)\n\t\t\t\treturn\n\t\t\t}, nil)\n\t\t\tsort.Slice(offsets, func(i, j int) bool { return offsets[i] < offsets[j] })\n\t\t\tif meta.IsAppendable() {\n\t\t\t\tassert.Equal(t, []uint32{0, 1, 2, 3}, offsets)\n\t\t\t} else {\n\t\t\t\tsegMeta = meta.GetSegment()\n\t\t\t\tassert.Equal(t, []uint32{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, offsets)\n\t\t\t}\n\t\t\tit.Next()\n\t\t}\n\t}\n\n\tassert.NoError(t, txn.Commit(context.Background()))\n\t{\n\t\tseg := segMeta.GetSegmentData()\n\t\tfactory, 
taskType, scopes, err := seg.BuildCompactionTaskFactory()\n\t\tassert.NoError(t, err)\n\t\ttask, err := tae.Runtime.Scheduler.ScheduleMultiScopedTxnTask(tasks.WaitableCtx, taskType, scopes, factory)\n\t\tassert.NoError(t, err)\n\t\terr = task.WaitDone()\n\t\tassert.NoError(t, err)\n\t}\n\n\ttxn, rel = testutil.GetDefaultRelation(t, tae, schema.Name)\n\t{\n\t\tit := rel.MakeBlockIt()\n\t\tfor it.Valid() {\n\t\t\tblk := it.GetBlock()\n\t\t\tview, err := blk.GetColumnDataByName(context.Background(), catalog.PhyAddrColumnName)\n\t\t\tassert.NoError(t, err)\n\t\t\tdefer view.Close()\n\t\t\toffsets := make([]uint32, 0)\n\t\t\tmeta := blk.GetMeta().(*catalog.BlockEntry)\n\t\t\tt.Log(meta.String())\n\t\t\tt.Log(meta.GetSegment().String())\n\t\t\t_ = view.GetData().Foreach(func(v any, _ bool, _ int) (err error) {\n\t\t\t\trid := v.(types.Rowid)\n\t\t\t\tbid, offset := rid.Decode()\n\t\t\t\t// t.Logf(\"sid=%d,bid=%d,offset=%d\", sid, bid, offset)\n\t\t\t\tassert.Equal(t, meta.ID, bid)\n\t\t\t\toffsets = append(offsets, offset)\n\t\t\t\treturn\n\t\t\t}, nil)\n\t\t\tsort.Slice(offsets, func(i, j int) bool { return offsets[i] < offsets[j] })\n\t\t\tif meta.IsAppendable() {\n\t\t\t\tassert.Equal(t, []uint32{0, 1, 2, 3}, offsets)\n\t\t\t} else {\n\t\t\t\tassert.Equal(t, []uint32{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, offsets)\n\t\t\t}\n\t\t\tit.Next()\n\t\t}\n\t}\n\n\tassert.NoError(t, txn.Commit(context.Background()))\n\tt.Log(tae.Catalog.SimplePPString(common.PPL1))\n}", "func TestAddNeighbor(t *testing.T) {\n\tcityConnects := map[string]map[string]string{}\n\n\t// Add city2 to the north of city1. For city1, only north should be city2, other direction should point to itself\n\taddNeighbor(\"city1\", \"city2\", \"north\", cityConnects)\n\texpected := map[string]map[string]string{\"city1\": map[string]string{\"east\":\"city1\", \"north\":\"city2\", \"south\":\"city1\", \"west\":\"city1\"},\n\t\"city2\": map[string]string{\"east\":\"city2\", \"north\":\"city2\", \"south\":\"city1\", \"west\":\"city2\"}}\n\tif !reflect.DeepEqual(expected, cityConnects) {\n\t\tt.Errorf(\"The city connections are incorrect. Expected %s, but was %s.\", expected, cityConnects)\n\t}\n\n\t// Add city3 to the north of city2. For city2, north should be city3, south should be city1, other direction should point to itself\n\taddNeighbor(\"city3\", \"city2\", \"south\", cityConnects)\n\texpected = map[string]map[string]string{\"city1\": map[string]string{\"east\":\"city1\", \"north\":\"city2\", \"south\":\"city1\", \"west\":\"city1\"},\n\t\"city2\": map[string]string{\"east\":\"city2\", \"north\":\"city3\", \"south\":\"city1\", \"west\":\"city2\"},\n\t\"city3\": map[string]string{\"east\":\"city3\", \"north\":\"city3\", \"south\":\"city2\", \"west\":\"city3\"}}\n\tif !reflect.DeepEqual(expected, cityConnects) {\n\t\tt.Errorf(\"The city connections are incorrect. 
Expected %v, but was %v.\", expected, cityConnects)\n\t}\n}", "func (c *Client) StoriesTogglePinned(ctx context.Context, request *StoriesTogglePinnedRequest) ([]int, error) {\n\tvar result IntVector\n\n\tif err := c.rpc.Invoke(ctx, request, &result); err != nil {\n\t\treturn nil, err\n\t}\n\treturn []int(result.Elems), nil\n}", "func TestVarIndirection(t *testing.T) {\n\ts := new(S);\n\t// initialized by hand for clarity.\n\ts.innerPointerT = &t1;\n\n\tvar buf bytes.Buffer;\n\tinput := \"{.section @}{innerPointerT}{.end}\";\n\ttmpl, err := Parse(input, nil);\n\tif err != nil {\n\t\tt.Fatal(\"unexpected parse error:\", err)\n\t}\n\terr = tmpl.Execute(s, &buf);\n\tif err != nil {\n\t\tt.Fatal(\"unexpected execute error:\", err)\n\t}\n\texpect := fmt.Sprintf(\"%v\", &t1);\t// output should be hex address of t1\n\tif buf.String() != expect {\n\t\tt.Errorf(\"for %q: expected %q got %q\", input, expect, buf.String())\n\t}\n}", "func (p *pinner) IndirectKeys() map[util.Key]uint64 {\n\treturn p.indirPin.GetRefs()\n}", "func TestCopy_structUnexportedPtrMap(t *testing.T) {\n\ttype Foo interface{}\n\n\ttype Sub struct {\n\t\tList []Foo\n\t}\n\n\ttype test struct {\n\t\tValue string\n\n\t\tprivate *Sub\n\t}\n\n\tv := test{\n\t\tValue: \"foo\",\n\t\tprivate: &Sub{\n\t\t\tList: []Foo{&Sub{}},\n\t\t},\n\t}\n\n\tresult, err := Copy(v)\n\tif err != nil {\n\t\tt.Fatalf(\"err: %s\", err)\n\t}\n\n\t// private should not be copied\n\tv.private = nil\n\tif !reflect.DeepEqual(result, v) {\n\t\tt.Fatalf(\"bad:\\n\\n%#v\\n\\n%#v\", result, v)\n\t}\n}", "func TestTypeResolutionManyToOne(t *testing.T) {\n\ttests := []struct {\n\t\tname string // name is the test identifier.\n\t\t// inLeaves is the set of yang.Entry pointers that are to have types generated\n\t\t// for them.\n\t\tinLeaves []*yang.Entry\n\t\t// inCompressOCPaths enables or disables \"CompressOCPaths\" for the YANGCodeGenerator\n\t\t// instance used for the test.\n\t\tinCompressOCPaths bool\n\t\tinSkipEnumDedup bool\n\t\t// wantTypes is a map, keyed by the path of the yang.Entry within inLeaves and\n\t\t// describing the ygen.MappedType that is expected to be output.\n\t\twantTypes map[string]*ygen.MappedType\n\t}{{\n\t\tname: \"identity with multiple identityref leaves\",\n\t\tinLeaves: []*yang.Entry{{\n\t\t\tName: \"leaf-one\",\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"identityref\",\n\t\t\t\tKind: yang.Yidentityref,\n\t\t\t\tIdentityBase: &yang.Identity{\n\t\t\t\t\tName: \"base-identity\",\n\t\t\t\t\tParent: &yang.Module{\n\t\t\t\t\t\tName: \"test-module\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tBase: &yang.Type{\n\t\t\t\t\tName: \"base-identity\",\n\t\t\t\t\tParent: &yang.Module{Name: \"test-module\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tParent: &yang.Entry{Name: \"test-module\"},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"test-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}, {\n\t\t\tName: \"leaf-two\",\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"identityref\",\n\t\t\t\tKind: yang.Yidentityref,\n\t\t\t\tIdentityBase: &yang.Identity{\n\t\t\t\t\tName: \"base-identity\",\n\t\t\t\t\tParent: &yang.Module{\n\t\t\t\t\t\tName: \"test-module\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tBase: &yang.Type{\n\t\t\t\t\tName: \"base-identity\",\n\t\t\t\t\tParent: &yang.Module{Name: \"test-module\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tParent: &yang.Entry{Name: \"test-module\"},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"test-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}},\n\t\twantTypes: 
map[string]*ygen.MappedType{\n\t\t\t\"/test-module/leaf-one\": {NativeType: \"E_TestModule_BaseIdentity\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t\t\"/test-module/leaf-two\": {NativeType: \"E_TestModule_BaseIdentity\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t},\n\t}, {\n\t\tname: \"typedef with multiple references\",\n\t\tinLeaves: []*yang.Entry{{\n\t\t\tName: \"leaf-one\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"definedType\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t\tBase: &yang.Type{\n\t\t\t\t\tName: \"enumeration\",\n\t\t\t\t\tParent: &yang.Module{Name: \"base-module\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tNode: &yang.Enum{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}, {\n\t\t\tName: \"leaf-two\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"definedType\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t\tBase: &yang.Type{\n\t\t\t\t\tName: \"enumeration\",\n\t\t\t\t\tParent: &yang.Module{Name: \"base-module\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tNode: &yang.Enum{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}},\n\t\twantTypes: map[string]*ygen.MappedType{\n\t\t\t\"/base-module/leaf-one\": {NativeType: \"E_BaseModule_DefinedType\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t\t\"/base-module/leaf-two\": {NativeType: \"E_BaseModule_DefinedType\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t},\n\t}, {\n\t\tname: \"enumeration defined in grouping used in multiple places - deduplication enabled\",\n\t\tinLeaves: []*yang.Entry{{\n\t\t\tName: \"leaf-one\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"enumeration\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}, {\n\t\t\tName: \"leaf-two\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"enumeration\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}},\n\t\twantTypes: map[string]*ygen.MappedType{\n\t\t\t\"/base-module/leaf-one\": {NativeType: \"E_BaseModule_LeafOne\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t\t\"/base-module/leaf-two\": {NativeType: \"E_BaseModule_LeafOne\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t},\n\t}, {\n\t\tname: \"enumeration defined in grouping used in multiple places - deduplication disabled\",\n\t\tinLeaves: []*yang.Entry{{\n\t\t\tName: \"leaf-one\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"enumeration\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: \"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}, {\n\t\t\tName: \"leaf-two\",\n\t\t\tParent: &yang.Entry{\n\t\t\t\tName: \"base-module\",\n\t\t\t},\n\t\t\tType: &yang.YangType{\n\t\t\t\tName: \"enumeration\",\n\t\t\t\tKind: yang.Yenum,\n\t\t\t\tEnum: &yang.EnumType{},\n\t\t\t},\n\t\t\tNode: &yang.Leaf{\n\t\t\t\tParent: &yang.Module{\n\t\t\t\t\tName: 
\"base-module\",\n\t\t\t\t},\n\t\t\t},\n\t\t}},\n\t\tinSkipEnumDedup: true,\n\t\twantTypes: map[string]*ygen.MappedType{\n\t\t\t\"/base-module/leaf-one\": {NativeType: \"E_BaseModule_LeafOne\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t\t\"/base-module/leaf-two\": {NativeType: \"E_BaseModule_LeafTwo\", IsEnumeratedValue: true, ZeroValue: \"0\"},\n\t\t},\n\t}}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ts := NewGoLangMapper(true)\n\t\t\tif err := s.InjectEnumSet(enumMapFromEntries(tt.inLeaves), tt.inCompressOCPaths, false, tt.inSkipEnumDedup, true, true, true, nil); err != nil {\n\t\t\t\tt.Fatalf(\"findEnumSet failed: %v\", err)\n\t\t\t}\n\n\t\t\tgotTypes := make(map[string]*ygen.MappedType)\n\t\t\tfor _, leaf := range tt.inLeaves {\n\t\t\t\tmtype, err := s.yangTypeToGoType(resolveTypeArgs{yangType: leaf.Type, contextEntry: leaf}, tt.inCompressOCPaths, tt.inSkipEnumDedup, true, true, nil)\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Errorf(\"%s: yangTypeToGoType(%v, %v): got unexpected err: %v, want: nil\", tt.name, leaf.Type, leaf, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tgotTypes[leaf.Path()] = mtype\n\t\t\t}\n\n\t\t\tif diff := pretty.Compare(gotTypes, tt.wantTypes); diff != \"\" {\n\t\t\t\tt.Errorf(\"%s: yangTypesToGoTypes(...): incorrect output returned, diff (-got,+want):\\n%s\",\n\t\t\t\t\ttt.name, diff)\n\t\t\t}\n\t\t})\n\t}\n}", "func (a *API) Pin(path string) error {\n\treturn a.Request(\"pin/add\", path).\n\t\tOption(\"recursive\", true).\n\t\tExec(context.Background(), nil)\n}", "func ListPinnedIssues(ctx *context.APIContext) {\n\t// swagger:operation GET /repos/{owner}/{repo}/issues/pinned repository repoListPinnedIssues\n\t// ---\n\t// summary: List a repo's pinned issues\n\t// produces:\n\t// - application/json\n\t// parameters:\n\t// - name: owner\n\t// in: path\n\t// description: owner of the repo\n\t// type: string\n\t// required: true\n\t// - name: repo\n\t// in: path\n\t// description: name of the repo\n\t// type: string\n\t// required: true\n\t// responses:\n\t// \"200\":\n\t// \"$ref\": \"#/responses/IssueList\"\n\tissues, err := issues_model.GetPinnedIssues(ctx, ctx.Repo.Repository.ID, false)\n\tif err != nil {\n\t\tctx.Error(http.StatusInternalServerError, \"LoadPinnedIssues\", err)\n\t\treturn\n\t}\n\n\tctx.JSON(http.StatusOK, convert.ToAPIIssueList(ctx, issues))\n}", "func lspTests(t testing.TB, ctx context.Context, c *jsonrpc2.Conn, root *gituri.URI, wantHover, wantDefinition, wantXDefinition map[string]string, wantReferences, wantSymbols map[string][]string, wantXDependencies string, wantXReferences map[*lsext.WorkspaceReferencesParams][]string, wantXPackages []string) {\n\tfor pos, want := range wantHover {\n\t\ttbRun(t, fmt.Sprintf(\"hover-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\thoverTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor pos, want := range wantDefinition {\n\t\ttbRun(t, fmt.Sprintf(\"definition-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\tdefinitionTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\tfor pos, want := range wantXDefinition {\n\t\ttbRun(t, fmt.Sprintf(\"xdefinition-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\txdefinitionTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor pos, want := range wantReferences {\n\t\ttbRun(t, fmt.Sprintf(\"references-%s\", pos), func(t testing.TB) {\n\t\t\treferencesTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor query, want := range wantSymbols {\n\t\ttbRun(t, 
fmt.Sprintf(\"symbols(q=%q)\", query), func(t testing.TB) {\n\t\t\tsymbolsTest(t, ctx, c, root, query, want)\n\t\t})\n\t}\n\n\tif wantXDependencies != \"\" {\n\t\ttbRun(t, fmt.Sprintf(\"xdependencies-\"+wantXDependencies), func(t testing.TB) {\n\t\t\tvar deps []lspext.DependencyReference\n\t\t\terr := c.Call(ctx, \"workspace/xdependencies\", struct{}{}, &deps)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tjsonTest(t, deps, \"xdependencies-\"+wantXDependencies)\n\t\t})\n\t}\n\n\tfor params, want := range wantXReferences {\n\t\ttbRun(t, fmt.Sprintf(\"xreferences\"), func(t testing.TB) {\n\t\t\tworkspaceReferencesTest(t, ctx, c, root, *params, want)\n\t\t})\n\t}\n\n\tif wantXPackages != nil {\n\t\ttbRun(t, \"xpackages\", func(t testing.TB) {\n\t\t\tworkspacePackagesTest(t, ctx, c, root, wantXPackages)\n\t\t})\n\t}\n}", "func testCheckDDCloudPortListMatches(name string, expected compute.PortList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_port_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"not found: %s\", name)\n\t\t}\n\n\t\tportListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\tportList, err := client.GetPortList(portListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: get port list: %s\", err)\n\t\t}\n\t\tif portList == nil {\n\t\t\treturn fmt.Errorf(\"bad: port list not found with Id '%s'\", portListID)\n\t\t}\n\n\t\tif portList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has name '%s' (expected '%s')\", portListID, portList.Name, expected.Name)\n\t\t}\n\n\t\tif portList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has description '%s' (expected '%s')\", portListID, portList.Description, expected.Description)\n\t\t}\n\n\t\tif len(portList.Ports) != len(expected.Ports) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d ports or port ranges (expected '%d')\", portListID, len(portList.Ports), len(expected.Ports))\n\t\t}\n\n\t\terr = comparePortListEntries(expected, *portList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(portList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d child lists (expected '%d')\", portListID, len(portList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range portList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := portList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: port list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\tportListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func TestDROStructuralValidatorMemberNotFound(t *testing.T) {\n\tvalidator := NewDROStructuralValidator(newMockRepository(nil))\n\tobj := testObjectResource([]string{\"NotfindableID\"})\n\terr := validator.ValidateResource(obj)\n\tassert.NotNil(t, err)\n}", "func TestLinks(t *testing.T) {\n\tt.Parallel()\n\n\ttree := writeTree(t, `\n-- a.go --\n// Link in package doc: https://pkg.go.dev/\npackage a\n\n// Link in internal comment: https://go.dev/cl\n\n// Doc comment link: https://blog.go.dev/\nfunc f() {}\n`)\n\t// no arguments\n\t{\n\t\tres := gopls(t, tree, \"links\")\n\t\tres.checkExit(false)\n\t\tres.checkStderr(\"expects 1 
argument\")\n\t}\n\t// success\n\t{\n\t\tres := gopls(t, tree, \"links\", \"a.go\")\n\t\tres.checkExit(true)\n\t\tres.checkStdout(\"https://go.dev/cl\")\n\t\tres.checkStdout(\"https://pkg.go.dev\")\n\t\tres.checkStdout(\"https://blog.go.dev/\")\n\t}\n\t// -json\n\t{\n\t\tres := gopls(t, tree, \"links\", \"-json\", \"a.go\")\n\t\tres.checkExit(true)\n\t\tres.checkStdout(\"https://pkg.go.dev\")\n\t\tres.checkStdout(\"https://go.dev/cl\")\n\t\tres.checkStdout(\"https://blog.go.dev/\") // at 5:21-5:41\n\t\tvar links []protocol.DocumentLink\n\t\tif res.toJSON(&links) {\n\t\t\t// Check just one of the three locations.\n\t\t\tif got, want := fmt.Sprint(links[2].Range), \"5:21-5:41\"; got != want {\n\t\t\t\tt.Errorf(\"wrong link location: got %v, want %v\", got, want)\n\t\t\t}\n\t\t}\n\t}\n}", "func TestFindUnclusteredNeighbours(t *testing.T) {\n\tlog.Println(\"Executing TestFindUnclusteredNeighbours\")\n\tclusterList := []Clusterable{\n\t\tSimpleClusterable{0},\n\t\tSimpleClusterable{1},\n\t\tSimpleClusterable{-1},\n\t\tSimpleClusterable{1.5},\n\t\tSimpleClusterable{-0.5},\n\t}\n\tvisited := make(map[string]bool)\n\teps := 1.0\n\tneighbours := findUnclusteredNeighbours(clusterList[0], clusterList, visited, eps)\n\n\tassertEquals(t, 4, len(neighbours))\n}", "func Test_ServiceInstancePath_HasDirectParent_fail3(t *testing.T) {\n\n\t// Setup the DB for the UT environment\n\tdir, db, err := utsetup()\n\tif err != nil {\n\t\tt.Errorf(\"Error setting up UT DB: %v\", err)\n\t}\n\n\tdefer cleanTestDir(dir)\n\n\t// Setup initial variable values\n\tparentURL := \"url1\"\n\tparentOrg := \"myorg\"\n\tparentVersion := \"1.0.0\"\n\tchildURL := \"url2\"\n\tchildOrg := \"childorg2\"\n\tchildVersion := \"2.0.0\"\n\tchild2URL := \"url3\"\n\tchild2Org := \"child2Org\"\n\tchild2Version := \"3.0.0\"\n\n\t// Establish the dependency path objects\n\tparent := NewServiceInstancePathElement(parentURL, parentOrg, parentVersion)\n\tchild := NewServiceInstancePathElement(childURL, childOrg, childVersion)\n\tchild2 := NewServiceInstancePathElement(child2URL, child2Org, child2Version)\n\n\tnotParent := NewServiceInstancePathElement(\"other\", parentOrg, parentVersion)\n\n\tdepPath := []ServiceInstancePathElement{*parent, *child, *child2}\n\tdp2 := []ServiceInstancePathElement{*parent, *child2}\n\n\t// Create the test microservice instance to represent the child\n\tif msi, err := NewMicroserviceInstance(db, child2URL, child2Org, child2Version, \"1234\", depPath, false); err != nil {\n\t\tt.Errorf(\"Error creating instance: %v\", err)\n\t} else if _, err := UpdateMSInstanceAddDependencyPath(db, msi.GetKey(), &dp2); err != nil {\n\t\tt.Errorf(\"Error updating instance: %v\", err)\n\t} else if msi.HasDirectParent(notParent) {\n\t\tt.Errorf(\"Child %v does not have direct parent: %v %v\", child2, depPath, dp2)\n\t}\n}", "func TestGossipMoveNode(t *testing.T) {\n\tdefer leaktest.AfterTest(t)()\n\tstopper := stop.NewStopper()\n\tdefer stopper.Stop(context.Background())\n\tclock := hlc.NewClock(hlc.UnixNano, time.Nanosecond)\n\trpcContext := rpc.NewInsecureTestingContext(clock, stopper)\n\tg := NewTest(1, rpcContext, rpc.NewServer(rpcContext), stopper, metric.NewRegistry(), zonepb.DefaultZoneConfigRef())\n\tvar nodes []*roachpb.NodeDescriptor\n\tfor i := 1; i <= 3; i++ {\n\t\tnode := &roachpb.NodeDescriptor{\n\t\t\tNodeID: roachpb.NodeID(i),\n\t\t\tAddress: util.MakeUnresolvedAddr(\"tcp\", fmt.Sprintf(\"1.1.1.1:%d\", i)),\n\t\t}\n\t\tif err := g.SetNodeDescriptor(node); err != nil {\n\t\t\tt.Fatalf(\"failed setting node descriptor 
%+v: %s\", node, err)\n\t\t}\n\t\tnodes = append(nodes, node)\n\t}\n\tfor _, node := range nodes {\n\t\tif val, err := g.GetNodeDescriptor(node.NodeID); err != nil {\n\t\t\tt.Fatal(err)\n\t\t} else if !node.Equal(val) {\n\t\t\tt.Fatalf(\"expected node %+v, got %+v\", node, val)\n\t\t}\n\t}\n\n\t// Move node 2 to the address of node 3.\n\tmovedNode := nodes[1]\n\treplacedNode := nodes[2]\n\tmovedNode.Address = replacedNode.Address\n\tif err := g.SetNodeDescriptor(movedNode); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttestutils.SucceedsSoon(t, func() error {\n\t\tif val, err := g.GetNodeDescriptor(movedNode.NodeID); err != nil {\n\t\t\treturn err\n\t\t} else if !movedNode.Equal(val) {\n\t\t\treturn fmt.Errorf(\"expected node %+v, got %+v\", movedNode, val)\n\t\t}\n\t\treturn nil\n\t})\n}", "func (s *SWIM) handleIndirectPing(msg pb.Message) {\n\tid := msg.Id\n\n\t// retrieve piggyback data from pbkStore\n\tmbrStatsMsg, err := s.mbrStatsMsgStore.Get()\n\tif err != nil {\n\t\tiLogger.Error(nil, err.Error())\n\t}\n\n\t// address of message source member\n\tsrcAddr := msg.Address\n\n\t// address of indirect-ping's target\n\ttargetAddr := msg.Payload.(*pb.Message_IndirectPing).IndirectPing.Target\n\n\tpingId := xid.New().String()\n\tping := createPingMessage(pingId, s.member.Address(), &mbrStatsMsg)\n\n\t// first send the ping to target member, if target member could not send-back\n\t// ack message for whatever reason send nack message to source member,\n\t// if successfully received ack message from target, then send back ack message\n\t// to source member\n\tif _, err := s.messageEndpoint.SyncSend(targetAddr, ping); err != nil {\n\t\tnack := createNackMessage(id, srcAddr, &mbrStatsMsg)\n\t\tif err := s.messageEndpoint.Send(srcAddr, nack); err != nil {\n\t\t\tiLogger.Error(nil, err.Error())\n\t\t}\n\t\treturn\n\t}\n\n\tack := createAckMessage(id, srcAddr, &mbrStatsMsg)\n\tif err := s.messageEndpoint.Send(srcAddr, ack); err != nil {\n\t\tiLogger.Error(nil, err.Error())\n\t}\n}", "func Test_ServiceInstancePath_HasDirectParent_simple(t *testing.T) {\n\n\t// Setup the DB for the UT environment\n\tdir, db, err := utsetup()\n\tif err != nil {\n\t\tt.Errorf(\"Error setting up UT DB: %v\", err)\n\t}\n\n\tdefer cleanTestDir(dir)\n\n\t// Setup initial variable values\n\tparentURL := \"url1\"\n\tparentVersion := \"1.0.0\"\n\tparentOrg := \"myorg\"\n\tchildURL := \"url2\"\n\tchildOrg := \"childorg\"\n\tchildVersion := \"2.0.0\"\n\n\t// Establish the dependency path objects\n\tparent := NewServiceInstancePathElement(parentURL, parentOrg, parentVersion)\n\tchild := NewServiceInstancePathElement(childURL, childOrg, childVersion)\n\n\tdepPath := []ServiceInstancePathElement{*parent, *child}\n\t// Create the test microservice instance to represent the child\n\tif msi, err := NewMicroserviceInstance(db, childURL, childOrg, childVersion, \"1234\", depPath, false); err != nil {\n\t\tt.Errorf(\"Error creating instance: %v\", err)\n\t} else if !msi.HasDirectParent(parent) {\n\t\tt.Errorf(\"Child %v has direct parent: %v\", child, depPath)\n\t}\n}", "func Test_ServiceInstancePath_HasDirectParent_fail1(t *testing.T) {\n\n\t// Setup the DB for the UT environment\n\tdir, db, err := utsetup()\n\tif err != nil {\n\t\tt.Errorf(\"Error setting up UT DB: %v\", err)\n\t}\n\n\tdefer cleanTestDir(dir)\n\n\t// Setup initial variable values\n\tparentURL := \"url1\"\n\tparentOrg := \"myorg\"\n\tparentVersion := \"1.0.0\"\n\tchildURL := \"url2\"\n\tchildOrg := \"childorg\"\n\tchildVersion := \"2.0.0\"\n\n\t// Establish the dependency 
path objects\n\tparent := NewServiceInstancePathElement(parentURL, parentOrg, parentVersion)\n\tchild := NewServiceInstancePathElement(childURL, childOrg, childVersion)\n\n\tnotParent := NewServiceInstancePathElement(\"other\", parentOrg, parentVersion)\n\n\tdepPath := []ServiceInstancePathElement{*parent, *child}\n\t// Create the test microservice instance to represent the child\n\tif msi, err := NewMicroserviceInstance(db, childURL, childOrg, childVersion, \"1234\", depPath, false); err != nil {\n\t\tt.Errorf(\"Error creating instance: %v\", err)\n\t} else if msi.HasDirectParent(notParent) {\n\t\tt.Errorf(\"Child %v does not have direct parent: %v\", child, depPath)\n\t}\n}", "func TestGetIntersectionNode(t *testing.T) {\n\tlistA := new(ListNode)\n\tlistB := new(ListNode)\n\tcom := &ListNode{Val: 2, Next: &ListNode{Val: 4}}\n\tlistA.Val = 0\n\tlistA.Next = &ListNode{Val: 9, Next: &ListNode{Val: 1, Next: com}}\n\tlistB.Val = 3\n\tlistB.Next = com\n\tlist := GetIntersectionNode(listA, listB)\n\tfmt.Println(list)\n\tfor list != nil {\n\t\tfmt.Println(list.Val)\n\t\tlist = list.Next\n\t}\n\n}", "func TestLoopinInUse(t *testing.T) {\n\tvar (\n\t\tpeer1 = route.Vertex{1}\n\t\tchan1 = lnwire.NewShortChanIDFromInt(1)\n\n\t\tpeer2 = route.Vertex{2}\n\t\tchan2 = lnwire.NewShortChanIDFromInt(2)\n\t)\n\n\ttests := []struct {\n\t\tname string\n\t\tongoingLoopOut *lnwire.ShortChannelID\n\t\tongoingLoopIn *route.Vertex\n\t\tfailedLoopIn *route.Vertex\n\t\texpectedErr error\n\t}{\n\t\t{\n\t\t\tname: \"swap allowed\",\n\t\t\tongoingLoopIn: &peer2,\n\t\t\tongoingLoopOut: &chan2,\n\t\t\tfailedLoopIn: &peer2,\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"conflicts with loop out\",\n\t\t\tongoingLoopOut: &chan1,\n\t\t\texpectedErr: newReasonError(ReasonLoopOut),\n\t\t},\n\t\t{\n\t\t\tname: \"conflicts with loop in\",\n\t\t\tongoingLoopIn: &peer1,\n\t\t\texpectedErr: newReasonError(ReasonLoopIn),\n\t\t},\n\t\t{\n\t\t\tname: \"previous failed loopin\",\n\t\t\tfailedLoopIn: &peer1,\n\t\t\texpectedErr: newReasonError(ReasonFailureBackoff),\n\t\t},\n\t}\n\n\tfor _, testCase := range tests {\n\t\ttraffic := newSwapTraffic()\n\n\t\tif testCase.ongoingLoopOut != nil {\n\t\t\ttraffic.ongoingLoopOut[*testCase.ongoingLoopOut] = true\n\t\t}\n\n\t\tif testCase.ongoingLoopIn != nil {\n\t\t\ttraffic.ongoingLoopIn[*testCase.ongoingLoopIn] = true\n\t\t}\n\n\t\tif testCase.failedLoopIn != nil {\n\t\t\ttraffic.failedLoopIn[*testCase.failedLoopIn] = testTime\n\t\t}\n\n\t\tbuilder := newLoopInBuilder(nil)\n\t\terr := builder.inUse(traffic, peer1, []lnwire.ShortChannelID{\n\t\t\tchan1,\n\t\t})\n\n\t\trequire.Equal(t, testCase.expectedErr, err)\n\t}\n}", "func indirectMap(tree []Node) []Node {\n\tif isTreeList(tree) {\n\t\t// I use square brackets here to emphasize what is 'tree'.\n\t\t// (a [(a)]) as is\n\t\t// (a [()]) indirect to (a ([]))\n\t\t// (a [(())]) indirect to (a ([()]))\n\t\tif t := tree[0].List; len(t) == 0 || !t[0].IsScalar() {\n\t\t\treturn t\n\t\t}\n\t}\n\treturn tree\n}", "func (db *DB) HasPin(root swarm.Address) (has bool, err error) {\n\tdur := captureDuration(time.Now())\n\tdefer func() {\n\t\tdb.metrics.MethodCallsDuration.WithLabelValues(\"pinstore\", \"HasPin\").Observe(dur())\n\t\tif err == nil {\n\t\t\tdb.metrics.MethodCalls.WithLabelValues(\"pinstore\", \"HasPin\", \"success\").Inc()\n\t\t} else {\n\t\t\tdb.metrics.MethodCalls.WithLabelValues(\"pinstore\", \"HasPin\", \"failure\").Inc()\n\t\t}\n\t}()\n\n\treturn pinstore.HasPin(db.repo.IndexStore(), root)\n}", "func 
TestListStacksWithMultiplePassphrases(t *testing.T) {\n\t// Login to a temp dir filestate backend\n\ttmpDir := t.TempDir()\n\tctx := context.Background()\n\tb, err := New(ctx, diagtest.LogSink(t), \"file://\"+filepath.ToSlash(tmpDir), nil)\n\tassert.NoError(t, err)\n\n\t// Create stack \"a\" and import a checkpoint with a secret\n\taStackRef, err := b.ParseStackReference(\"organization/project/a\")\n\tassert.NoError(t, err)\n\taStack, err := b.CreateStack(ctx, aStackRef, \"\", nil)\n\tassert.NoError(t, err)\n\tassert.NotNil(t, aStack)\n\tdefer func() {\n\t\tt.Setenv(\"PULUMI_CONFIG_PASSPHRASE\", \"abc123\")\n\t\t_, err := b.RemoveStack(ctx, aStack, true)\n\t\tassert.NoError(t, err)\n\t}()\n\tdeployment, err := makeUntypedDeployment(\"a\", \"abc123\",\n\t\t\"v1:4iF78gb0nF0=:v1:Co6IbTWYs/UdrjgY:FSrAWOFZnj9ealCUDdJL7LrUKXX9BA==\")\n\tassert.NoError(t, err)\n\tt.Setenv(\"PULUMI_CONFIG_PASSPHRASE\", \"abc123\")\n\terr = b.ImportDeployment(ctx, aStack, deployment)\n\tassert.NoError(t, err)\n\n\t// Create stack \"b\" and import a checkpoint with a secret\n\tbStackRef, err := b.ParseStackReference(\"organization/project/b\")\n\tassert.NoError(t, err)\n\tbStack, err := b.CreateStack(ctx, bStackRef, \"\", nil)\n\tassert.NoError(t, err)\n\tassert.NotNil(t, bStack)\n\tdefer func() {\n\t\tt.Setenv(\"PULUMI_CONFIG_PASSPHRASE\", \"123abc\")\n\t\t_, err := b.RemoveStack(ctx, bStack, true)\n\t\tassert.NoError(t, err)\n\t}()\n\tdeployment, err = makeUntypedDeployment(\"b\", \"123abc\",\n\t\t\"v1:C7H2a7/Ietk=:v1:yfAd1zOi6iY9DRIB:dumdsr+H89VpHIQWdB01XEFqYaYjAg==\")\n\tassert.NoError(t, err)\n\tt.Setenv(\"PULUMI_CONFIG_PASSPHRASE\", \"123abc\")\n\terr = b.ImportDeployment(ctx, bStack, deployment)\n\tassert.NoError(t, err)\n\n\t// Remove the config passphrase so that we can no longer deserialize the checkpoints\n\terr = os.Unsetenv(\"PULUMI_CONFIG_PASSPHRASE\")\n\tassert.NoError(t, err)\n\n\t// Ensure that we can list the stacks we created even without a passphrase\n\tstacks, outContToken, err := b.ListStacks(ctx, backend.ListStacksFilter{}, nil /* inContToken */)\n\tassert.NoError(t, err)\n\tassert.Nil(t, outContToken)\n\tassert.Len(t, stacks, 2)\n\tfor _, stack := range stacks {\n\t\tassert.NotNil(t, stack.ResourceCount())\n\t\tassert.Equal(t, 1, *stack.ResourceCount())\n\t}\n}", "func TestReferences(t *testing.T) {\n\tt.Parallel()\n\n\ttree := writeTree(t, `\n-- go.mod --\nmodule example.com\ngo 1.18\n\n-- a.go --\npackage a\nimport \"fmt\"\nfunc f() {\n\tfmt.Println()\n}\n\n-- b.go --\npackage a\nimport \"fmt\"\nfunc g() {\n\tfmt.Println()\n}\n`)\n\t// no arguments\n\t{\n\t\tres := gopls(t, tree, \"references\")\n\t\tres.checkExit(false)\n\t\tres.checkStderr(\"expects 1 argument\")\n\t}\n\t// fmt.Println\n\t{\n\t\tres := gopls(t, tree, \"references\", \"a.go:4:10\")\n\t\tres.checkExit(true)\n\t\tres.checkStdout(\"a.go:4:6-13\")\n\t\tres.checkStdout(\"b.go:4:6-13\")\n\t}\n}", "func TestAccAddressListSimpleCreate(t *testing.T) {\n\tresource.Test(t, resource.TestCase{\n\t\tProviders: testAccProviders,\n\t\tCheckDestroy: resource.ComposeTestCheckFunc(\n\t\t\ttestCheckDDCloudAddressListDestroy,\n\t\t\ttestCheckDDCloudNetworkDomainDestroy,\n\t\t),\n\t\tSteps: []resource.TestStep{\n\t\t\tresource.TestStep{\n\t\t\t\tConfig: testAccDDCloudAddressListSimple(\"acc_test_list\", \"af_terraform_list\"),\n\t\t\t\tCheck: resource.ComposeTestCheckFunc(\n\t\t\t\t\ttestCheckDDCloudAddressListExists(\"acc_test_list\", true),\n\t\t\t\t\ttestCheckDDCloudAddressListMatches(\"acc_test_list\", 
compute.IPAddressList{\n\t\t\t\t\t\tName: \"af_terraform_list\",\n\t\t\t\t\t\tDescription: \"Adam's Terraform test address list (do not delete).\",\n\t\t\t\t\t\tAddresses: []compute.IPAddressListEntry{\n\t\t\t\t\t\t\tcompute.IPAddressListEntry{\n\t\t\t\t\t\t\t\tBegin: \"192.168.1.10\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tcompute.IPAddressListEntry{\n\t\t\t\t\t\t\t\tBegin: \"192.168.1.20\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t})\n}", "func TestLddList(t *testing.T) {\n\tvar libMap = make(map[string]bool)\n\tn, err := Ldd([]string{\"/bin/date\"})\n\tif err != nil {\n\t\tt.Fatalf(\"Ldd on /bin/date: want nil, got %v\", err)\n\t}\n\tl, err := LddList([]string{\"/bin/date\"})\n\tif err != nil {\n\t\tt.Fatalf(\"LddList on /bin/date: want nil, got %v\", err)\n\t}\n\tif len(n) != len(l) {\n\t\tt.Fatalf(\"Len of Ldd(%v) and LddList(%v): want same, got different\", len(n), len(l))\n\t}\n\tfor i := range n {\n\t\tlibMap[n[i].FullName] = true\n\t}\n\tfor i := range n {\n\t\tif !libMap[l[i]] {\n\t\t\tt.Errorf(\"%v was in LddList but not in Ldd\", l[i])\n\t\t}\n\t}\n}", "func TestLeafSimilarTrees(t *testing.T) {\n\n}", "func Test_ServiceInstancePath_HasDirectParent_fail2(t *testing.T) {\n\n\t// Setup the DB for the UT environment\n\tdir, db, err := utsetup()\n\tif err != nil {\n\t\tt.Errorf(\"Error setting up UT DB: %v\", err)\n\t}\n\n\tdefer cleanTestDir(dir)\n\n\t// Setup initial variable values\n\tparentURL := \"url1\"\n\tparentOrg := \"myorg\"\n\tparentVersion := \"1.0.0\"\n\tchildURL := \"url2\"\n\tchildOrg := \"childorg\"\n\tchildVersion := \"2.0.0\"\n\tchild2URL := \"url3\"\n\tchild2Org := \"child2org\"\n\tchild2Version := \"3.0.0\"\n\n\t// Establish the dependency path objects\n\tparent := NewServiceInstancePathElement(parentURL, parentOrg, parentVersion)\n\tchild := NewServiceInstancePathElement(childURL, childOrg, childVersion)\n\tchild2 := NewServiceInstancePathElement(child2URL, child2Org, child2Version)\n\n\tdepPath := []ServiceInstancePathElement{*parent, *child, *child2}\n\t// Create the test microservice instance to represent the child\n\tif msi, err := NewMicroserviceInstance(db, child2URL, child2Org, child2Version, \"1234\", depPath, false); err != nil {\n\t\tt.Errorf(\"Error creating instance: %v\", err)\n\t} else if msi.HasDirectParent(parent) {\n\t\tt.Errorf(\"Child %v does not have direct parent: %v\", child2, depPath)\n\t}\n}", "func TestUnreachableMarks(t *testing.T) {\n\tseeds := []string {\"127.0.0.1:6000\",}\n\tmanager1 := CreatePeerManager(6000, 6001, nil, FullMode)\n\tmanager2 := CreatePeerManager(7000, 7001, seeds, FullMode)\n\tmanager3 := CreatePeerManager(8000, 8001, seeds, FullMode)\n\n\t// Change update period to lengthen the time between marking a peer unreachable \n\t// and the next status update\n\tmanager1.StatusUpdatePeriod=500*time.Millisecond\n\tmanager2.StatusUpdatePeriod=500*time.Millisecond\n\tmanager3.StatusUpdatePeriod=500*time.Millisecond\n\n\tmarkPeer := func(t *testing.T) {\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tavailable := GetPeerManagerAvailablePeers(manager1)\n\t\texpected := []string {\"127.0.0.1:6001\", \"127.0.0.1:7001\"}\n\t\tif !MapOnlyContains(available, expected) {\n\t\t\tt.Errorf(\"Peer 127.0.0.1:8001 wasn't marked unreachable %v\\n\", available)\n\t\t}\n\t}\n\n\t// After some time has passed all the peers should be available again\n\tallPeers := []string {\"127.0.0.1:6001\", 
\"127.0.0.1:7001\", \"127.0.0.1:8001\"}\n\tPeerManagerPropagationHelper(t, manager1, manager2, manager3,\n\t\tallPeers, allPeers, allPeers, markPeer, 3200*time.Millisecond, 8*time.Second)\n}", "func Test_DoIterativeFindNodeFail(t *testing.T) {\n\tnotexistid := NewRandomID()\n\tlist := instance[0].DoIterativeFindNode(notexistid)\n\tassertNotContains(\n\t\tlist,\n\t\tnotexistid.AsString(),\n\t\t\"Found not existing id\",\n\t\tt)\n}", "func TestVerifyNested(t *testing.T) {\n\toptions := iniVerifyOptions(t)\n\toptions.CurrentTime = time.Date(2017, 02, 03, 10, 15, 00, 0, gmt)\n\n\tp := loadProxy(\"test-samples/NestedProxy.pem\", t)\n\tif e := p.Verify(options); e != nil {\n\t\tt.Error(e)\n\t}\n}", "func isIdentityrefLeaf(e *yang.Entry) bool {\n\treturn e.Type.IdentityBase != nil\n}", "func TestHandleRequestedTransactionsNotFound(t *testing.T) {\n\ttestutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) {\n\t\tvar log = logger.RegisterSubSystem(\"PROT\")\n\t\tvar spawn = panics.GoroutineWrapperFunc(log)\n\t\tfactory := consensus.NewFactory()\n\t\ttc, teardown, err := factory.NewTestConsensus(consensusConfig, \"TestHandleRequestedTransactionsNotFound\")\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error setting up test Consensus: %+v\", err)\n\t\t}\n\t\tdefer teardown(false)\n\n\t\tsharedRequestedTransactions := flowcontext.NewSharedRequestedTransactions()\n\t\tadapter, err := netadapter.NewNetAdapter(config.DefaultConfig())\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to create a NetAdapter: %v\", err)\n\t\t}\n\t\tdomainInstance, err := domain.New(consensusConfig, mempool.DefaultConfig(&consensusConfig.Params), tc.Database())\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to set up a domain Instance: %v\", err)\n\t\t}\n\t\tcontext := &mocTransactionsRelayContext{\n\t\t\tnetAdapter: adapter,\n\t\t\tdomain: domainInstance,\n\t\t\tsharedRequestedTransactions: sharedRequestedTransactions,\n\t\t}\n\t\tincomingRoute := router.NewRoute(\"incoming\")\n\t\toutgoingRoute := router.NewRoute(\"outgoing\")\n\t\tdefer outgoingRoute.Close()\n\n\t\ttxID1 := externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01})\n\t\ttxID2 := externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02})\n\t\ttxIDs := []*externalapi.DomainTransactionID{txID1, txID2}\n\t\tmsg := appmessage.NewMsgRequestTransactions(txIDs)\n\t\terr = incomingRoute.Enqueue(msg)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Unexpected error from incomingRoute.Enqueue: %v\", err)\n\t\t}\n\t\t// The goroutine is representing the peer's actions.\n\t\tspawn(\"peerResponseToTheTransactionsMessages\", func() {\n\t\t\tfor i, id := range txIDs {\n\t\t\t\tmsg, err := outgoingRoute.Dequeue()\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Fatalf(\"Dequeue: %s\", err)\n\t\t\t\t}\n\t\t\t\toutMsg := msg.(*appmessage.MsgTransactionNotFound)\n\t\t\t\tif txIDs[i].String() != outMsg.ID.String() {\n\t\t\t\t\tt.Fatalf(\"TestHandleRelayedTransactions: expected equal txID: expected %s, but got %s\", txIDs[i].String(), id.String())\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Closed the incomingRoute 
for stop the infinity loop.\n\t\t\tincomingRoute.Close()\n\t\t})\n\n\t\terr = transactionrelay.HandleRequestedTransactions(context, incomingRoute, outgoingRoute)\n\t\t// Make sure the error is due to the closed route.\n\t\tif err == nil || !errors.Is(err, router.ErrRouteClosed) {\n\t\t\tt.Fatalf(\"Unexpected error: expected: %v, got : %v\", router.ErrRouteClosed, err)\n\t\t}\n\t})\n}", "func checkOrphan(x, y, dx, dy, length int) bool {\n orphan := false;\n if x > 1 && y > 1 && length > 0 && length == getInt(&depth) && // this only makes sense when carving paths, not when solving, and only if we haven't exhausted our search depth\n getMaze(x + dx , y + dy ) == wall &&\n getMaze(x + dx/2, y + dy/2) == wall &&\n setCell(x + dx , y + dy , path, noUpdate, length, 0) && // temporarily set new path\n setCell(x + dx/2, y + dy/2, path, noUpdate, length, 0) {\n\n orphan = orphan1x1(x + dx + 2, y + dy ) || // check for 1x1 orphans below & above of the new location\n orphan1x1(x + dx - 2, y + dy ) ||\n orphan1x1(x + dx , y + dy + 2) || // check for 1x1 orphans right & left of the new location\n orphan1x1(x + dx , y + dy - 2)\n\n setMaze(x + dx , y + dy , wall) // restore original walls\n setMaze(x + dx/2, y + dy/2, wall)\n }\n return orphan\n}", "func TestEndpointCase71(t *testing.T) {\n\tvar params = EndpointParameters{\n\t\tRegion: ptr.String(\"us-iso-west-1\"),\n\t\tUseFIPS: ptr.Bool(false),\n\t\tUseDualStack: ptr.Bool(false),\n\t\tOperationType: ptr.String(\"control\"),\n\t\tStreamARN: ptr.String(\"arn:aws-iso:kinesis:us-iso-west-1:123:stream/test-stream\"),\n\t}\n\n\tresolver := NewDefaultEndpointResolverV2()\n\tresult, err := resolver.ResolveEndpoint(context.Background(), params)\n\t_, _ = result, err\n\n\tif err != nil {\n\t\tt.Fatalf(\"expect no error, got %v\", err)\n\t}\n\n\turi, _ := url.Parse(\"https://kinesis.us-iso-west-1.c2s.ic.gov\")\n\n\texpectEndpoint := smithyendpoints.Endpoint{\n\t\tURI: *uri,\n\t\tHeaders: http.Header{},\n\t\tProperties: smithy.Properties{},\n\t}\n\n\tif e, a := expectEndpoint.URI, result.URI; e != a {\n\t\tt.Errorf(\"expect %v URI, got %v\", e, a)\n\t}\n\n\tif diff := cmp.Diff(expectEndpoint.Headers, result.Headers); diff != \"\" {\n\t\tt.Errorf(\"expect headers to match\\n%s\", diff)\n\t}\n\n\tif diff := cmp.Diff(expectEndpoint.Properties, result.Properties,\n\t\tcmp.AllowUnexported(smithy.Properties{}),\n\t); diff != \"\" {\n\t\tt.Errorf(\"expect properties to match\\n%s\", diff)\n\t}\n}", "func TestFetchMirrorNameIdMapInternal(t *testing.T) {\n\tfor _, json := range []string{\n\t\t`{\"data\":[[\"m0018\",[\"uuid\",\"d5dfa2a6-7633-4f13-89d9-ecfa2b161bda\"],[\"uuid\",\"62208f49-cf74-4275-8db9-34a023a686c9\"]],[\"m0017\",[\"uuid\",\"5ab854d3-b050-48de-9d60-3f5791478d1c\"],[\"set\",[]]]],\"headings\":[\"name\",\"_uuid\",\"output_port\"]}`,\n\t\t`{\"data\":[[\"m0018\",[\"uuid\",\"518561f0-2b69-46c3-9455-dd04d01dc5f5\"],[\"uuid\",\"62208f49-cf74-4275-8db9-34a023a686c9\"]]],\"headings\":[\"name\",\"_uuid\",\"output_port\"]}`,\n\t} {\n\t\tret, err := fetchMirrorNameIdMapInternal([]byte(json))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"fetchMirrorNameIdMapInternal fail %s\", err)\n\t\t} else {\n\t\t\tt.Logf(\"%s\", jsonutils.Marshal(ret))\n\t\t}\n\t}\n}", "func TestLinkExisting(t *testing.T) {\n\ttc := NewTestCase(t)\n\tdefer tc.Cleanup()\n\n\tc := RandomData(5)\n\n\terr := ioutil.WriteFile(tc.orig+\"/file1\", c, 0644)\n\tCheckSuccess(err)\n\terr = os.Link(tc.orig+\"/file1\", tc.orig+\"/file2\")\n\tCheckSuccess(err)\n\n\tvar s1, s2 syscall.Stat_t\n\terr = 
syscall.Lstat(tc.mnt+\"/file1\", &s1)\n\tCheckSuccess(err)\n\terr = syscall.Lstat(tc.mnt+\"/file2\", &s2)\n\tCheckSuccess(err)\n\n\tif s1.Ino != s2.Ino {\n\t\tt.Errorf(\"linked files should have identical inodes %v %v\", s1.Ino, s2.Ino)\n\t}\n\n\tback, err := ioutil.ReadFile(tc.mnt + \"/file1\")\n\tCheckSuccess(err)\n\tCompareSlices(t, back, c)\n}", "func (rest *TestTokenStorageREST) TestStatusExternalTokenValidOnForcePullInternalError() {\n\tidentity, err := testsupport.CreateTestIdentity(rest.DB, uuid.NewV4().String(), \"KC\")\n\trequire.Nil(rest.T(), err)\n\trest.checkStatusExternalTokenValidOnForcePullInternalError(identity, \"https://github.com/a/b\", \"https://github.com\")\n\trest.checkStatusExternalTokenValidOnForcePullInternalError(identity, \"github\", \"https://github.com\")\n\trest.checkStatusExternalTokenValidOnForcePullInternalError(identity, \"openshift\", \"https://api.starter-us-east-2.openshift.com/\")\n\trest.checkStatusExternalTokenValidOnForcePullInternalError(identity, \"https://api.starter-us-east-2.openshift.com\", \"https://api.starter-us-east-2.openshift.com/\")\n}", "func (s *BasejossListener) EnterIndirect_(ctx *Indirect_Context) {}", "func main() {\n\tnode1 := &TreeNode{5, nil, nil}\n\tnode2 := &TreeNode{4, nil, nil}\n\tnode3 := &TreeNode{8, nil, nil}\n\tnode4 := &TreeNode{11, nil, nil}\n\tnode5 := &TreeNode{13, nil, nil}\n\tnode6 := &TreeNode{4, nil, nil}\n\tnode7 := &TreeNode{7, nil, nil}\n\tnode8 := &TreeNode{2, nil, nil}\n\tnode9 := &TreeNode{1, nil, nil}\n\n\tnode1.Left = node2\n\tnode1.Right = node3\n\n\tnode2.Left = node4\n\n\tnode3.Left = node5\n\tnode3.Right = node6\n\n\tnode4.Left = node7\n\tnode4.Right = node8\n\n\tnode6.Right = node9\n\n\tcheck := hasPathSum(node1, 22)\n\tfmt.Println(check)\n}", "func TestFragSources(t *testing.T) {\n\n\turi0, err := NewURIFromAddress(\"host0\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\turi1, err := NewURIFromAddress(\"host1\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\turi2, err := NewURIFromAddress(\"host2\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\turi3, err := NewURIFromAddress(\"host3\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tnode0 := &Node{ID: \"node0\", URI: *uri0}\n\tnode1 := &Node{ID: \"node1\", URI: *uri1}\n\tnode2 := &Node{ID: \"node2\", URI: *uri2}\n\tnode3 := &Node{ID: \"node3\", URI: *uri3}\n\n\tc1 := newCluster()\n\tc1.ReplicaN = 1\n\tc1.addNodeBasicSorted(node0)\n\tc1.addNodeBasicSorted(node1)\n\n\tc2 := newCluster()\n\tc2.ReplicaN = 1\n\tc2.addNodeBasicSorted(node0)\n\tc2.addNodeBasicSorted(node1)\n\tc2.addNodeBasicSorted(node2)\n\n\tc3 := newCluster()\n\tc3.ReplicaN = 2\n\tc3.addNodeBasicSorted(node0)\n\tc3.addNodeBasicSorted(node1)\n\n\tc4 := newCluster()\n\tc4.ReplicaN = 2\n\tc4.addNodeBasicSorted(node0)\n\tc4.addNodeBasicSorted(node1)\n\tc4.addNodeBasicSorted(node2)\n\n\tc5 := newCluster()\n\tc5.ReplicaN = 2\n\tc5.addNodeBasicSorted(node0)\n\tc5.addNodeBasicSorted(node1)\n\tc5.addNodeBasicSorted(node2)\n\tc5.addNodeBasicSorted(node3)\n\n\tidx := newIndexWithTempPath(\"i\")\n\tfield, err := idx.CreateFieldIfNotExists(\"f\", OptFieldTypeDefault())\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t_, err = field.SetBit(1, 101, nil)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t_, err = field.SetBit(1, 1300000, nil)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t_, err = field.SetBit(1, 2600000, nil)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t_, err = field.SetBit(1, 3900000, nil)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttests := []struct {\n\t\tfrom *cluster\n\t\tto 
*cluster\n\t\tidx *Index\n\t\texpected map[string][]*ResizeSource\n\t\terr string\n\t}{\n\t\t{\n\t\t\tfrom: c1,\n\t\t\tto: c2,\n\t\t\tidx: idx,\n\t\t\texpected: map[string][]*ResizeSource{\n\t\t\t\t\"node0\": {},\n\t\t\t\t\"node1\": {},\n\t\t\t\t\"node2\": {\n\t\t\t\t\t{&Node{ID: \"node0\", URI: URI{\"http\", \"host0\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(0)},\n\t\t\t\t\t{&Node{ID: \"node1\", URI: URI{\"http\", \"host1\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(2)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: \"\",\n\t\t},\n\t\t{\n\t\t\tfrom: c4,\n\t\t\tto: c3,\n\t\t\tidx: idx,\n\t\t\texpected: map[string][]*ResizeSource{\n\t\t\t\t\"node0\": {\n\t\t\t\t\t{&Node{ID: \"node1\", URI: URI{\"http\", \"host1\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(1)},\n\t\t\t\t},\n\t\t\t\t\"node1\": {\n\t\t\t\t\t{&Node{ID: \"node0\", URI: URI{\"http\", \"host0\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(0)},\n\t\t\t\t\t{&Node{ID: \"node0\", URI: URI{\"http\", \"host0\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(2)},\n\t\t\t\t},\n\t\t\t},\n\t\t\terr: \"\",\n\t\t},\n\t\t{\n\t\t\tfrom: c5,\n\t\t\tto: c4,\n\t\t\tidx: idx,\n\t\t\texpected: map[string][]*ResizeSource{\n\t\t\t\t\"node0\": {\n\t\t\t\t\t{&Node{ID: \"node2\", URI: URI{\"http\", \"host2\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(0)},\n\t\t\t\t\t{&Node{ID: \"node2\", URI: URI{\"http\", \"host2\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(2)},\n\t\t\t\t},\n\t\t\t\t\"node1\": {\n\t\t\t\t\t{&Node{ID: \"node0\", URI: URI{\"http\", \"host0\", 10101}, IsCoordinator: false}, \"i\", \"f\", \"standard\", uint64(3)},\n\t\t\t\t},\n\t\t\t\t\"node2\": {},\n\t\t\t},\n\t\t\terr: \"\",\n\t\t},\n\t\t{\n\t\t\tfrom: c2,\n\t\t\tto: c4,\n\t\t\tidx: idx,\n\t\t\texpected: nil,\n\t\t\terr: \"clusters are the same size\",\n\t\t},\n\t\t{\n\t\t\tfrom: c1,\n\t\t\tto: c5,\n\t\t\tidx: idx,\n\t\t\texpected: nil,\n\t\t\terr: \"adding more than one node at a time is not supported\",\n\t\t},\n\t\t{\n\t\t\tfrom: c5,\n\t\t\tto: c1,\n\t\t\tidx: idx,\n\t\t\texpected: nil,\n\t\t\terr: \"removing more than one node at a time is not supported\",\n\t\t},\n\t}\n\tfor _, test := range tests {\n\n\t\tactual, err := (test.from).fragSources(test.to, test.idx)\n\t\tif test.err != \"\" {\n\t\t\tif !strings.Contains(err.Error(), test.err) {\n\t\t\t\tt.Fatalf(\"expected error: %s, got: %s\", test.err, err.Error())\n\t\t\t}\n\t\t} else {\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(actual, test.expected) {\n\t\t\t\tt.Errorf(\"expected: %v, but got: %v\", test.expected, actual)\n\t\t\t}\n\t\t}\n\t}\n}", "func (b *buffer) pin(loc Location, ptr unsafe.Pointer) {\n\tif !atomic.CompareAndSwapPointer(b.index(loc.index()), nil, ptr) {\n\t\tpanic(\"double pin\")\n\t}\n\t// atomic.StorePointer(b.index(loc.index()), ptr)\n\tatomic.AddUint32(&b.free, ^uint32(0))\n}", "func TestReaddirTypeFixup(t *testing.T) {\n\troot := &randomTypeTest{}\n\n\tmntDir, _ := testMount(t, root, nil)\n\n\tf, err := os.Open(mntDir)\n\tif err != nil {\n\t\tt.Fatalf(\"open: %v\", err)\n\t}\n\tdefer f.Close()\n\n\t// (Ab)use loopbackDirStream to call and parse getdents(2) on mntDir.\n\t// This makes the kernel call READDIRPLUS, which ultimately calls\n\t// randomTypeTest.Readdir() and randomTypeTest.Lookup() above.\n\tds, errno := NewLoopbackDirStream(mntDir)\n\tif errno != 0 {\n\t\tt.Fatalf(\"readdir: %v\", err)\n\t}\n\tdefer 
ds.Close()\n\n\tfor ds.HasNext() {\n\t\te, err := ds.Next()\n\t\tif err != 0 {\n\t\t\tt.Errorf(\"Next: %d\", err)\n\t\t}\n\t\tgotIsDir := (e.Mode & syscall.S_IFDIR) != 0\n\t\twantIsdir := (crc32.ChecksumIEEE([]byte(e.Name)) % 2) == 1\n\t\tif gotIsDir != wantIsdir {\n\t\t\tt.Errorf(\"%q: isdir %v, want %v\", e.Name, gotIsDir, wantIsdir)\n\t\t}\n\t}\n}", "func TestPointingPairs(t *testing.T) {\n\tinputBoard := []byte(pointingPairsParam)\n\tb := NewBoard(inputBoard)\n\n\tassert.True(t, b.candidates[18].Contains('1'))\n\tassert.True(t, b.candidates[20].Contains('1'))\n\n\tassert.True(t, b.candidates[32].Contains('2'))\n\tassert.True(t, b.candidates[41].Contains('2'))\n\tassert.True(t, b.candidates[50].Contains('2'))\n\tassert.True(t, b.candidates[68].Contains('2'))\n\n\tassert.True(t, b.candidates[32].Contains('3'))\n\tassert.True(t, b.candidates[41].Contains('3'))\n\tassert.True(t, b.candidates[50].Contains('3'))\n\tassert.True(t, b.candidates[68].Contains('3'))\n\n\tassert.True(t, b.candidates[36].Contains('4'))\n\tassert.True(t, b.candidates[37].Contains('4'))\n\n\tassert.True(t, b.candidates[39].Contains('6'))\n\n\tassert.True(t, b.candidates[71].Contains('7'))\n\n\tassert.True(t, b.candidates[69].Contains('8'))\n\tassert.True(t, b.candidates[70].Contains('8'))\n\tassert.True(t, b.candidates[71].Contains('8'))\n\n\tassert.True(t, b.candidates[57].Contains('9'))\n\n\tb.PointingPairs()\n\n\tassert.True(t, !b.candidates[18].Contains('1'))\n\tassert.True(t, !b.candidates[20].Contains('1'))\n\n\tassert.True(t, !b.candidates[32].Contains('2'))\n\tassert.True(t, !b.candidates[41].Contains('2'))\n\tassert.True(t, !b.candidates[50].Contains('2'))\n\tassert.True(t, !b.candidates[68].Contains('2'))\n\n\tassert.True(t, !b.candidates[32].Contains('3'))\n\tassert.True(t, !b.candidates[41].Contains('3'))\n\tassert.True(t, !b.candidates[50].Contains('3'))\n\tassert.True(t, !b.candidates[68].Contains('3'))\n\n\tassert.True(t, !b.candidates[36].Contains('4'))\n\tassert.True(t, !b.candidates[37].Contains('4'))\n\n\tassert.True(t, !b.candidates[39].Contains('6'))\n\n\tassert.True(t, !b.candidates[71].Contains('7'))\n\n\tassert.True(t, !b.candidates[69].Contains('8'))\n\tassert.True(t, !b.candidates[70].Contains('8'))\n\tassert.True(t, !b.candidates[71].Contains('8'))\n\n\tassert.True(t, !b.candidates[57].Contains('9'))\n}", "func TestReconcileGatewayRoutesOnStartup(t *testing.T) {\n\tskipIfNumNodesLessThan(t, 2)\n\n\tdata, err := setupTest(t)\n\tif err != nil {\n\t\tt.Fatalf(\"Error when setting up test: %v\", err)\n\t}\n\tdefer teardownTest(t, data)\n\n\ttype Route struct {\n\t\tpeerPodCIDR *net.IPNet\n\t\tpeerPodGW net.IP\n\t}\n\n\tnodeName := nodeName(0)\n\tantreaPodName := func() string {\n\t\tantreaPodName, err := data.getAntreaPodOnNode(nodeName)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Error when retrieving the name of the Antrea Pod running on Node '%s': %v\", nodeName, err)\n\t\t}\n\t\tt.Logf(\"The Antrea Pod for Node '%s' is '%s'\", nodeName, antreaPodName)\n\t\treturn antreaPodName\n\t}\n\n\tgetGatewayRoutes := func() (routes []Route, err error) {\n\t\tcmd := fmt.Sprintf(\"ip route list dev %s\", antreaGWName)\n\t\trc, stdout, _, err := RunCommandOnNode(nodeName, cmd)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"error when running ip command on Node '%s': %v\", nodeName, err)\n\t\t}\n\t\tif rc != 0 {\n\t\t\treturn nil, fmt.Errorf(\"running ip command on Node '%s' returned error\", nodeName)\n\t\t}\n\t\tre := regexp.MustCompile(`([^\\s]+) via ([^\\s]+)`)\n\t\tfor _, line := range 
strings.Split(stdout, \"\\n\") {\n\t\t\tvar err error\n\t\t\tmatches := re.FindStringSubmatch(line)\n\t\t\tif len(matches) == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\troute := Route{}\n\t\t\tif _, route.peerPodCIDR, err = net.ParseCIDR(matches[1]); err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"%s is not a valid net CIDR\", matches[1])\n\t\t\t}\n\t\t\tif route.peerPodGW = net.ParseIP(matches[2]); route.peerPodGW == nil {\n\t\t\t\treturn nil, fmt.Errorf(\"%s is not a valid IP\", matches[2])\n\t\t\t}\n\t\t\troutes = append(routes, route)\n\t\t}\n\t\treturn routes, nil\n\t}\n\n\tt.Logf(\"Retrieving gateway routes on Node '%s'\", nodeName)\n\tvar routes []Route\n\tif err := wait.PollImmediate(1*time.Second, defaultTimeout, func() (found bool, err error) {\n\t\troutes, err = getGatewayRoutes()\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif len(routes) < clusterInfo.numNodes-1 {\n\t\t\t// Not enough routes, keep trying\n\t\t\treturn false, nil\n\t\t} else if len(routes) > clusterInfo.numNodes-1 {\n\t\t\treturn false, fmt.Errorf(\"found too many gateway routes, expected %d but got %d\", clusterInfo.numNodes-1, len(routes))\n\t\t}\n\t\treturn true, nil\n\t}); err == wait.ErrWaitTimeout {\n\t\tt.Fatalf(\"Not enough gateway routes after %v\", defaultTimeout)\n\t} else if err != nil {\n\t\tt.Fatalf(\"Error while waiting for gateway routes: %v\", err)\n\t} else {\n\t\tt.Logf(\"Found all expected gateway routes\")\n\t}\n\n\trouteToDelete := routes[0]\n\t// A dummy route\n\trouteToAdd := Route{}\n\t_, routeToAdd.peerPodCIDR, _ = net.ParseCIDR(\"99.99.99.0/24\")\n\trouteToAdd.peerPodGW = net.ParseIP(\"99.99.99.1\")\n\n\t// We run the ip command from the antrea-agent container for delete / add since they need to\n\t// be run as root and the antrea-agent container is privileged. If we used RunCommandOnNode,\n\t// we may need to use \"sudo\" for some providers (e.g. 
vagrant).\n\tdeleteGatewayRoute := func(route Route) error {\n\t\tcmd := []string{\"ip\", \"route\", \"del\", route.peerPodCIDR.String()}\n\t\t_, _, err := data.runCommandFromPod(antreaNamespace, antreaPodName(), agentContainerName, cmd)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error when running ip command on Node '%s': %v\", nodeName, err)\n\t\t}\n\t\treturn nil\n\t}\n\n\taddGatewayRoute := func(route Route) error {\n\t\tcmd := []string{\"ip\", \"route\", \"add\", route.peerPodCIDR.String(), \"via\", route.peerPodGW.String(), \"dev\", antreaGWName, \"onlink\"}\n\t\t_, _, err := data.runCommandFromPod(antreaNamespace, antreaPodName(), agentContainerName, cmd)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error when running ip command on Node '%s': %v\", nodeName, err)\n\t\t}\n\t\treturn nil\n\t}\n\n\tt.Logf(\"Deleting one actual gateway route and adding a dummy one\")\n\tif err := deleteGatewayRoute(routeToDelete); err != nil {\n\t\tt.Fatalf(\"Error when deleting route: %v\", err)\n\t}\n\tif err := addGatewayRoute(routeToAdd); err != nil {\n\t\tt.Fatalf(\"Error when adding dummy route route: %v\", err)\n\t}\n\tdefer func() {\n\t\t// Cleanup the dummy route regardless of whether the test was a success or a\n\t\t// failure; ignore error (there will be an error if the test is a success since the\n\t\t// dummy route will no longer exist).\n\t\t_ = deleteGatewayRoute(routeToAdd)\n\t}()\n\n\tt.Logf(\"Restarting antrea-agent on Node '%s'\", nodeName)\n\tif _, err := data.deleteAntreaAgentOnNode(nodeName, 30 /* grace period in seconds */, defaultTimeout); err != nil {\n\t\tt.Fatalf(\"Error when restarting antrea-agent on Node '%s': %v\", nodeName, err)\n\t}\n\n\tt.Logf(\"Checking that all Antrea DaemonSet Pods are running\")\n\tif err := data.waitForAntreaDaemonSetPods(defaultTimeout); err != nil {\n\t\tt.Fatalf(\"Error when waiting for Antrea Pods: %v\", err)\n\t}\n\n\t// We expect the agent to delete the extra route we added and add back the route we deleted\n\tt.Logf(\"Waiting for gateway routes to converge\")\n\tif err := wait.Poll(1*time.Second, defaultTimeout, func() (bool, error) {\n\t\tnewRoutes, err := getGatewayRoutes()\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif len(newRoutes) != len(routes) {\n\t\t\treturn false, nil\n\t\t}\n\t\tfor _, route := range newRoutes {\n\t\t\tif route.peerPodGW.Equal(routeToAdd.peerPodGW) {\n\t\t\t\t// The dummy route hasn't been deleted yet, keep trying\n\t\t\t\treturn false, nil\n\t\t\t}\n\t\t}\n\t\t// At this stage we have confirmed that the dummy route has been deleted\n\t\tfor _, route := range newRoutes {\n\t\t\tif route.peerPodGW.Equal(routeToDelete.peerPodGW) {\n\t\t\t\t// The deleted route was added back, success!\n\t\t\t\treturn true, nil\n\t\t\t}\n\t\t}\n\t\t// We haven't found the deleted route, keep trying\n\t\treturn false, nil\n\t}); err == wait.ErrWaitTimeout {\n\t\tt.Errorf(\"Gateway routes did not converge after %v\", defaultTimeout)\n\t} else if err != nil {\n\t\tt.Fatalf(\"Error while waiting for gateway routes to converge: %v\", err)\n\t} else {\n\t\tt.Logf(\"Gateway routes successfully converged\")\n\t}\n}", "func TestUnmanaged(t *testing.T) {\n\tnewIntegrationTest(\"unmanaged.example.com\", \"unmanaged\").\n\t\twithAddons(\n\t\t\tawsEBSCSIAddon,\n\t\t\tdnsControllerAddon,\n\t\t\tawsCCMAddon,\n\t\t).\n\t\twithPrivate().\n\t\trunTestTerraformAWS(t)\n}", "func TestLeaf(t *testing.T){\n\tdata := []byte(\"some_utxo\")\n\tvar left Node\n\tvar right Node\n\tvar hash [32]byte\n\tleft = 
Node{hash:nil,left:nil,right:nil,}\n\tright = Node{hash:nil,left:nil,right:nil,}\n\thash = sha256.Sum256(data)\n\tn_test := Node{\n\t\thash: hash[:],\n\t\tleft: &left,\n\t\tright: &right,\n\t\t\n\t}\t\t\n\tn := makeNode(data,left,right)\n\tn1 := n\n\tn2 := n_test\n\t// first the lenght of bytes\n\tif (len(n1.hash) != len(n2.hash)) {\n\t\tt.Errorf(\"hashes are a different length, %d and %d\", len(n1.hash), len(n2.hash))\n\t}\n\t// the bytes must match\n\tfor i := 0; i < len(n1.hash); i++ {\n\t\tif (n1.hash[i] != n2.hash[i]) {\n\t\t\tt.Errorf(\"hash bytes do not match for byte %d, found %x, expected %x \",i,n1.hash[i],n2.hash[i])\n\t\t}\n\t}\n}", "func TestList(t *testing.T) {\n\tif testing.Short() {\n\t\tt.SkipNow()\n\t}\n\t// Prepare a siadirset\n\troot := filepath.Join(testDir(t.Name()), \"fs-root\")\n\tos.RemoveAll(root)\n\tfs := newTestFileSystem(root)\n\n\t// Specify a directory structure for this test.\n\tvar dirStructure = []string{\n\t\t\"dir1\",\n\t\t\"dir1/subdir1\",\n\t\t\"dir1/subdir1/subsubdir1\",\n\t\t\"dir1/subdir1/subsubdir2\",\n\t\t\"dir1/subdir1/subsubdir3\",\n\t\t\"dir1/subdir2\",\n\t\t\"dir1/subdir2/subsubdir1\",\n\t\t\"dir1/subdir2/subsubdir2\",\n\t\t\"dir1/subdir2/subsubdir3\",\n\t\t\"dir1/subdir3\",\n\t\t\"dir1/subdir3/subsubdir1\",\n\t\t\"dir1/subdir3/subsubdir2\",\n\t\t\"dir1/subdir3/subsubdir3\",\n\t}\n\n\t// Create filesystem\n\tfor _, d := range dirStructure {\n\t\t// Create directory\n\t\tsiaPath := newSiaPath(d)\n\t\terr := fs.NewSiaDir(siaPath, persist.DefaultDiskPermissionsTest)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\t// Add a file\n\t\tfileSiaPath, err := siaPath.Join(\"file\")\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\tfs.addTestSiaFile(fileSiaPath)\n\t}\n\n\t// Get the cached information\n\tfis, dis, err := fs.CachedListCollect(newSiaPath(dirStructure[0]), true)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif len(fis) != len(dirStructure) {\n\t\tt.Fatal(\"wrong number of files\", len(fis), len(dirStructure))\n\t}\n\tif len(dis) != len(dirStructure) {\n\t\tt.Fatal(\"wrong number of dirs\", len(dis), len(dirStructure))\n\t}\n}", "func pinImages(ctx context.Context, dc *declcfg.DeclarativeConfig, resolverConfigPath string, insecure bool) error {\n\tresolver, err := containerdregistry.NewResolver(resolverConfigPath, insecure, nil)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating image resolver: %v\", err)\n\t}\n\n\tvar errs []error\n\tfor i, b := range dc.Bundles {\n\n\t\tif !image.IsImagePinned(b.Image) {\n\n\t\t\tif !image.IsImageTagged(b.Image) {\n\t\t\t\tlogrus.Warnf(\"bundle %s: bundle image tag not set\", b.Name)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif dc.Bundles[i].Image, err = image.ResolveToPin(ctx, resolver, b.Image); err != nil {\n\t\t\t\terrs = append(errs, err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\tfor j, ri := range b.RelatedImages {\n\t\t\tif !image.IsImagePinned(ri.Image) {\n\n\t\t\t\tif !image.IsImageTagged(ri.Image) {\n\t\t\t\t\tlogrus.Warnf(\"bundle %s: related image tag not set\", b.Name)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tif b.RelatedImages[j].Image, err = image.ResolveToPin(ctx, resolver, ri.Image); err != nil {\n\t\t\t\t\terrs = append(errs, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn utilerrors.NewAggregate(errs)\n}", "func (a *Client) GetPinned(params *GetPinnedParams, authInfo runtime.ClientAuthInfoWriter) (*GetPinnedOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetPinnedParams()\n\t}\n\n\tresult, err := 
a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"getPinned\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/app/rest/builds/{buildLocator}/pin\",\n\t\tProducesMediaTypes: []string{\"application/json\", \"application/xml\", \"text/plain\"},\n\t\tConsumesMediaTypes: []string{\"application/json\", \"application/xml\", \"text/plain\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &GetPinnedReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*GetPinnedOK), nil\n\n}", "func TestFindLogicalRouter(t *testing.T) {\n\n\tnsxClient, teardown := setupTest()\n\tdefer teardown()\n\n\ttype args struct {\n\t\tnsxClient *nsxt.APIClient\n\t\tcallback nsxtapi.RouterSearchHandler\n\t\tsearchVal string\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"should return not found\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"name\"],\n\t\t\t\t\"\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t\"test search by router name\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"name\"],\n\t\t\t\t\"primary-t0\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by uuid name\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"uuid\"],\n\t\t\t\t\"ba95b780-3689-419b-8f20-c7179e05813f\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by edge id\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"edgeid\"],\n\t\t\t\t\"133fe9a7-2e87-409a-b1b3-406ab5833986\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by type \",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"type\"],\n\t\t\t\t\"TIER1\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := nsxtapi.FindLogicalRouter(tt.args.nsxClient, tt.args.callback, tt.args.searchVal)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"FindLogicalRouter() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfor _, v := range got {\n\t\t\t\tif err == nil {\n\t\t\t\t\tt.Log(\"Found router id \", v.Id, \" name\", v.DisplayName, \" type \", v.RouterType, \"edge id\", v.EdgeClusterId)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func TestWindowsTerragruntSourceMapDebug(t *testing.T) {\n\ttestCases := []struct {\n\t\tname string\n\t}{\n\t\t{\n\t\t\tname: \"multiple-match\",\n\t\t},\n\t\t{\n\t\t\tname: \"multiple-with-dependency\",\n\t\t},\n\t}\n\tfor _, testCase := range testCases {\n\t\ttestCase := testCase\n\t\tt.Run(testCase.name, func(t *testing.T) {\n\t\t\tfixtureSourceMapPath := \"fixture-source-map\"\n\t\t\tcleanupTerraformFolder(t, fixtureSourceMapPath)\n\t\t\ttargetPath := \"C:\\\\test\\\\infrastructure-modules/\"\n\t\t\tcopyEnvironmentToPath(t, fixtureSourceMapPath, targetPath)\n\t\t\trootPath := filepath.Join(targetPath, fixtureSourceMapPath)\n\n\t\t\tos.Setenv(\n\t\t\t\t\"TERRAGRUNT_SOURCE_MAP\",\n\t\t\t\tstrings.Join(\n\t\t\t\t\t[]string{\n\t\t\t\t\t\tfmt.Sprintf(\"git::ssh://[email protected]/gruntwork-io/i-dont-exist.git=%s\", targetPath),\n\t\t\t\t\t\tfmt.Sprintf(\"git::ssh://[email protected]/gruntwork-io/another-dont-exist.git=%s\", targetPath),\n\t\t\t\t\t},\n\t\t\t\t\t\",\",\n\t\t\t\t),\n\t\t\t)\n\t\t\ttgPath := filepath.Join(rootPath, testCase.name)\n\t\t\ttgArgs := fmt.Sprintf(\"terragrunt run-all apply -auto-approve 
--terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s\", tgPath)\n\t\t\trunTerragrunt(t, tgArgs)\n\t\t})\n\t}\n}", "func (m *ListRepositoryMock) MinimockIsInListInspect() {\n\tfor _, e := range m.IsInListMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to ListRepositoryMock.IsInList with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.IsInListMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterIsInListCounter) < 1 {\n\t\tif m.IsInListMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to ListRepositoryMock.IsInList\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to ListRepositoryMock.IsInList with params: %#v\", *m.IsInListMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcIsInList != nil && mm_atomic.LoadUint64(&m.afterIsInListCounter) < 1 {\n\t\tm.t.Error(\"Expected call to ListRepositoryMock.IsInList\")\n\t}\n}", "func Test_ServiceInstancePath_HasDirectParent_second(t *testing.T) {\n\n\t// Setup the DB for the UT environment\n\tdir, db, err := utsetup()\n\tif err != nil {\n\t\tt.Errorf(\"Error setting up UT DB: %v\", err)\n\t}\n\n\tdefer cleanTestDir(dir)\n\n\t// Setup initial variable values\n\tparentURL := \"url1\"\n\tparentOrg := \"myorg\"\n\tparentVersion := \"1.0.0\"\n\tchildURL := \"url2\"\n\tchildOrg := \"childorg\"\n\tchildVersion := \"2.0.0\"\n\tchild2URL := \"url3\"\n\tchild2Org := \"childorg3\"\n\tchild2Version := \"3.0.0\"\n\n\t// Establish the dependency path objects\n\tparent := NewServiceInstancePathElement(parentURL, parentOrg, parentVersion)\n\tchild := NewServiceInstancePathElement(childURL, childOrg, childVersion)\n\tchild2 := NewServiceInstancePathElement(child2URL, child2Org, child2Version)\n\n\tdepPath := []ServiceInstancePathElement{*parent, *child, *child2}\n\tdp2 := []ServiceInstancePathElement{*parent, *child2}\n\n\t// Create the test microservice instance to represent the child\n\tif msi, err := NewMicroserviceInstance(db, child2URL, child2Org, child2Version, \"1234\", depPath, false); err != nil {\n\t\tt.Errorf(\"Error creating instance: %v\", err)\n\t} else if newmsi, err := UpdateMSInstanceAddDependencyPath(db, msi.GetKey(), &dp2); err != nil {\n\t\tt.Errorf(\"Error updating instance: %v\", err)\n\t} else if !newmsi.HasDirectParent(parent) {\n\t\tt.Errorf(\"Child %v does have direct parent: %v\", child2, dp2)\n\t}\n}", "func (p *pinner) Flush() error {\n\tp.lock.Lock()\n\tdefer p.lock.Unlock()\n\n\tctx := context.TODO()\n\n\tinternalPin := make(map[util.Key]struct{})\n\trecordInternal := func(k util.Key) {\n\t\tinternalPin[k] = struct{}{}\n\t}\n\n\troot := &mdag.Node{}\n\t{\n\t\tn, err := storeSet(ctx, p.dserv, p.directPin.GetKeys(), recordInternal)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := root.AddNodeLink(linkDirect, n); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t{\n\t\tn, err := storeSet(ctx, p.dserv, p.recursePin.GetKeys(), recordInternal)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := root.AddNodeLink(linkRecursive, n); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t{\n\t\tn, err := storeMultiset(ctx, p.dserv, p.indirPin.GetRefs(), recordInternal)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := root.AddNodeLink(linkIndirect, n); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tk, err := p.dserv.Add(root)\n\tif err != nil 
{\n\t\treturn err\n\t}\n\tinternalPin[k] = struct{}{}\n\tif err := p.dstore.Put(pinDatastoreKey, []byte(k)); err != nil {\n\t\treturn fmt.Errorf(\"cannot store pin state: %v\", err)\n\t}\n\tp.internalPin = internalPin\n\treturn nil\n}", "func TestEndpointCase64(t *testing.T) {\n\tvar params = EndpointParameters{\n\t\tRegion: ptr.String(\"us-east-1\"),\n\t\tUseFIPS: ptr.Bool(true),\n\t\tUseDualStack: ptr.Bool(true),\n\t\tOperationType: ptr.String(\"control\"),\n\t\tStreamARN: ptr.String(\"arn:aws:kinesis:us-east-1:123:stream/test-stream\"),\n\t}\n\n\tresolver := NewDefaultEndpointResolverV2()\n\tresult, err := resolver.ResolveEndpoint(context.Background(), params)\n\t_, _ = result, err\n\n\tif err != nil {\n\t\tt.Fatalf(\"expect no error, got %v\", err)\n\t}\n\n\turi, _ := url.Parse(\"https://123.control-kinesis-fips.us-east-1.api.aws\")\n\n\texpectEndpoint := smithyendpoints.Endpoint{\n\t\tURI: *uri,\n\t\tHeaders: http.Header{},\n\t\tProperties: smithy.Properties{},\n\t}\n\n\tif e, a := expectEndpoint.URI, result.URI; e != a {\n\t\tt.Errorf(\"expect %v URI, got %v\", e, a)\n\t}\n\n\tif diff := cmp.Diff(expectEndpoint.Headers, result.Headers); diff != \"\" {\n\t\tt.Errorf(\"expect headers to match\\n%s\", diff)\n\t}\n\n\tif diff := cmp.Diff(expectEndpoint.Properties, result.Properties,\n\t\tcmp.AllowUnexported(smithy.Properties{}),\n\t); diff != \"\" {\n\t\tt.Errorf(\"expect properties to match\\n%s\", diff)\n\t}\n}", "func TestResolvePolicyNo1(t *testing.T) {\n\tconst (\n\t\tconfigName, secretName = \"all-cfg-map-test-redis\", \"all-secret-test-redis\"\n\t\tkeyNo1, keyNo2, keyNo3 = \"keyNo1\", \"keyNo2\", \"keyNo3\"\n\t\tnamespace = \"test-ns\"\n\t)\n\n\ttype given struct {\n\t\tconfigData configMapData\n\t\tsecretData secretData\n\t\tbindYAML string\n\t}\n\ttype expected struct {\n\t\tcredentials internal.InstanceCredentials\n\t}\n\tfor tn, tc := range map[string]struct {\n\t\tgiven\n\t\texpected\n\t}{\n\t\t\"secret overrides configMap values\": {\n\t\t\tgiven: given{\n\t\t\t\tconfigData: configMapData{keyNo1: \"key_1_cfg_val\", keyNo2: \"key_2_cfg_val\"},\n\t\t\t\tsecretData: secretData{keyNo1: []byte(\"key_1_secret_val\"), keyNo3: []byte(\"key_3_secret_val\")},\n\t\t\t\tbindYAML: dedent.Dedent(`\n credentialFrom:\n - configMapRef:\n name: ` + configName + `\n - secretRef:\n name: ` + secretName),\n\t\t\t},\n\t\t\texpected: expected{\n\t\t\t\tcredentials: internal.InstanceCredentials{\n\t\t\t\t\tkeyNo1: \"key_1_secret_val\",\n\t\t\t\t\tkeyNo2: \"key_2_cfg_val\",\n\t\t\t\t\tkeyNo3: \"key_3_secret_val\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t\"configMap overrides secret values\": {\n\t\t\tgiven: given{\n\t\t\t\tconfigData: configMapData{keyNo1: \"key_1_cfg_val\", keyNo2: \"key_2_cfg_val\"},\n\t\t\t\tsecretData: secretData{keyNo1: []byte(\"key_1_secret_val\"), keyNo3: []byte(\"key_3_secret_val\")},\n\t\t\t\tbindYAML: dedent.Dedent(`\n\t\t credentialFrom:\n\t\t - secretRef:\n\t\t name: ` + secretName + `\n\t\t - configMapRef:\n\t\t name: ` + configName),\n\t\t\t},\n\t\t\texpected: expected{\n\t\t\t\tcredentials: internal.InstanceCredentials{\n\t\t\t\t\tkeyNo1: \"key_1_cfg_val\",\n\t\t\t\t\tkeyNo2: \"key_2_cfg_val\",\n\t\t\t\t\tkeyNo3: \"key_3_secret_val\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t} {\n\t\tt.Run(tn, func(t *testing.T) {\n\t\t\t// given\n\t\t\tts := newResolverTestSuit()\n\t\t\tvar (\n\t\t\t\tconfigMap = ts.configMap(namespace, \"all-cfg-map-test-redis\", tc.given.configData)\n\t\t\t\tsecret = ts.secret(namespace, \"all-secret-test-redis\", tc.given.secretData)\n\t\t\t\tfakeClient = 
fake.NewSimpleClientset(&configMap, &secret)\n\t\t\t\tresolver = bind.NewResolver(fakeClient.CoreV1())\n\t\t\t)\n\n\t\t\t// when\n\t\t\tout, err := resolver.Resolve(bind.RenderedBindYAML(tc.given.bindYAML), internal.Namespace(namespace))\n\n\t\t\t// then\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.EqualValues(t, tc.expected.credentials, out.Credentials)\n\t\t\tassert.Len(t, fakeClient.Actions(), 2)\n\t\t})\n\t}\n}", "func TestInsertWithoutDuplicates_SortListByCloseness(t *testing.T) {\n\tlocalNode, _ := start(ID{0, 0, 0, 2}, 0, \"\")\n\tneighbors := make([]RemoteNode, 0)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{0, 0, 0, 0}, \"\"}})\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{0, 0, 0, 0}, \"\"}})\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{9, 12, 4, 15}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{9, 12, 4, 15}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{3, 16, 5, 16}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{3, 16, 5, 16}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{15, 1, 3, 9}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{15, 1, 3, 9}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\t// same dist\n\tid := ID{0, 0, 0, 4}\n\tassert.False(t, localNode.node.Id.Closer(id, ID{0, 0, 0, 0}))\n\tassert.False(t, localNode.node.Id.Closer(ID{0, 0, 0, 0}, id))\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{id, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{id, 
\"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{id, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = InsertWithoutDuplicates(neighbors, []RemoteNode{{ID{0, 0, 0, 2}, \"\"}})\n\tassert.ElementsMatch(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 2}, \"\"},\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{id, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n\n\tneighbors = localNode.SortListByCloseness(neighbors)\n\tassert.Equal(t,\n\t\t[]RemoteNode{\n\t\t\t{ID{0, 0, 0, 2}, \"\"},\n\t\t\t{ID{0, 0, 0, 0}, \"\"},\n\t\t\t{id, \"\"},\n\t\t\t{ID{3, 16, 5, 16}, \"\"},\n\t\t\t{ID{9, 12, 4, 15}, \"\"},\n\t\t\t{ID{15, 1, 3, 9}, \"\"}},\n\t\tneighbors)\n}", "func InsideProperty(source *specs.Property, target *specs.Property) bool {\n\tif source == target {\n\t\treturn true\n\t}\n\n\tif len(source.Nested) > 0 {\n\t\tfor _, nested := range source.Nested {\n\t\t\tis := InsideProperty(nested, target)\n\t\t\tif is {\n\t\t\t\treturn is\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false\n}", "func TestFetchMirrorIdBridgeMapInternal(t *testing.T) {\n\tfor _, json := range []string{\n\t\t`{\"data\":[[\"br1\",[\"set\",[]]],[\"br0\",[\"set\",[[\"uuid\",\"5ab854d3-b050-48de-9d60-3f5791478d1c\"],[\"uuid\",\"d5dfa2a6-7633-4f13-89d9-ecfa2b161bda\"]]]],[\"brtap\",[\"set\",[]]],[\"brmapped\",[\"set\",[]]],[\"breip\",[\"set\",[]]],[\"brvpc\",[\"set\",[]]]],\"headings\":[\"name\",\"mirrors\"]}`,\n\t\t`{\"data\":[[\"br1\",[\"set\",[]]],[\"br0\",[\"set\",[]]],[\"brtap\",[\"set\",[]]],[\"brmapped\",[\"set\",[]]],[\"breip\",[\"set\",[]]],[\"brvpc\",[\"set\",[]]]],\"headings\":[\"name\",\"mirrors\"]}`,\n\t\t`{\"data\":[[\"br1\",[\"set\",[]]],[\"br0\",[\"uuid\",\"518561f0-2b69-46c3-9455-dd04d01dc5f5\"]],[\"brtap\",[\"set\",[]]],[\"brmapped\",[\"set\",[]]],[\"breip\",[\"set\",[]]],[\"brvpc\",[\"set\",[]]]],\"headings\":[\"name\",\"mirrors\"]}`,\n\t} {\n\t\tret, err := fetchMirrorIdBridgeMapInternal([]byte(json))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"fetchMirrorIdBridgeMapInternal fail %s\", err)\n\t\t} else {\n\t\t\tt.Logf(\"%s\", jsonutils.Marshal(ret))\n\t\t}\n\t}\n}", "func TestNewBST(t *testing.T) {\n\ttt := []struct {\n\t\tname string\n\t\tinBtree []int\n\t\toutInOrder []int\n\t}{\n\t\t{\n\t\t\tname: \"test1\",\n\t\t\tinBtree: []int{2, 1, 3},\n\t\t\toutInOrder: []int{1, 2, 3},\n\t\t},\n\t\t{\n\t\t\tname: \"test2\",\n\t\t\tinBtree: []int{5, 6, 4, 2, 3, 1, 7, 8, 9},\n\t\t\toutInOrder: []int{1, 2, 3, 4, 5, 6, 7, 8, 9},\n\t\t},\n\t\t{\n\t\t\tname: \"test3\",\n\t\t\tinBtree: []int{100, 50, 60, 30, 10, 55, 80, 90, 75, 1},\n\t\t\toutInOrder: []int{1, 10, 30, 50, 55, 60, 75, 80, 90, 100},\n\t\t},\n\t}\n\n\tfor _, tc := range tt {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\ttree := newBST(tc.inBtree[:1][0], tc.inBtree[1:])\n\t\t\tresult := []int{}\n\t\t\tresult = tree.inOrderTraverseRecursive(result)\n\t\t\tif !reflect.DeepEqual(result, tc.outInOrder) {\n\t\t\t\tt.Fatalf(\"result: \\n %v, want: \\n %v\", result, tc.outInOrder)\n\t\t\t}\n\t\t})\n\t}\n}", "func PINSRD(i, mr, x operand.Op) { ctx.PINSRD(i, mr, x) }", "func (m *Module) instPtrToInt(old *ast.InstPtrToInt, resolved, unresolved map[ast.NamedValue]value.Named) bool {\n\tif isUnresolved(unresolved, old.From) {\n\t\treturn 
false\n\t}\n\tv := m.getLocal(old.Name)\n\tinst, ok := v.(*ir.InstPtrToInt)\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"invalid instruction type for instruction %s; expected *ir.InstPtrToInt, got %T\", enc.Local(old.Name), v))\n\t}\n\tinst.From = m.irValue(old.From)\n\tinst.To = m.irType(old.To)\n\tinst.Metadata = m.irMetadata(old.Metadata)\n\treturn true\n}" ]
[ "0.60909045", "0.577816", "0.5526697", "0.5223124", "0.5107354", "0.5074", "0.50663936", "0.50328296", "0.4914689", "0.48910722", "0.48430848", "0.48398843", "0.4772984", "0.4762767", "0.4748573", "0.47405154", "0.4732296", "0.47175515", "0.46788254", "0.45888147", "0.45874774", "0.4579521", "0.45737052", "0.45617595", "0.45430917", "0.45258167", "0.4516878", "0.45032564", "0.44833106", "0.44713464", "0.44586235", "0.44565165", "0.44486699", "0.44451338", "0.44426858", "0.44301847", "0.44245937", "0.4416637", "0.44066706", "0.4403873", "0.4402169", "0.43963867", "0.43693277", "0.4363296", "0.4358868", "0.43578115", "0.43559447", "0.43488836", "0.4341896", "0.43314785", "0.4330887", "0.43302658", "0.43218124", "0.43184105", "0.43180752", "0.43178746", "0.43169376", "0.43141276", "0.43139765", "0.43096256", "0.4302854", "0.4301226", "0.43005696", "0.4298268", "0.4295301", "0.42924672", "0.42890012", "0.4286197", "0.42787072", "0.4277182", "0.42735857", "0.42668948", "0.42546302", "0.42542785", "0.42533612", "0.42458576", "0.42332566", "0.42307895", "0.42190677", "0.4215315", "0.42054212", "0.42042008", "0.42000344", "0.41984135", "0.41974068", "0.4194265", "0.41885507", "0.4185834", "0.41758627", "0.4174116", "0.41725692", "0.41724762", "0.41701514", "0.41681534", "0.4166892", "0.416385", "0.416354", "0.41629893", "0.41617548", "0.4157676" ]
document_score: 0.86240685
document_rank: 0
TestPinLsPrecedence verifies the precedence of pins (recursive > direct > indirect)
func (tp *TestSuite) TestPinLsPrecedence(t *testing.T) { // Testing precedence of recursive, direct and indirect pins // Results should be recursive > indirect, direct > indirect, and recursive > direct t.Run("TestPinLsPredenceRecursiveIndirect", tp.TestPinLsPredenceRecursiveIndirect) t.Run("TestPinLsPrecedenceDirectIndirect", tp.TestPinLsPrecedenceDirectIndirect) t.Run("TestPinLsPrecedenceRecursiveDirect", tp.TestPinLsPrecedenceRecursiveDirect) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (tp *TestSuite) TestPinLsIndirect(t *testing.T) {\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\tapi, err := tp.makeAPI(ctx)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tleaf, parent, grandparent := getThreeChainedNodes(t, ctx, api, \"foo\")\n\n\terr = api.Pin().Add(ctx, path.IpldPath(grandparent.Cid()))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\terr = api.Pin().Add(ctx, path.IpldPath(parent.Cid()), opt.Pin.Recursive(false))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tassertPinTypes(t, ctx, api, []cidContainer{grandparent}, []cidContainer{parent}, []cidContainer{leaf})\n}", "func assertPinLsAllConsistency(t *testing.T, ctx context.Context, api iface.CoreAPI) {\n\tt.Helper()\n\tallPins, err := accPins(api.Pin().Ls(ctx))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttype pinTypeProps struct {\n\t\t*cid.Set\n\t\topt.PinLsOption\n\t}\n\n\tall, recursive, direct, indirect := cid.NewSet(), cid.NewSet(), cid.NewSet(), cid.NewSet()\n\ttypeMap := map[string]*pinTypeProps{\n\t\t\"recursive\": {recursive, opt.Pin.Ls.Recursive()},\n\t\t\"direct\": {direct, opt.Pin.Ls.Direct()},\n\t\t\"indirect\": {indirect, opt.Pin.Ls.Indirect()},\n\t}\n\n\tfor _, p := range allPins {\n\t\tif !all.Visit(p.Path().Cid()) {\n\t\t\tt.Fatalf(\"pin ls returned the same cid multiple times\")\n\t\t}\n\n\t\ttypeStr := p.Type()\n\t\tif typeSet, ok := typeMap[p.Type()]; ok {\n\t\t\ttypeSet.Add(p.Path().Cid())\n\t\t} else {\n\t\t\tt.Fatalf(\"unknown pin type: %s\", typeStr)\n\t\t}\n\t}\n\n\tfor typeStr, pinProps := range typeMap {\n\t\tpins, err := accPins(api.Pin().Ls(ctx, pinProps.PinLsOption))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\tif expected, actual := len(pins), pinProps.Set.Len(); expected != actual {\n\t\t\tt.Fatalf(\"pin ls all has %d pins of type %s, but pin ls for the type has %d\", expected, typeStr, actual)\n\t\t}\n\n\t\tfor _, p := range pins {\n\t\t\tif pinType := p.Type(); pinType != typeStr {\n\t\t\t\tt.Fatalf(\"returned wrong pin type: expected %s, got %s\", typeStr, pinType)\n\t\t\t}\n\n\t\t\tif c := p.Path().Cid(); !pinProps.Has(c) {\n\t\t\t\tt.Fatalf(\"%s expected to be in pin ls all as type %s\", c.String(), typeStr)\n\t\t\t}\n\t\t}\n\t}\n}", "func testAllowAllPrecedenceIngress() []*TestStep {\n\tbuilder := &NetworkPolicySpecBuilder{}\n\tbuilder = builder.SetName(\"x\", \"deny-all\").SetPodSelector(map[string]string{\"pod\": \"a\"})\n\tbuilder.SetTypeIngress()\n\tbuilder.AddIngress(v1.ProtocolTCP, &p80, nil, nil, nil, map[string]string{}, nil, nil, nil)\n\n\tpolicy1 := builder.Get()\n\treachability1 := NewReachability(allPods, true)\n\treachability1.ExpectAllIngress(Pod(\"x/a\"), false)\n\treachability1.Expect(Pod(\"x/a\"), Pod(\"x/a\"), true)\n\n\tbuilder2 := &NetworkPolicySpecBuilder{}\n\t// by preserving the same name, this policy will also serve to test the 'updated policy' scenario.\n\tbuilder2 = builder2.SetName(\"x\", \"allow-all\").SetPodSelector(map[string]string{\"pod\": \"a\"})\n\tbuilder2.SetTypeIngress()\n\tbuilder2.AddIngress(v1.ProtocolTCP, &p80, nil, nil, nil, nil, nil, nil, nil)\n\n\tpolicy2 := builder2.Get()\n\treachability2 := NewReachability(allPods, true)\n\n\treturn []*TestStep{\n\t\t{\n\t\t\t\"Port 81\",\n\t\t\treachability1,\n\t\t\tpolicy1,\n\t\t\tp81,\n\t\t\t0,\n\t\t},\n\t\t{\n\t\t\t\"Port 80\",\n\t\t\treachability2,\n\t\t\tpolicy2,\n\t\t\tp80,\n\t\t\t0,\n\t\t},\n\t}\n}", "func TestBuildPrecedence(t *testing.T) {\n\tfor _, vs := range 
[]string{\n\t\t\"1.0.0-alpha+001\",\n\t\t\"1.0.0+20130313144700\",\n\t\t\"1.0.0-beta+exp.sha.5114f85\",\n\t} {\n\t\tv, err := Parse(vs)\n\t\tif err != nil {\n\t\t\tt.Errorf(`Couldn't parse version string \"%s\": %s`,\n\t\t\t\tvs, err)\n\t\t\tcontinue\n\t\t}\n\t\tnormal := Version{v.Major, v.Minor, v.Patch, v.PreRelease, nil}\n\t\tif Less(normal, v) {\n\t\t\tt.Errorf(`Failed assertion: !(\"%s\" < \"%s)\"`,\n\t\t\t\tnormal, v)\n\t\t}\n\t\tif Less(v, normal) {\n\t\t\tt.Errorf(`Failed assertion: !(\"%s\" < \"%s)\"`,\n\t\t\t\tv, normal)\n\t\t}\n\t}\n}", "func (p *parser) precedence(lhs Node, minP int) Node {\n\tlook := p.peek()\n\tfor isOperator(look.typ) && precedence[look.typ] >= minP {\n\t\top := p.next()\n\t\trhs := p.primary()\n\t\tlook = p.peek()\n\t\t// right-associative\n\t\tfor isOperator(look.typ) && precedence[look.typ] >= precedence[op.typ] {\n\t\t\trhs = p.precedence(rhs, precedence[look.typ])\n\t\t\tlook = p.peek()\n\t\t}\n\t\tlhs = newBinary(op, lhs, rhs)\n\t}\n\treturn lhs\n}", "func TestSuggestSwaps(t *testing.T) {\n\tsingleChannel := []lndclient.ChannelInfo{\n\t\tchannel1,\n\t}\n\n\texpectedAmt := btcutil.Amount(10000)\n\tprepay, routing := testPPMFees(defaultFeePPM, testQuote, expectedAmt)\n\n\ttests := []struct {\n\t\tname string\n\t\tchannels []lndclient.ChannelInfo\n\t\trules map[lnwire.ShortChannelID]*SwapRule\n\t\tpeerRules map[route.Vertex]*SwapRule\n\t\tsuggestions *Suggestions\n\t\terr error\n\t}{\n\t\t{\n\t\t\tname: \"no rules\",\n\t\t\tchannels: singleChannel,\n\t\t\trules: map[lnwire.ShortChannelID]*SwapRule{},\n\t\t\terr: ErrNoRules,\n\t\t},\n\t\t{\n\t\t\tname: \"loop out\",\n\t\t\tchannels: singleChannel,\n\t\t\trules: map[lnwire.ShortChannelID]*SwapRule{\n\t\t\t\tchanID1: chanRule,\n\t\t\t},\n\t\t\tsuggestions: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"no rule for channel\",\n\t\t\tchannels: singleChannel,\n\t\t\trules: map[lnwire.ShortChannelID]*SwapRule{\n\t\t\t\tchanID2: {\n\t\t\t\t\tThresholdRule: NewThresholdRule(10, 10),\n\t\t\t\t\tType: swap.TypeOut,\n\t\t\t\t},\n\t\t\t},\n\t\t\tsuggestions: &Suggestions{\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"multiple peer rules\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\t{\n\t\t\t\t\tPubKeyBytes: peer1,\n\t\t\t\t\tChannelID: chanID1.ToUint64(),\n\t\t\t\t\tCapacity: 20000,\n\t\t\t\t\tLocalBalance: 8000,\n\t\t\t\t\tRemoteBalance: 12000,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tPubKeyBytes: peer1,\n\t\t\t\t\tChannelID: chanID2.ToUint64(),\n\t\t\t\t\tCapacity: 10000,\n\t\t\t\t\tLocalBalance: 9000,\n\t\t\t\t\tRemoteBalance: 1000,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tPubKeyBytes: peer2,\n\t\t\t\t\tChannelID: chanID3.ToUint64(),\n\t\t\t\t\tCapacity: 5000,\n\t\t\t\t\tLocalBalance: 2000,\n\t\t\t\t\tRemoteBalance: 3000,\n\t\t\t\t},\n\t\t\t},\n\t\t\tpeerRules: map[route.Vertex]*SwapRule{\n\t\t\t\tpeer1: {\n\t\t\t\t\tThresholdRule: NewThresholdRule(80, 0),\n\t\t\t\t\tType: swap.TypeOut,\n\t\t\t\t},\n\t\t\t\tpeer2: {\n\t\t\t\t\tThresholdRule: NewThresholdRule(40, 50),\n\t\t\t\t\tType: swap.TypeOut,\n\t\t\t\t},\n\t\t\t},\n\t\t\tsuggestions: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\t{\n\t\t\t\t\t\tAmount: expectedAmt,\n\t\t\t\t\t\tOutgoingChanSet: 
loopdb.ChannelSet{\n\t\t\t\t\t\t\tchanID1.ToUint64(),\n\t\t\t\t\t\t\tchanID2.ToUint64(),\n\t\t\t\t\t\t},\n\t\t\t\t\t\tMaxPrepayRoutingFee: prepay,\n\t\t\t\t\t\tMaxSwapRoutingFee: routing,\n\t\t\t\t\t\tMaxMinerFee: scaleMaxMinerFee(\n\t\t\t\t\t\t\tscaleMinerFee(testQuote.MinerFee),\n\t\t\t\t\t\t),\n\t\t\t\t\t\tMaxSwapFee: testQuote.SwapFee,\n\t\t\t\t\t\tMaxPrepayAmount: testQuote.PrepayAmount,\n\t\t\t\t\t\tSweepConfTarget: defaultConfTarget,\n\t\t\t\t\t\tInitiator: autoloopSwapInitiator,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: map[route.Vertex]Reason{\n\t\t\t\t\tpeer2: ReasonLiquidityOk,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, testCase := range tests {\n\t\ttestCase := testCase\n\n\t\tt.Run(testCase.name, func(t *testing.T) {\n\t\t\tcfg, lnd := newTestConfig()\n\n\t\t\tlnd.Channels = testCase.channels\n\n\t\t\tparams := defaultParameters\n\t\t\tparams.AutoloopBudgetLastRefresh = testBudgetStart\n\t\t\tif testCase.rules != nil {\n\t\t\t\tparams.ChannelRules = testCase.rules\n\t\t\t}\n\n\t\t\tif testCase.peerRules != nil {\n\t\t\t\tparams.PeerRules = testCase.peerRules\n\t\t\t}\n\n\t\t\ttestSuggestSwaps(\n\t\t\t\tt, newSuggestSwapsSetup(cfg, lnd, params),\n\t\t\t\ttestCase.suggestions, testCase.err,\n\t\t\t)\n\t\t})\n\t}\n}", "func Test_OtpTestHotpAccuracy(t *testing.T) {\n\thotp := testGetHotp(t)\n\tshift := int64(5)\n\n\tfor _, data := range referenceRunsHotp {\n\t\tdata.hotp.Count = data.hotp.Count - shift - 1\n\t\tsetStateHotp(hotp, data)\n\t\tfor i := -shift; i < shift+1; i++ {\n\t\t\tval, err := data.hotp.Next()\n\t\t\tif err != nil {\n\t\t\t\tt.Error(\"Test fail before running, illigal parameters:\", err)\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif i != 0 && val == data.result {\n\t\t\t\tt.Error(\"OTP value did not change when the seed counter changed\",\n\t\t\t\t\t\"(all other parameters remained identical) :\", data,\n\t\t\t\t\t\"but the OTP calculated value was the same:\", val)\n\t\t\t}\n\t\t\tif i == 0 && val != data.result {\n\t\t\t\tt.Error(\"OTP value modified even though all parameters\",\n\t\t\t\t\t\"as well as the counter value and seed counter are identical: \", data,\n\t\t\t\t\t\"OTP\", val, \"is different from the result OTP:\", data.result)\n\t\t\t}\n\t\t}\n\t}\n}", "func Precedence(symbol string) int {\n\top, found := operatorMap[symbol]\n\tif !found {\n\t\treturn 0\n\t}\n\treturn op.precedence\n}", "func TestWalkPreOrder(t *testing.T) {\n\ttree := New()\n\tfor i := 0; i < 10; i++ {\n\t\ttree.Insert(Int(10 - i))\n\t}\n\ttree.Insert(Int(1))\n\tverify(tree.root, t)\n\twalked := tree.Walk()\n\tfor i := 0; i < len(walked)-1; i++ {\n\t\tif walked[i].(Int).CompareTo(walked[i+1]) == 1 {\n\t\t\tt.Errorf(\"Post order walk out of order results: %v after %v\", walked[i], walked[i+1])\n\t\t}\n\t}\n}", "func TestPointingPairs(t *testing.T) {\n\tinputBoard := []byte(pointingPairsParam)\n\tb := NewBoard(inputBoard)\n\n\tassert.True(t, b.candidates[18].Contains('1'))\n\tassert.True(t, b.candidates[20].Contains('1'))\n\n\tassert.True(t, b.candidates[32].Contains('2'))\n\tassert.True(t, b.candidates[41].Contains('2'))\n\tassert.True(t, b.candidates[50].Contains('2'))\n\tassert.True(t, b.candidates[68].Contains('2'))\n\n\tassert.True(t, b.candidates[32].Contains('3'))\n\tassert.True(t, b.candidates[41].Contains('3'))\n\tassert.True(t, b.candidates[50].Contains('3'))\n\tassert.True(t, b.candidates[68].Contains('3'))\n\n\tassert.True(t, b.candidates[36].Contains('4'))\n\tassert.True(t, 
b.candidates[37].Contains('4'))\n\n\tassert.True(t, b.candidates[39].Contains('6'))\n\n\tassert.True(t, b.candidates[71].Contains('7'))\n\n\tassert.True(t, b.candidates[69].Contains('8'))\n\tassert.True(t, b.candidates[70].Contains('8'))\n\tassert.True(t, b.candidates[71].Contains('8'))\n\n\tassert.True(t, b.candidates[57].Contains('9'))\n\n\tb.PointingPairs()\n\n\tassert.True(t, !b.candidates[18].Contains('1'))\n\tassert.True(t, !b.candidates[20].Contains('1'))\n\n\tassert.True(t, !b.candidates[32].Contains('2'))\n\tassert.True(t, !b.candidates[41].Contains('2'))\n\tassert.True(t, !b.candidates[50].Contains('2'))\n\tassert.True(t, !b.candidates[68].Contains('2'))\n\n\tassert.True(t, !b.candidates[32].Contains('3'))\n\tassert.True(t, !b.candidates[41].Contains('3'))\n\tassert.True(t, !b.candidates[50].Contains('3'))\n\tassert.True(t, !b.candidates[68].Contains('3'))\n\n\tassert.True(t, !b.candidates[36].Contains('4'))\n\tassert.True(t, !b.candidates[37].Contains('4'))\n\n\tassert.True(t, !b.candidates[39].Contains('6'))\n\n\tassert.True(t, !b.candidates[71].Contains('7'))\n\n\tassert.True(t, !b.candidates[69].Contains('8'))\n\tassert.True(t, !b.candidates[70].Contains('8'))\n\tassert.True(t, !b.candidates[71].Contains('8'))\n\n\tassert.True(t, !b.candidates[57].Contains('9'))\n}", "func TestBinaryTreeLevelOrderTraversalII(t *testing.T) {\n x := &TreeNode{Val: 3, Left: &TreeNode{Val: 9},\n Right: &TreeNode{Val: 20, Left: &TreeNode{Val: 15}, Right: &TreeNode{Val: 7}}}\n var cases = []struct {\n input *TreeNode\n output [][]int\n }{\n {\n input: x,\n output: [][]int{{15,7},{9,20},{3}},\n },\n }\n for _, c := range cases {\n x := levelOrderBottom(c.input)\n if !reflect.DeepEqual(x, c.output) {\n t.Fail()\n }\n }\n}", "func (op Op) precedence() int {\n\tswitch op.OpCode {\n\tcase ImportAltOp:\n\t\treturn 1\n\tcase OrOp:\n\t\treturn 2\n\tcase PlusOp:\n\t\treturn 3\n\tcase TextAppendOp:\n\t\treturn 4\n\tcase ListAppendOp:\n\t\treturn 5\n\tcase AndOp:\n\t\treturn 6\n\tcase RecordMergeOp:\n\t\treturn 7\n\tcase RightBiasedRecordMergeOp:\n\t\treturn 8\n\tcase RecordTypeMergeOp:\n\t\treturn 9\n\tcase TimesOp:\n\t\treturn 10\n\tcase EqOp:\n\t\treturn 11\n\tcase NeOp:\n\t\treturn 12\n\tcase EquivOp:\n\t\treturn 13\n\tcase CompleteOp:\n\t\treturn 14\n\tdefault:\n\t\tpanic(\"unknown opcode\")\n\t}\n}", "func PTEST(mx, x operand.Op) { ctx.PTEST(mx, x) }", "func TestProposal(t *testing.T) {\n\ttests := []struct {\n\t\t*network\n\t\tsuccess bool\n\t}{\n\t\t{newNetwork(nil, nil, nil), true},\n\t\t{newNetwork(nil, nil, nopStepper), true},\n\t\t{newNetwork(nil, nopStepper, nopStepper), false},\n\t\t{newNetwork(nil, nopStepper, nopStepper, nil), false},\n\t\t{newNetwork(nil, nopStepper, nopStepper, nil, nil), true},\n\t}\n\n\tfor j, tt := range tests {\n\t\tsend := func(m pb.Message) {\n\t\t\tdefer func() {\n\t\t\t\t// only recover is we expect it to panic so\n\t\t\t\t// panics we don't expect go up.\n\t\t\t\tif !tt.success {\n\t\t\t\t\te := recover()\n\t\t\t\t\tif e != nil {\n\t\t\t\t\t\tt.Logf(\"#%d: err: %s\", j, e)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}()\n\t\t\ttt.send(m)\n\t\t}\n\n\t\tdefer tt.closeAll()\n\t\tdata := []byte(\"somedata\")\n\n\t\t// promote 0 the leader\n\t\tsend(pb.Message{From: 1, To: 1, Type: pb.MsgHup})\n\t\tsend(pb.Message{From: 1, To: 1, Type: pb.MsgProp, Entries: []pb.Entry{{Data: data}}})\n\n\t\twantLog := newLog(NewMemoryStorage(), raftLogger)\n\t\tif tt.success {\n\t\t\twantLog = &raftLog{\n\t\t\t\tstorage: newInitedMemoryStorage(\n\t\t\t\t\t[]pb.Entry{{}, {Data: nil, Term: 1, Index: 1}, 
{Term: 1, Index: 2, Data: data}},\n\t\t\t\t),\n\t\t\t\tunstable: unstable{offset: 3},\n\t\t\t\tcommitted: 2}\n\t\t}\n\t\tdefer wantLog.storage.(IExtRaftStorage).Close()\n\t\tbase := ltoa(wantLog)\n\t\tfor i, p := range tt.peers {\n\t\t\tif sm, ok := p.(*raft); ok {\n\t\t\t\tl := ltoa(sm.raftLog)\n\t\t\t\tif g := diffu(base, l); g != \"\" {\n\t\t\t\t\tt.Errorf(\"#%d: diff:\\n%s\", i, g)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tt.Logf(\"#%d: empty log\", i)\n\t\t\t}\n\t\t}\n\t\tsm := tt.network.peers[1].(*raft)\n\t\tif g := sm.Term; g != 1 {\n\t\t\tt.Errorf(\"#%d: term = %d, want %d\", j, g, 1)\n\t\t}\n\t}\n}", "func PSRLO(i, x operand.Op) { ctx.PSRLO(i, x) }", "func lspTests(t testing.TB, ctx context.Context, c *jsonrpc2.Conn, root *gituri.URI, wantHover, wantDefinition, wantXDefinition map[string]string, wantReferences, wantSymbols map[string][]string, wantXDependencies string, wantXReferences map[*lsext.WorkspaceReferencesParams][]string, wantXPackages []string) {\n\tfor pos, want := range wantHover {\n\t\ttbRun(t, fmt.Sprintf(\"hover-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\thoverTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor pos, want := range wantDefinition {\n\t\ttbRun(t, fmt.Sprintf(\"definition-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\tdefinitionTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\tfor pos, want := range wantXDefinition {\n\t\ttbRun(t, fmt.Sprintf(\"xdefinition-%s\", strings.Replace(pos, \"/\", \"-\", -1)), func(t testing.TB) {\n\t\t\txdefinitionTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor pos, want := range wantReferences {\n\t\ttbRun(t, fmt.Sprintf(\"references-%s\", pos), func(t testing.TB) {\n\t\t\treferencesTest(t, ctx, c, root, pos, want)\n\t\t})\n\t}\n\n\tfor query, want := range wantSymbols {\n\t\ttbRun(t, fmt.Sprintf(\"symbols(q=%q)\", query), func(t testing.TB) {\n\t\t\tsymbolsTest(t, ctx, c, root, query, want)\n\t\t})\n\t}\n\n\tif wantXDependencies != \"\" {\n\t\ttbRun(t, fmt.Sprintf(\"xdependencies-\"+wantXDependencies), func(t testing.TB) {\n\t\t\tvar deps []lspext.DependencyReference\n\t\t\terr := c.Call(ctx, \"workspace/xdependencies\", struct{}{}, &deps)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tjsonTest(t, deps, \"xdependencies-\"+wantXDependencies)\n\t\t})\n\t}\n\n\tfor params, want := range wantXReferences {\n\t\ttbRun(t, fmt.Sprintf(\"xreferences\"), func(t testing.TB) {\n\t\t\tworkspaceReferencesTest(t, ctx, c, root, *params, want)\n\t\t})\n\t}\n\n\tif wantXPackages != nil {\n\t\ttbRun(t, \"xpackages\", func(t testing.TB) {\n\t\t\tworkspacePackagesTest(t, ctx, c, root, wantXPackages)\n\t\t})\n\t}\n}", "func (s *mergeBaseSuite) TestIndependentBeyondShortcut(c *C) {\n\trevs := []string{\"S\", \"G\", \"P\"}\n\texpectedRevs := []string{\"S\", \"G\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func VPTESTNMQ(ops ...operand.Op) { ctx.VPTESTNMQ(ops...) 
}", "func TestPreciseSigOps(t *testing.T) {\n\tfor _, test := range detailedTests {\n\t\tcount := btcscript.GetPreciseSigOpCount(\n\t\t\t[]byte{btcscript.OP_1}, test.script, false)\n\t\tif count != test.nPreciseSigOps {\n\t\t\tt.Errorf(\"%s: expected count of %d, got %d\", test.name,\n\t\t\t\ttest.nPreciseSigOps, count)\n\n\t\t}\n\t}\n}", "func canMakeTwoTopOneLeft(p int) bool {\n\tif p <= 8 {\n\t\treturn false\n\t}\n\n\tswitch p {\n\tcase 16, 24, 32, 40, 48, 56, 64, 15, 23, 31, 39, 47, 55, 63:\n\t\treturn false\n\t}\n\treturn true\n}", "func TestPreciseSigOps(t *testing.T) {\n\tt.Parallel()\n\n\tfor _, test := range detailedTests {\n\t\tcount := txscript.GetPreciseSigOpCount(\n\t\t\t[]byte{txscript.OP_1}, test.script, false)\n\t\tif count != test.nPreciseSigOps {\n\t\t\tt.Errorf(\"%s: expected count of %d, got %d\", test.name,\n\t\t\t\ttest.nPreciseSigOps, count)\n\n\t\t}\n\t}\n}", "func testPortsPoliciesStackedOrUpdated() []*TestStep {\n\tblocked := func() *Reachability {\n\t\tr := NewReachability(allPods, true)\n\t\tr.ExpectAllIngress(Pod(\"x/a\"), false)\n\t\tr.Expect(Pod(\"x/a\"), Pod(\"x/a\"), true)\n\t\treturn r\n\t}\n\n\tunblocked := func() *Reachability {\n\t\treturn NewReachability(allPods, true)\n\t}\n\n\t/***\n\tInitially, only allow port 80, and verify 81 is blocked.\n\t*/\n\tpolicyName := \"policy-that-will-update-for-ports\"\n\tbuilder := &NetworkPolicySpecBuilder{}\n\tbuilder = builder.SetName(\"x\", policyName).SetPodSelector(map[string]string{\"pod\": \"a\"})\n\tbuilder.SetTypeIngress()\n\tbuilder.AddIngress(v1.ProtocolTCP, &p80, nil, nil, nil, nil, nil, nil, nil)\n\tpolicy1 := builder.Get()\n\n\tbuilder2 := &NetworkPolicySpecBuilder{}\n\t// by preserving the same name, this policy will also serve to test the 'updated policy' scenario.\n\tbuilder2 = builder2.SetName(\"x\", policyName).SetPodSelector(map[string]string{\"pod\": \"a\"})\n\tbuilder2.SetTypeIngress()\n\tbuilder2.AddIngress(v1.ProtocolTCP, &p81, nil, nil, nil, nil, nil, nil, nil)\n\tpolicy2 := builder2.Get()\n\n\t// The first policy was on port 80, which was allowed, while 81 wasn't.\n\t// The second policy was on port 81, which was allowed.\n\t// At this point, if we stacked, make sure 80 is still unblocked\n\t// Whereas if we DIDNT stack, make sure 80 is blocked.\n\treturn []*TestStep{\n\t\t{\n\t\t\t\"Port 81 -- blocked\",\n\t\t\tblocked(), // 81 blocked\n\t\t\tpolicy1,\n\t\t\t81,\n\t\t\t0,\n\t\t},\n\t\t{\n\t\t\t\"Port 81 -- unblocked\",\n\t\t\tunblocked(), // 81 open now\n\t\t\tpolicy2,\n\t\t\t81,\n\t\t\t0,\n\t\t},\n\t\t{\n\t\t\t\"Port 80 -- blocked\",\n\t\t\tblocked(),\n\t\t\tpolicy2,\n\t\t\t80,\n\t\t\t0,\n\t\t},\n\t}\n}", "func (suite *DetectorTestSuite) TestConfigureTwoByPrio() {\n\tone := registerMock(\"one\", NodeRuntime)\n\tone.On(\"Detect\").Return(nil).Once()\n\ttwo := registerMock(\"two\", NodeOrchestrator)\n\ttwo.On(\"Detect\").Return(nil).Once()\n\n\td := NewDetector(\"\")\n\tassert.Len(suite.T(), d.candidates, 2)\n\tassert.Len(suite.T(), d.detected, 0)\n\n\tc, n, err := d.GetPreferred()\n\tassert.NoError(suite.T(), err)\n\tassert.Equal(suite.T(), \"two\", n)\n\tassert.Equal(suite.T(), two, c)\n\n\tone.AssertNumberOfCalls(suite.T(), \"Detect\", 1)\n\ttwo.AssertNumberOfCalls(suite.T(), \"Detect\", 1)\n\tassert.Nil(suite.T(), d.candidates)\n\tassert.Nil(suite.T(), d.detected)\n}", "func TestTreeOrder(t *testing.T) {\n\tc := NewTree2(NoPad32bytes, minus).(*treeDigest)\n\texpect := int32(0)\n\tfor i := int32(0); i < 100; i++ {\n\t\tn := i // n is the value of the i'th input, any function of i should 
pass test\n\t\tdata := H256{uint32(n)}\n\t\tc.Write(data.ToBytes())\n\t\tans := int32(FromBytes(c.Sum(nil))[0])\n\t\tif evenBits(uint32(i)) {\n\t\t\texpect += n\n\t\t} else {\n\t\t\texpect -= n\n\t\t}\n\t\tif ans != expect {\n\t\t\tt.Fatalf(\"%v,%v> expect:%v != got:%v\", i, n, expect, ans)\n\t\t}\n\t}\n}", "func VPTESTMQ(ops ...operand.Op) { ctx.VPTESTMQ(ops...) }", "func (s *mergeBaseSuite) TestIndependentChangingOrder(c *C) {\n\trevs := []string{\"A^^^\", \"A^\", \"A^^\", \"A\", \"N\"}\n\texpectedRevs := []string{\"A\", \"N\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func (s *mergeBaseSuite) TestIndependentChangingOrderRepetition(c *C) {\n\trevs := []string{\"A\", \"A^\", \"A\", \"N\", \"N^\"}\n\texpectedRevs := []string{\"A\", \"N\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func Test_OtpTestHotpNextNew(t *testing.T) {\n\thotp := testGetHotp(t)\n\tfuncCall := [](func() (string, error)){hotp.New, hotp.Next}\n\n\tfor i, f := range funcCall {\n\t\tfor _, data := range referenceRunsHotp {\n\t\t\tsetStateHotp(hotp, data)\n\t\t\tval, err := f()\n\t\t\tif val == data.result {\n\t\t\t\tval, err = f() // give it another chance\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tt.Error(\"Test fail before running, illigal parameters:\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif val == data.result {\n\t\t\t\tt.Error(\"OTP value did not change when the seed counter changed\",\n\t\t\t\t\t\"(all other parameters remained identical). Using function:\", i, \"(0: .New(), 1: .Next())) must return different OTP values:\", data,\n\t\t\t\t\t\"but the OTP calculated value was the same\", val)\n\t\t\t} else {\n\t\t\t\t//\tt.Log(\"OTP calculated value for different seed due to .next/.new call was changed (as expected)\")\n\t\t\t}\n\t\t}\n\t}\n}", "func TestIPv4Routes(t *testing.T) {\n\tctx := Setup(t)\n\tdefer ctx.Teardown()\n\n\tconst (\n\t\t// first subnet\n\t\tmsName1 = \"microservice1\"\n\t\tsubnet1 = \"10.0.0.0/24\"\n\t\ttap1IP = \"10.0.0.1\"\n\t\tlinuxTap1IP = \"10.0.0.2\"\n\t\ttap1Label = \"tap-1\"\n\n\t\t// second subnet\n\t\tmsName2 = \"microservice2\"\n\t\tsubnet2 = \"20.0.0.0/24\"\n\t\ttap2IP = \"20.0.0.1\"\n\t\tlinuxTap2IP = \"20.0.0.2\"\n\t\ttap2Label = \"tap-2\"\n\n\t\tsuffix = \"/24\"\n\t)\n\n\t// TAP interface for the first subnet\n\tvppTap1 := &vpp_interfaces.Interface{\n\t\tName: tap1Label,\n\t\tType: vpp_interfaces.Interface_TAP,\n\t\tEnabled: true,\n\t\tIpAddresses: []string{tap1IP + suffix},\n\t\tLink: &vpp_interfaces.Interface_Tap{\n\t\t\tTap: &vpp_interfaces.TapLink{\n\t\t\t\tVersion: 2,\n\t\t\t\tToMicroservice: MsNamePrefix + msName1,\n\t\t\t},\n\t\t},\n\t}\n\tlinuxTap1 := &linux_interfaces.Interface{\n\t\tName: tap1Label,\n\t\tType: linux_interfaces.Interface_TAP_TO_VPP,\n\t\tEnabled: true,\n\t\tIpAddresses: []string{linuxTap1IP + suffix},\n\t\tLink: &linux_interfaces.Interface_Tap{\n\t\t\tTap: &linux_interfaces.TapLink{\n\t\t\t\tVppTapIfName: tap1Label,\n\t\t\t},\n\t\t},\n\t\tNamespace: &linux_namespace.NetNamespace{\n\t\t\tType: linux_namespace.NetNamespace_MICROSERVICE,\n\t\t\tReference: MsNamePrefix + msName1,\n\t\t},\n\t}\n\n\t// TAP interfaces for the second subnet\n\tvppTap2 := &vpp_interfaces.Interface{\n\t\tName: tap2Label,\n\t\tType: vpp_interfaces.Interface_TAP,\n\t\tEnabled: true,\n\t\tIpAddresses: []string{tap2IP + suffix},\n\t\tLink: &vpp_interfaces.Interface_Tap{\n\t\t\tTap: &vpp_interfaces.TapLink{\n\t\t\t\tVersion: 2,\n\t\t\t\tToMicroservice: MsNamePrefix + msName2,\n\t\t\t},\n\t\t},\n\t}\n\tlinuxTap2 := &linux_interfaces.Interface{\n\t\tName: 
tap2Label,\n\t\tType: linux_interfaces.Interface_TAP_TO_VPP,\n\t\tEnabled: true,\n\t\tIpAddresses: []string{linuxTap2IP + suffix},\n\t\tLink: &linux_interfaces.Interface_Tap{\n\t\t\tTap: &linux_interfaces.TapLink{\n\t\t\t\tVppTapIfName: tap2Label,\n\t\t\t},\n\t\t},\n\t\tNamespace: &linux_namespace.NetNamespace{\n\t\t\tType: linux_namespace.NetNamespace_MICROSERVICE,\n\t\t\tReference: MsNamePrefix + msName2,\n\t\t},\n\t}\n\n\t// Routes\n\tsubnet1LinuxRoute := &linux_l3.Route{\n\t\tOutgoingInterface: tap1Label,\n\t\tScope: linux_l3.Route_GLOBAL,\n\t\tDstNetwork: subnet2,\n\t\tGwAddr: tap1IP,\n\t}\n\tsubnet2LinuxRoute := &linux_l3.Route{\n\t\tOutgoingInterface: tap2Label,\n\t\tScope: linux_l3.Route_GLOBAL,\n\t\tDstNetwork: subnet1,\n\t\tGwAddr: tap2IP,\n\t}\n\tsubnet2LinuxLinkRoute := &linux_l3.Route{\n\t\tOutgoingInterface: tap2Label,\n\t\tScope: linux_l3.Route_LINK,\n\t\tDstNetwork: subnet1,\n\t}\n\n\tctx.StartMicroservice(msName1)\n\tctx.StartMicroservice(msName2)\n\n\t// configure everything in one resync\n\terr := ctx.GenericClient().ResyncConfig(\n\t\tvppTap1, linuxTap1,\n\t\tvppTap2, linuxTap2,\n\t\tsubnet1LinuxRoute, subnet2LinuxRoute,\n\t)\n\tctx.Expect(err).ToNot(HaveOccurred())\n\n\tctx.Eventually(ctx.GetValueStateClb(vppTap1)).Should(Equal(kvscheduler.ValueState_CONFIGURED))\n\tctx.Expect(ctx.GetValueState(linuxTap1)).To(Equal(kvscheduler.ValueState_CONFIGURED))\n\tctx.Expect(ctx.GetValueState(vppTap2)).To(Equal(kvscheduler.ValueState_CONFIGURED))\n\tctx.Expect(ctx.GetValueState(linuxTap2)).To(Equal(kvscheduler.ValueState_CONFIGURED))\n\tctx.Expect(ctx.GetValueState(subnet1LinuxRoute)).To(Equal(kvscheduler.ValueState_CONFIGURED))\n\tctx.Expect(ctx.GetValueState(subnet2LinuxRoute)).To(Equal(kvscheduler.ValueState_CONFIGURED))\n\n\tctx.Expect(ctx.GetRunningMicroservice(msName1).Ping(\"20.0.0.2\")).To(Succeed())\n\tctx.Expect(ctx.GetRunningMicroservice(msName2).Ping(\"10.0.0.2\")).To(Succeed())\n\n\t// keep the current number of routes before the update\n\tnumLinuxRoutes := ctx.NumValues(&linux_l3.Route{}, kvs.SBView)\n\n\t// reconfigure subnet 1 route as link local\n\terr = ctx.GenericClient().ChangeRequest().Update(\n\t\tsubnet2LinuxLinkRoute,\n\t).Send(context.Background())\n\tctx.Expect(err).ToNot(HaveOccurred())\n\n\tctx.Expect(ctx.GetRunningMicroservice(msName1).Ping(\"20.0.0.2\")).NotTo(Succeed())\n\tctx.Expect(ctx.GetRunningMicroservice(msName2).Ping(\"10.0.0.2\")).NotTo(Succeed())\n\n\t// route count should be unchanged\n\tctx.Expect(ctx.NumValues(&linux_l3.Route{}, kvs.SBView)).To(Equal(numLinuxRoutes))\n}", "func Test_10(t *testing.T){\n\n\troot:=&TreeNode{\n\t\tVal: 1,\n\t\tLeft: &TreeNode{\n\t\t\tVal: 2,\n\t\t\tLeft: &TreeNode{Val: 5},\n\t\t},\n\t\tRight: &TreeNode{\n\t\t\tVal: 10,\n\t\t\tLeft: &TreeNode{Val: 11},\n\t\t\tRight: &TreeNode{Val: 12},\n\t\t},\n\t}\n\tfmt.Println(postorderTraversal(root))\n\n}", "func (t Type) Precedence() [2]int {\n\tswitch t {\n\tcase CARET:\n\t\treturn [2]int{10, 9}\n\tcase ASTERISK, SLASH, PERCENT:\n\t\treturn [2]int{7, 7}\n\tcase PLUS, MINUS:\n\t\treturn [2]int{6, 6}\n\tcase CONCAT:\n\t\treturn [2]int{5, 4}\n\tcase LT, GT, LEQ, GEQ, NEQ, EQ:\n\t\treturn [2]int{3, 3}\n\tcase AND:\n\t\treturn [2]int{2, 2}\n\tcase OR:\n\t\treturn [2]int{1, 1}\n\t}\n\treturn [2]int{0, 0}\n}", "func PSRLL(imx, x operand.Op) { ctx.PSRLL(imx, x) }", "func TESTL(ir, emr operand.Op) { ctx.TESTL(ir, emr) }", "func TestReversePairs(t *testing.T) {\n\tif x := reversePairs([]int{2, 4, 1, 3, 5}); x != 3 {\n\t\tt.Log(x)\n\t\tt.Fatal(\"reversePairs([]int{2, 4, 1, 3, 5}) 
!= 3\")\n\t}\n\n\tif x := reversePairs([]int{1, 2, 3, 4}); x != 0 {\n\t\tt.Log(x)\n\t\tt.Fatal(\"reversePairs([]int{1, 2, 3, 4}) != 0\")\n\t}\n\n\tif x := reversePairs([]int{4, 3, 2, 1}); x != 6 {\n\t\tt.Log(x)\n\t\tt.Fatal(\"reversePairs([]int{4,3,2,1}) != 6\")\n\t}\n\n\tif x := reversePairs([]int{2, 3, 1, 55, 6, 4, 7, 3, 0}); x != 18 {\n\t\tt.Log(x)\n\t\tt.Fatal(\"reversePairs([]int{2,3,1,55,6,4,7,3,0}) != 18\")\n\t}\n}", "func CMPPS(mx, x, i operand.Op) { ctx.CMPPS(mx, x, i) }", "func TestLambdaAnyNestedProperties(t *testing.T) {\n\tinput := \"Config/any(var:var/Config/Priority eq 123)\"\n\ttokens, err := GlobalFilterTokenizer.Tokenize(input)\n\tif err != nil {\n\t\tt.Error(err)\n\t\treturn\n\t}\n\toutput, err := GlobalFilterParser.InfixToPostfix(tokens)\n\tif err != nil {\n\t\tt.Error(err)\n\t\treturn\n\t}\n\ttree, err := GlobalFilterParser.PostfixToTree(output)\n\tif err != nil {\n\t\tt.Error(err)\n\t\treturn\n\t}\n\n\tvar expect []expectedParseNode = []expectedParseNode{\n\t\t{\"/\", 0},\n\t\t{\"Config\", 1},\n\t\t{\"any\", 1},\n\t\t{\"var\", 2},\n\t\t{\"eq\", 2},\n\t\t{\"/\", 3},\n\t\t{\"/\", 4},\n\t\t{\"var\", 5},\n\t\t{\"Config\", 5},\n\t\t{\"Priority\", 4},\n\t\t{\"123\", 3},\n\t}\n\tpos := 0\n\terr = CompareTree(tree, expect, &pos, 0)\n\tif err != nil {\n\t\tprintTree(tree)\n\t\tt.Errorf(\"Tree representation does not match expected value. error: %s\", err.Error())\n\t}\n}", "func TestEvaluateRPNInsuficientOperand(t *testing.T) {\n\t// tokens to be tokenized\n\ttokens := []evaluator.TokenWithValue{\n\t\tevaluator.ValueToken(token.INT, 1),\n\t\tevaluator.OperatorToken(token.ADD),\n\t}\n\n\t// value map used during evaluation\n\tvar values = make(map[string]int)\n\n\t// evaluate expression represented as sequence of tokens in RPN order\n\t_, err := evaluator.EvaluateRPN(tokens, values)\n\n\t// check the output -> error needs to be detected\n\tassert.Error(t, err)\n}", "func TestEndpointCase84(t *testing.T) {\n\tvar params = EndpointParameters{\n\t\tRegion: ptr.String(\"us-iso-west-1\"),\n\t\tUseFIPS: ptr.Bool(true),\n\t\tUseDualStack: ptr.Bool(true),\n\t\tConsumerARN: ptr.String(\"arn:aws-iso:kinesis:us-iso-west-1:123456789012:stream/testStream/consumer/test-consumer:1525898737\"),\n\t\tOperationType: ptr.String(\"control\"),\n\t}\n\n\tresolver := NewDefaultEndpointResolverV2()\n\tresult, err := resolver.ResolveEndpoint(context.Background(), params)\n\t_, _ = result, err\n\n\tif err == nil {\n\t\tt.Fatalf(\"expect error, got none\")\n\t}\n\tif e, a := \"FIPS and DualStack are enabled, but this partition does not support one or both\", err.Error(); !strings.Contains(a, e) {\n\t\tt.Errorf(\"expect %v error in %v\", e, a)\n\t}\n}", "func TestICMP(t *testing.T) {\n\tif os.Getuid() != 0 {\n\t\tt.Logf(\"test disabled; must be root\")\n\t\treturn\n\t}\n\n\tvar (\n\t\tladdr *IPAddr\n\t\terr os.Error\n\t)\n\tif *srchost != \"\" {\n\t\tladdr, err = ResolveIPAddr(\"ip4\", *srchost)\n\t\tif err != nil {\n\t\t\tt.Fatalf(`net.ResolveIPAddr(\"ip4\", %v\") = %v, %v`, *srchost, laddr, err)\n\t\t}\n\t}\n\n\traddr, err := ResolveIPAddr(\"ip4\", *dsthost)\n\tif err != nil {\n\t\tt.Fatalf(`net.ResolveIPAddr(\"ip4\", %v\") = %v, %v`, *dsthost, raddr, err)\n\t}\n\n\tc, err := ListenIP(\"ip4:icmp\", laddr)\n\tif err != nil {\n\t\tt.Fatalf(`net.ListenIP(\"ip4:icmp\", %v) = %v, %v`, *srchost, c, err)\n\t}\n\n\tsendid := os.Getpid() & 0xffff\n\tconst sendseq = 61455\n\tconst pingpktlen = 128\n\tsendpkt := makePingRequest(sendid, sendseq, pingpktlen, []byte(\"Go Go Gadget Ping!!!\"))\n\n\tn, err := 
c.WriteToIP(sendpkt, raddr)\n\tif err != nil || n != pingpktlen {\n\t\tt.Fatalf(`net.WriteToIP(..., %v) = %v, %v`, raddr, n, err)\n\t}\n\n\tc.SetTimeout(100e6)\n\tresp := make([]byte, 1024)\n\tfor {\n\t\tn, from, err := c.ReadFrom(resp)\n\t\tif err != nil {\n\t\t\tt.Fatalf(`ReadFrom(...) = %v, %v, %v`, n, from, err)\n\t\t}\n\t\tif resp[0] != ICMP_ECHO_REPLY {\n\t\t\tcontinue\n\t\t}\n\t\trcvid, rcvseq := parsePingReply(resp)\n\t\tif rcvid != sendid || rcvseq != sendseq {\n\t\t\tt.Fatalf(`Ping reply saw id,seq=0x%x,0x%x (expected 0x%x, 0x%x)`, rcvid, rcvseq, sendid, sendseq)\n\t\t}\n\t\treturn\n\t}\n\tt.Fatalf(\"saw no ping return\")\n}", "func PSRAL(imx, x operand.Op) { ctx.PSRAL(imx, x) }", "func TestResolvePolicyNo1(t *testing.T) {\n\tconst (\n\t\tconfigName, secretName = \"all-cfg-map-test-redis\", \"all-secret-test-redis\"\n\t\tkeyNo1, keyNo2, keyNo3 = \"keyNo1\", \"keyNo2\", \"keyNo3\"\n\t\tnamespace = \"test-ns\"\n\t)\n\n\ttype given struct {\n\t\tconfigData configMapData\n\t\tsecretData secretData\n\t\tbindYAML string\n\t}\n\ttype expected struct {\n\t\tcredentials internal.InstanceCredentials\n\t}\n\tfor tn, tc := range map[string]struct {\n\t\tgiven\n\t\texpected\n\t}{\n\t\t\"secret overrides configMap values\": {\n\t\t\tgiven: given{\n\t\t\t\tconfigData: configMapData{keyNo1: \"key_1_cfg_val\", keyNo2: \"key_2_cfg_val\"},\n\t\t\t\tsecretData: secretData{keyNo1: []byte(\"key_1_secret_val\"), keyNo3: []byte(\"key_3_secret_val\")},\n\t\t\t\tbindYAML: dedent.Dedent(`\n credentialFrom:\n - configMapRef:\n name: ` + configName + `\n - secretRef:\n name: ` + secretName),\n\t\t\t},\n\t\t\texpected: expected{\n\t\t\t\tcredentials: internal.InstanceCredentials{\n\t\t\t\t\tkeyNo1: \"key_1_secret_val\",\n\t\t\t\t\tkeyNo2: \"key_2_cfg_val\",\n\t\t\t\t\tkeyNo3: \"key_3_secret_val\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t\"configMap overrides secret values\": {\n\t\t\tgiven: given{\n\t\t\t\tconfigData: configMapData{keyNo1: \"key_1_cfg_val\", keyNo2: \"key_2_cfg_val\"},\n\t\t\t\tsecretData: secretData{keyNo1: []byte(\"key_1_secret_val\"), keyNo3: []byte(\"key_3_secret_val\")},\n\t\t\t\tbindYAML: dedent.Dedent(`\n\t\t credentialFrom:\n\t\t - secretRef:\n\t\t name: ` + secretName + `\n\t\t - configMapRef:\n\t\t name: ` + configName),\n\t\t\t},\n\t\t\texpected: expected{\n\t\t\t\tcredentials: internal.InstanceCredentials{\n\t\t\t\t\tkeyNo1: \"key_1_cfg_val\",\n\t\t\t\t\tkeyNo2: \"key_2_cfg_val\",\n\t\t\t\t\tkeyNo3: \"key_3_secret_val\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t} {\n\t\tt.Run(tn, func(t *testing.T) {\n\t\t\t// given\n\t\t\tts := newResolverTestSuit()\n\t\t\tvar (\n\t\t\t\tconfigMap = ts.configMap(namespace, \"all-cfg-map-test-redis\", tc.given.configData)\n\t\t\t\tsecret = ts.secret(namespace, \"all-secret-test-redis\", tc.given.secretData)\n\t\t\t\tfakeClient = fake.NewSimpleClientset(&configMap, &secret)\n\t\t\t\tresolver = bind.NewResolver(fakeClient.CoreV1())\n\t\t\t)\n\n\t\t\t// when\n\t\t\tout, err := resolver.Resolve(bind.RenderedBindYAML(tc.given.bindYAML), internal.Namespace(namespace))\n\n\t\t\t// then\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.EqualValues(t, tc.expected.credentials, out.Credentials)\n\t\t\tassert.Len(t, fakeClient.Actions(), 2)\n\t\t})\n\t}\n}", "func TestCheckBinaryExprIntLssInt(t *testing.T) {\n\tenv := MakeSimpleEnv()\n\n\texpectConst(t, `4 < 4`, env, (4 < 4), ConstBool)\n}", "func testSuggestSwaps(t *testing.T, setup *testSuggestSwapsSetup,\n\texpected *Suggestions, expectedErr error) {\n\n\tt.Parallel()\n\n\t// If our setup struct is nil, we replace it with 
our default test\n\t// values.\n\tif setup == nil {\n\t\tcfg, lnd := newTestConfig()\n\n\t\tlnd.Channels = []lndclient.ChannelInfo{\n\t\t\tchannel1, channel2,\n\t\t}\n\n\t\tparams := defaultParameters\n\t\tparams.AutoloopBudgetLastRefresh = testBudgetStart\n\t\tparams.ChannelRules = map[lnwire.ShortChannelID]*SwapRule{\n\t\t\tchanID1: chanRule,\n\t\t\tchanID2: chanRule,\n\t\t}\n\n\t\tsetup = &testSuggestSwapsSetup{\n\t\t\tcfg: cfg,\n\t\t\tlnd: lnd,\n\t\t\tparams: params,\n\t\t}\n\t}\n\n\t// Create a new manager, get our current set of parameters and update\n\t// them to use the rules set by the test.\n\tmanager := NewManager(setup.cfg)\n\n\terr := manager.setParameters(context.Background(), setup.params)\n\trequire.NoError(t, err)\n\n\tactual, err := manager.SuggestSwaps(context.Background())\n\trequire.Equal(t, expectedErr, err)\n\trequire.Equal(t, expected, actual)\n}", "func TestRxp(t *testing.T) {\n\tconst fname = \"Rxp\"\n\tvar r [3][3]float64\n\tvar p [3]float64\n\n\tr[0][0] = 2.0\n\tr[0][1] = 3.0\n\tr[0][2] = 2.0\n\n\tr[1][0] = 3.0\n\tr[1][1] = 2.0\n\tr[1][2] = 3.0\n\n\tr[2][0] = 3.0\n\tr[2][1] = 4.0\n\tr[2][2] = 5.0\n\n\tp[0] = 0.2\n\tp[1] = 1.5\n\tp[2] = 0.1\n\n\ttests := []struct {\n\t\tref string\n\t\tfn func([3][3]float64, [3]float64) [3]float64\n\t}{\n\t\t{\"cgo\", CgoRxp},\n\t\t{\"go\", GoRxp},\n\t}\n\tfor _, test := range tests {\n\t\ttname := fname + \" \" + test.ref\n\t\trp := test.fn(r, p)\n\n\t\tvvd(t, rp[0], 5.1, 1e-12, tname, \"1\")\n\t\tvvd(t, rp[1], 3.9, 1e-12, tname, \"2\")\n\t\tvvd(t, rp[2], 7.1, 1e-12, tname, \"3\")\n\t}\n}", "func PrecedenceWalk(root string, precedence []string, walkFn filepath.WalkFunc) error {\n\tinfo, err := os.Lstat(root)\n\tif err != nil {\n\t\terr = walkFn(root, nil, err)\n\t} else {\n\t\terr = walk(root, precedence, info, walkFn)\n\t}\n\tif err == filepath.SkipDir {\n\t\treturn nil\n\t}\n\treturn err\n}", "func hasHigherOrEqualPrecedence(a *Node, b *Node) bool {\n\treturn b.Type.Precedence < a.Type.Precedence ||\n\t\t(b.Type.Precedence == a.Type.Precedence &&\n\t\t\ta.Type.Associate == \"left\")\n}", "func TestPreReleasePrecedence(t *testing.T) {\n\tfor _, vs := range []string{\n\t\t\"1.0.0-alpha\",\n\t\t\"1.0.0-alpha.1\",\n\t\t\"1.0.0-0.3.7\",\n\t\t\"1.0.0-x.7.z.92\",\n\t} {\n\t\tv, err := Parse(vs)\n\t\tif err != nil {\n\t\t\tt.Errorf(`Couldn't parse version string \"%s\": %s`,\n\t\t\t\tvs, err)\n\t\t\tcontinue\n\t\t}\n\t\tnormal := Version{v.Major, v.Minor, v.Patch, nil, nil}\n\t\tif !Less(v, normal) {\n\t\t\tt.Errorf(`Failed assertion: \"%s\" < \"%s\"`, v, normal)\n\t\t}\n\t}\n}", "func (s *mergeBaseSuite) TestIndependentWithPairOfAncestors(c *C) {\n\trevs := []string{\"C\", \"D\", \"M\", \"N\"}\n\texpectedRevs := []string{\"C\", \"D\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func (s *MovesSuite) TestPawnTakesPinned() {\n\tmoves := s.validateMovesByFEN(\n\t\t\"8/8/8/8/8/K3P2q/8/8 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n\n\tmoves = s.validateMovesByFEN(\n\t\t\"8/8/8/8/5q2/8/3P4/2K5 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n\n\t// can take if it clears the pin\n\tmoves = s.validateMovesByFEN(\n\t\t\"8/8/8/8/5q2/4P3/8/2K5 w - - 0 1\",\n\t\tengine.TT(\"e3\"),\n\t\t[]engine.Tile{\n\t\t\tengine.TT(\"f4\"),\n\t\t},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 1, len(moves))\n}", "func TestInstructionOr(t *testing.T) 
{\n\tchipCfg := GetDefaultConfig()\n\tchip, _, _ := NewCHIP8(chipCfg)\n\n\tchip.Reg[0x0] = 0xaa\n\tchip.Reg[0x1] = 0x55\n\n\tchip.WriteShort(0x200, 0x8011)\n\tchip.WriteShort(0x202, 0x8231)\n\n\tvar tests = []struct {\n\t\tPC uint16\n\t\tregIdx uint8\n\t\tregVal uint8\n\t}{\n\t\t{0x202, 0x0, 0xff},\n\t\t{0x204, 0x2, 0x00},\n\t}\n\n\tfor i, want := range tests {\n\t\tchip.StepEmulation()\n\n\t\tif chip.PC != want.PC {\n\t\t\tt.Errorf(\"test %d: chip.PC = 0x%x; want 0x%x\", i, chip.PC, want.PC)\n\t\t}\n\n\t\tif chip.Reg[want.regIdx] != want.regVal {\n\t\t\tt.Errorf(\"test %d: chip.Reg[0x%x] = 0x%x; want 0x%x\", i, want.regIdx, chip.Reg[want.regIdx], want.regVal)\n\t\t}\n\t}\n}", "func TestFindRelativeRanks(t *testing.T) {\n\ttest(t, findRelativeRanks)\n}", "func TestPreVoteWithCheckQuorum(t *testing.T) {\n\tn1 := newTestRaft(1, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\tn2 := newTestRaft(2, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\tn3 := newTestRaft(3, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\n\tn1.becomeFollower(1, None)\n\tn2.becomeFollower(1, None)\n\tn3.becomeFollower(1, None)\n\n\tn1.preVote = true\n\tn2.preVote = true\n\tn3.preVote = true\n\n\tn1.checkQuorum = true\n\tn2.checkQuorum = true\n\tn3.checkQuorum = true\n\n\tnt := newNetwork(n1, n2, n3)\n\tdefer nt.closeAll()\n\tnt.send(pb.Message{From: 1, To: 1, Type: pb.MsgHup})\n\n\t// isolate node 1. node 2 and node 3 have leader info\n\tnt.isolate(1)\n\n\t// check state\n\tsm := nt.peers[1].(*raft)\n\tif sm.state != StateLeader {\n\t\tt.Fatalf(\"peer 1 state: %s, want %s\", sm.state, StateLeader)\n\t}\n\tsm = nt.peers[2].(*raft)\n\tif sm.state != StateFollower {\n\t\tt.Fatalf(\"peer 2 state: %s, want %s\", sm.state, StateFollower)\n\t}\n\tsm = nt.peers[3].(*raft)\n\tif sm.state != StateFollower {\n\t\tt.Fatalf(\"peer 3 state: %s, want %s\", sm.state, StateFollower)\n\t}\n\n\t// node 2 will ignore node 3's PreVote\n\tnt.send(pb.Message{From: 3, To: 3, Type: pb.MsgHup})\n\tnt.send(pb.Message{From: 2, To: 2, Type: pb.MsgHup})\n\n\t// Do we have a leader?\n\tif n2.state != StateLeader && n3.state != StateFollower {\n\t\tt.Errorf(\"no leader\")\n\t}\n}", "func TestFindLogicalRouter(t *testing.T) {\n\n\tnsxClient, teardown := setupTest()\n\tdefer teardown()\n\n\ttype args struct {\n\t\tnsxClient *nsxt.APIClient\n\t\tcallback nsxtapi.RouterSearchHandler\n\t\tsearchVal string\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"should return not found\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"name\"],\n\t\t\t\t\"\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t\"test search by router name\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"name\"],\n\t\t\t\t\"primary-t0\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by uuid name\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"uuid\"],\n\t\t\t\t\"ba95b780-3689-419b-8f20-c7179e05813f\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by edge id\",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"edgeid\"],\n\t\t\t\t\"133fe9a7-2e87-409a-b1b3-406ab5833986\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"test search by type \",\n\t\t\targs{\n\t\t\t\t&nsxClient,\n\t\t\t\tnsxtapi.RouterCallback[\"type\"],\n\t\t\t\t\"TIER1\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := nsxtapi.FindLogicalRouter(tt.args.nsxClient, tt.args.callback, 
tt.args.searchVal)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"FindLogicalRouter() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfor _, v := range got {\n\t\t\t\tif err == nil {\n\t\t\t\t\tt.Log(\"Found router id \", v.Id, \" name\", v.DisplayName, \" type \", v.RouterType, \"edge id\", v.EdgeClusterId)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func (s *MovesSuite) TestPawnMovePinned() {\n\tmoves := s.validateMovesByFEN(\n\t\t\"8/8/8/8/8/K3P2q/8/8 w - - 0 1\",\n\t\tengine.T(4, 2),\n\t\t[]engine.Tile{},\n\t)\n\n\t// can move 2 pieces forward\n\tassert.Equal(s.T(), 0, len(moves))\n}", "func TestAddColonLst(t *testing.T) {\n\tf := New(nil, nil)\n\tf.AddWord(\": nop ;\")\n\tgood := []uint16{}\n\tfor _, v := range []string{\"CALL\", \":\", \";\"} {\n\t\taa, _ := f.Addr(v)\n\t\tgood = append(good, aa)\n\t}\n\tgood[0] = 2\n\tstart, _ := f.Addr(\"nop\")\n\tcmp := []uint16{f.WordPtr(start), f.WordPtr(start + 2), f.WordPtr(start + 4)}\n\tif !Uint16sEqual(cmp, good) {\n\t\tt.Fatal(\"code of nop wasn't\", good, \"but was\", cmp)\n\t}\n}", "func (api *PinAPI) pinLsAll(ctx context.Context, typeStr string) <-chan coreiface.Pin {\n\tout := make(chan coreiface.Pin, 1)\n\n\temittedSet := cid.NewSet()\n\n\tAddToResultKeys := func(c cid.Cid, typeStr string) error {\n\t\tif emittedSet.Visit(c) {\n\t\t\tselect {\n\t\t\tcase out <- &pinInfo{\n\t\t\t\tpinType: typeStr,\n\t\t\t\tpath: path.IpldPath(c),\n\t\t\t}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn ctx.Err()\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tgo func() {\n\t\tdefer close(out)\n\n\t\tvar rkeys []cid.Cid\n\t\tvar err error\n\t\tif typeStr == \"recursive\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"recursive\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"direct\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"direct\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" {\n\t\t\t// We need to first visit the direct pins that have priority\n\t\t\t// without emitting them\n\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t}\n\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" || typeStr == \"all\" {\n\t\t\twalkingSet := cid.NewSet()\n\t\t\tfor _, k := range rkeys {\n\t\t\t\terr = merkledag.Walk(\n\t\t\t\t\tctx, merkledag.GetLinksWithDAG(api.dag), k,\n\t\t\t\t\tfunc(c cid.Cid) bool {\n\t\t\t\t\t\tif !walkingSet.Visit(c) {\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif emittedSet.Has(c) {\n\t\t\t\t\t\t\treturn true // skipped\n\t\t\t\t\t\t}\n\t\t\t\t\t\terr := AddToResultKeys(c, 
\"indirect\")\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn true\n\t\t\t\t\t},\n\t\t\t\t\tmerkledag.SkipRoot(), merkledag.Concurrent(),\n\t\t\t\t)\n\t\t\t\tif err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn out\n}", "func (op Token) Precedence() int {\n\tswitch op {\n\tcase LOR:\n\t\treturn 1\n\tcase LAND:\n\t\treturn 2\n\tcase EQL, NEQ, LSS, LEQ, GTR, GEQ:\n\t\treturn 3\n\tcase ADD, SUB, OR, XOR:\n\t\treturn 4\n\tcase MUL, QUO, REM, SHL, SHR, AND, AND_NOT:\n\t\treturn 5\n\t}\n\treturn LowestPrec\n}", "func TestGetTuplesCmd(t *testing.T) {\n\tbaseArgs := []string{debugCmdString, getTuplesCmdString}\n\tstandardArgs := concatArgs(baseArgs, srcFlag, testIP1, dstFlag, testIP2)\n\n\ttests := []*testCases{\n\t\t{\n\t\t\tname: \"no src or dst\",\n\t\t\targs: baseArgs,\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no src\",\n\t\t\targs: concatArgs(baseArgs, dstFlag, testIP2),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"no dst\",\n\t\t\targs: concatArgs(baseArgs, srcFlag, testIP1),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"unknown shorthand flag\",\n\t\t\targs: concatArgs(standardArgs, unknownShorthandFlag),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"iptables save file but no cache file\",\n\t\t\targs: concatArgs(standardArgs, iptablesSaveFileFlag, iptableSaveFile),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"iptables save file but bad cache file\",\n\t\t\targs: concatArgs(standardArgs, iptablesSaveFileFlag, iptableSaveFile, npmCacheFlag, nonExistingFile),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"cache file but no iptables save file\",\n\t\t\targs: concatArgs(standardArgs, npmCacheFlag, npmCacheFile),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"cache file but bad iptables save file\",\n\t\t\targs: concatArgs(standardArgs, iptablesSaveFileFlag, nonExistingFile, npmCacheFlag, npmCacheFile),\n\t\t\twantErr: true,\n\t\t},\n\t\t{\n\t\t\tname: \"correct files\",\n\t\t\targs: concatArgs(standardArgs, iptablesSaveFileFlag, iptableSaveFile, npmCacheFlag, npmCacheFile),\n\t\t\twantErr: false,\n\t\t},\n\t\t{\n\t\t\tname: \"correct files with file order switched\",\n\t\t\targs: concatArgs(standardArgs, npmCacheFlag, npmCacheFile, iptablesSaveFileFlag, iptableSaveFile),\n\t\t\twantErr: false,\n\t\t},\n\t\t{\n\t\t\tname: \"src/dst after files\",\n\t\t\targs: concatArgs(baseArgs, npmCacheFlag, npmCacheFile, iptablesSaveFileFlag, iptableSaveFile, srcFlag, testIP1, dstFlag, testIP2),\n\t\t\twantErr: false,\n\t\t},\n\t\t{\n\t\t\tname: \"shorthand flags before command\",\n\t\t\targs: []string{debugCmdString, srcFlag, testIP1, dstFlag, testIP2, iptablesSaveFileFlag, iptableSaveFile, npmCacheFlag, npmCacheFile, getTuplesCmdString},\n\t\t\twantErr: false,\n\t\t},\n\t}\n\n\ttestCommand(t, tests)\n}", "func TestProposedNewWithPortedCases(t *testing.T) {\n\ttestAttributes := map[string]rschema.Attribute{\n\t\t\"optional\": rschema.StringAttribute{\n\t\t\tOptional: true,\n\t\t},\n\t\t\"computed\": rschema.StringAttribute{\n\t\t\tComputed: true,\n\t\t},\n\t\t\"optional_computed\": rschema.StringAttribute{\n\t\t\tComputed: true,\n\t\t\tOptional: true,\n\t\t},\n\t\t\"required\": rschema.StringAttribute{\n\t\t\tRequired: true,\n\t\t},\n\t}\n\n\ttests := map[string]testcase{\n\n\t\t\"empty\": {\n\t\t\tFromResourceSchema(rschema.Schema{Attributes: 
map[string]rschema.Attribute{}}),\n\t\t\tprim(nil),\n\t\t\tprim(nil),\n\t\t\tprim(nil),\n\t\t},\n\n\t\t\"no prior\": (func() testcase {\n\t\t\tschema := rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"foo\": rschema.StringAttribute{\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"bloop\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"baz\": rschema.SingleNestedBlock{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"boz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\"biz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\n\t\t\tconfig := obj(\n\t\t\t\tfield(\"foo\", prim(\"hello\")),\n\t\t\t\tfield(\"bloop\", prim(nil)),\n\t\t\t\tfield(\"bar\", prim(nil)),\n\t\t\t\tfield(\"baz\", obj(\n\t\t\t\t\tfield(\"boz\", prim(\"world\")),\n\n\t\t\t\t\t// An unknown in the config represents a situation where\n\t\t\t\t\t// an argument is explicitly set to an expression result\n\t\t\t\t\t// that is derived from an unknown value. This is distinct\n\t\t\t\t\t// from leaving it null, which allows the provider itself\n\t\t\t\t\t// to decide the value during PlanResourceChange.\n\t\t\t\t\tfield(\"biz\", prim(tftypes.UnknownValue)),\n\t\t\t\t)),\n\t\t\t)\n\n\t\t\twant := obj(\n\t\t\t\tfield(\"foo\", prim(\"hello\")),\n\n\t\t\t\t// unset computed attributes are null in the proposal; provider\n\t\t\t\t// usually changes them to \"unknown\" during PlanResourceChange,\n\t\t\t\t// to indicate that the value will be decided during apply.\n\t\t\t\tfield(\"bar\", prim(nil)),\n\t\t\t\tfield(\"bloop\", prim(nil)),\n\t\t\t\tfield(\"baz\", obj(\n\t\t\t\t\tfield(\"boz\", prim(\"world\")),\n\t\t\t\t\tfield(\"biz\", prim(tftypes.UnknownValue)), // explicit unknown preserved from config\n\t\t\t\t)),\n\t\t\t)\n\n\t\t\treturn testcase{\n\t\t\t\tSchema: FromResourceSchema(schema),\n\t\t\t\tPrior: prim(nil),\n\t\t\t\tConfig: config,\n\t\t\t\tWant: want,\n\t\t\t}\n\t\t})(),\n\n\t\t\"null block remains null\": (func() testcase {\n\t\t\tschema := rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"foo\": rschema.StringAttribute{\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"bloop\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"baz\": rschema.SingleNestedBlock{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"boz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\t\t\tconfig := obj(\n\t\t\t\tfield(\"foo\", prim(\"bar\")),\n\t\t\t\tfield(\"bloop\", prim(nil)),\n\t\t\t\tfield(\"baz\", prim(nil)),\n\t\t\t)\n\n\t\t\t// The bloop attribute and baz block does not exist in the config, and therefore 
shouldn't be\n\t\t\t// planned.\n\t\t\twant := obj(\n\t\t\t\tfield(\"foo\", prim(\"bar\")),\n\t\t\t\tfield(\"bloop\", prim(nil)),\n\t\t\t\tfield(\"baz\", prim(nil)),\n\t\t\t)\n\t\t\treturn testcase{\n\t\t\t\tSchema: FromResourceSchema(schema),\n\t\t\t\tPrior: prim(nil),\n\t\t\t\tConfig: config,\n\t\t\t\tWant: want,\n\t\t\t}\n\t\t})(),\n\n\t\t\"no prior with set\": (func() testcase {\n\t\t\t// This one is here because our handling of sets is more complex than others (due to the fuzzy\n\t\t\t// correlation heuristic) and historically that caused us some panic-related grief.\n\t\t\tschema := rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"baz\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"boz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\t\t\tconfig := obj(\n\t\t\t\tfield(\"baz\", set(obj(field(\"boz\", prim(\"world\"))))),\n\t\t\t\tfield(\"bloop\", set(obj(field(\"blop\", prim(\"blub\"))))),\n\t\t\t)\n\t\t\twant := obj(\n\t\t\t\tfield(\"baz\", set(obj(field(\"boz\", prim(\"world\"))))),\n\t\t\t\tfield(\"bloop\", set(obj(field(\"blop\", prim(\"blub\"))))),\n\t\t\t)\n\t\t\treturn testcase{\n\t\t\t\tSchema: FromResourceSchema(schema),\n\t\t\t\tPrior: prim(nil),\n\t\t\t\tConfig: config,\n\t\t\t\tWant: want,\n\t\t\t}\n\t\t})(),\n\n\t\t\"prior attributes\": (func() testcase {\n\t\t\tschema := rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"foo\": rschema.StringAttribute{\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"baz\": rschema.StringAttribute{\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"boz\": rschema.StringAttribute{\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"bloop\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}\n\t\t\tprior := obj(\n\t\t\t\tfield(\"foo\", prim(\"bonjour\")),\n\t\t\t\tfield(\"bar\", prim(\"petit dejeuner\")),\n\t\t\t\tfield(\"baz\", prim(\"grande dejeuner\")),\n\t\t\t\tfield(\"boz\", prim(\"a la monde\")),\n\t\t\t\tfield(\"bloop\", obj(field(\"blop\", prim(\"glub\")))),\n\t\t\t)\n\t\t\tconfig := obj(\n\t\t\t\tfield(\"foo\", prim(\"hello\")),\n\t\t\t\tfield(\"bar\", prim(nil)),\n\t\t\t\tfield(\"baz\", prim(nil)),\n\t\t\t\tfield(\"boz\", prim(\"world\")),\n\t\t\t\tfield(\"bloop\", obj(field(\"blop\", prim(\"bleep\")))),\n\t\t\t)\n\t\t\twant := obj(\n\t\t\t\tfield(\"foo\", prim(\"hello\")),\n\t\t\t\tfield(\"bar\", prim(\"petit dejeuner\")),\n\t\t\t\tfield(\"baz\", prim(\"grande dejeuner\")),\n\t\t\t\tfield(\"boz\", 
prim(\"world\")),\n\t\t\t\tfield(\"bloop\", obj(field(\"blop\", prim(\"bleep\")))),\n\t\t\t)\n\t\t\treturn testcase{\n\t\t\t\tSchema: FromResourceSchema(schema),\n\t\t\t\tPrior: prior,\n\t\t\t\tConfig: config,\n\t\t\t\tWant: want,\n\t\t\t}\n\t\t})(),\n\n\t\t\"prior nested single\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.SingleNestedBlock{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\"baz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\"bleep\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": prim(\"beep\"),\n\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t}),\n\n\t\t\t\t\"bloop\": object(map[string]valueBuilder{\n\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t}),\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": prim(\"bap\"),\n\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t}),\n\t\t\t\t\"bloop\": object(map[string]valueBuilder{\n\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t\"bleep\": prim(\"beep\"),\n\t\t\t\t}),\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": prim(\"bap\"),\n\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t}),\n\t\t\t\t\"bloop\": object(map[string]valueBuilder{\n\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t\"bleep\": prim(\"beep\"),\n\t\t\t\t}),\n\t\t\t}),\n\t\t},\n\n\t\t\"prior nested list\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.ListNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\"baz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.ListNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"beep\"),\n\t\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t\t\"bloop\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": 
prim(\"bar\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"baz\"),\n\t\t\t\t\t}),\n\t\t\t\t),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"bap\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"blep\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"bar\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"baz\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"bap\"),\n\t\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"blep\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": list(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"bar\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"baz\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"prior nested map\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.MapNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"bloop\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"a\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\t\"b\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"blub\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"bloop\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"a\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\t\"c\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"blub\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"bloop\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"a\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\t\"c\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"blub\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\t\t},\n\t\t\"prior nested set\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\t// This non-computed attribute will serve\n\t\t\t\t\t\t\t\t\t// as our matching key for propagating\n\t\t\t\t\t\t\t\t\t// \"baz\" from elements in the prior value.\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\"baz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAttributes: 
map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"blop\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\"bleep\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"beep\"),\n\t\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"blep\"),\n\t\t\t\t\t\t\"baz\": prim(\"boot\"),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glubglub\"),\n\t\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glubglub\"),\n\t\t\t\t\t\t\"bleep\": prim(\"beep\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"beep\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"bosh\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glubglub\"),\n\t\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"beep\"),\n\t\t\t\t\t\t\"baz\": prim(\"boop\"),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"bosh\"),\n\t\t\t\t\t\t\"baz\": prim(nil),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glubglub\"),\n\t\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": prim(\"glub\"),\n\t\t\t\t\t\t\"bleep\": prim(nil),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"sets differing only by unknown\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"multi\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"optional\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"bloop\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"blop\": schema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tprim(nil),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"multi\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": 
unk(),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": unk(),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": unk(),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": unk(),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"multi\": set(\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": unk(),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": unk(),\n\t\t\t\t\t})),\n\n\t\t\t\t\"bloop\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": unk(),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"blop\": unk(),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"nested list in set\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\t\t\t\"bar\": rschema.ListNestedBlock{\n\t\t\t\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\t\t\t\"baz\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t\t\t\t\t\"qux\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"baz\": prim(\"beep\"),\n\t\t\t\t\t\t\t\t\"qux\": prim(\"boop\"),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"baz\": prim(\"beep\"),\n\t\t\t\t\t\t\t\t\"qux\": prim(nil),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"baz\": prim(\"beep\"),\n\t\t\t\t\t\t\t\t\"qux\": prim(\"boop\"),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"empty nested list in set\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\t\t\t\"bar\": rschema.ListNestedBlock{\n\t\t\t\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\t\t\t\tBlocks: map[string]rschema.Block{},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": 
set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": list(),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t// Could not port empty nested map in set since tfsdk.BlockNestingModeMap is not supported, substituting\n\t\t// an empty object instead.\n\t\t\"empty nested object in set\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\"foo\": rschema.SetNestedBlock{\n\t\t\t\t\t\tNestedObject: rschema.NestedBlockObject{\n\t\t\t\t\t\t\tBlocks: map[string]rschema.Block{\n\t\t\t\t\t\t\t\t\"bar\": rschema.SingleNestedBlock{\n\t\t\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\t\t\"baz\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": object(nil),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": object(nil),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": object(nil),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t// This example has a mixture of optional, computed and required in a deeply-nested NestedType attribute\n\t\t\"deeply NestedType\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"foo\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"bar\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\t\t\tAttributes: testAttributes,\n\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\"baz\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\t\t\tAttributes: testAttributes,\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": prim(nil),\n\t\t\t\t\t\"baz\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": prim(nil),\n\t\t\t\t\t\t\"computed\": prim(\"hello\"),\n\t\t\t\t\t\t\"optional_computed\": prim(\"prior\"),\n\t\t\t\t\t\t\"required\": prim(\"present\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": unk(),\n\n\t\t\t\t\t\"baz\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": prim(nil),\n\t\t\t\t\t\t\"computed\": prim(nil),\n\t\t\t\t\t\t\"optional_computed\": prim(\"hello\"),\n\t\t\t\t\t\t\"required\": prim(\"present\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": object(map[string]valueBuilder{\n\t\t\t\t\t\"bar\": unk(),\n\n\t\t\t\t\t\"baz\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"optional\": prim(nil),\n\t\t\t\t\t\t\"computed\": prim(\"hello\"),\n\t\t\t\t\t\t\"optional_computed\": prim(\"hello\"),\n\t\t\t\t\t\t\"required\": prim(\"present\"),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\t\t},\n\n\t\t\"deeply nested set\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"foo\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: 
map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.SetNestedAttribute{\n\t\t\t\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\t\t\t\tAttributes: testAttributes,\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"optional\": prim(\"prior\"),\n\t\t\t\t\t\t\t\t\"computed\": prim(\"prior\"),\n\t\t\t\t\t\t\t\t\"optional_computed\": prim(\"prior\"),\n\t\t\t\t\t\t\t\t\"required\": prim(\"prior\"),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(\"other_prior\"),\n\t\t\t\t\t\t\t\"computed\": prim(\"other_prior\"),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"other_prior\"),\n\t\t\t\t\t\t\t\"required\": prim(\"other_prior\"),\n\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(\"configured\"),\n\t\t\t\t\t\t\t\"computed\": prim(nil),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"configured\"),\n\t\t\t\t\t\t\t\"required\": prim(\"configured\"),\n\t\t\t\t\t\t})),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(nil),\n\t\t\t\t\t\t\t\"computed\": prim(nil),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"other_configured\"),\n\t\t\t\t\t\t\t\"required\": prim(\"other_configured\"),\n\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"foo\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(\"configured\"),\n\t\t\t\t\t\t\t\"computed\": prim(nil),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"configured\"),\n\t\t\t\t\t\t\t\"required\": prim(\"configured\"),\n\t\t\t\t\t\t})),\n\t\t\t\t\t}),\n\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": set(object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(nil),\n\t\t\t\t\t\t\t\"computed\": prim(nil),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"other_configured\"),\n\t\t\t\t\t\t\t\"required\": prim(\"other_configured\"),\n\t\t\t\t\t\t})),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"expected null NestedTypes\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"single\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"list\": rschema.ListNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"set\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": 
rschema.StringAttribute{},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"map\": rschema.MapNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"nested_map\": rschema.MapNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"inner\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\t\t\t\tAttributes: testAttributes,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"single\": object(map[string]valueBuilder{\"bar\": prim(\"baz\")}),\n\t\t\t\t\"list\": list(object(map[string]valueBuilder{\"bar\": prim(\"baz\")})),\n\t\t\t\t\"map\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"map_entry\": object(map[string]valueBuilder{\"bar\": prim(\"baz\")}),\n\t\t\t\t}),\n\t\t\t\t\"set\": set(object(map[string]valueBuilder{\"bar\": prim(\"baz\")})),\n\t\t\t\t\"nested_map\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"a\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"inner\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"optional\": prim(\"foo\"),\n\t\t\t\t\t\t\t\"computed\": prim(\"foo\"),\n\t\t\t\t\t\t\t\"optional_computed\": prim(\"foo\"),\n\t\t\t\t\t\t\t\"required\": prim(\"foo\"),\n\t\t\t\t\t\t}),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"single\": prim(nil),\n\t\t\t\t\"list\": prim(nil),\n\t\t\t\t\"map\": prim(nil),\n\t\t\t\t\"set\": prim(nil),\n\t\t\t\t\"nested_map\": prim(nil),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"single\": prim(nil),\n\t\t\t\t\"list\": prim(nil),\n\t\t\t\t\"map\": prim(nil),\n\t\t\t\t\"set\": prim(nil),\n\t\t\t\t\"nested_map\": prim(nil),\n\t\t\t}),\n\t\t},\n\n\t\t\"expected empty NestedTypes\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"set\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"map\": rschema.MapNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"map\": mapv(nil),\n\t\t\t\t\"set\": set(),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"map\": mapv(nil),\n\t\t\t\t\"set\": set(),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"map\": mapv(nil),\n\t\t\t\t\"set\": set(),\n\t\t\t}),\n\t\t},\n\n\t\t\"optional types set replacement\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"set\": rschema.SetNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"bar\": rschema.StringAttribute{\n\t\t\t\t\t\t\t\t\tRequired: 
true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"set\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"old\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"set\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"new\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"set\": set(\n\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\"bar\": prim(\"new\"),\n\t\t\t\t\t})),\n\t\t\t}),\n\t\t},\n\n\t\t\"prior null nested objects\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"single\": rschema.SingleNestedAttribute{\n\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\"list\": rschema.ListNestedAttribute{\n\t\t\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\t\t\"foo\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t\t\"map\": rschema.MapNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"list\": rschema.ListNestedAttribute{\n\t\t\t\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\t\t\t\"foo\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tprim(nil),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"single\": object(map[string]valueBuilder{\n\t\t\t\t\t\"list\": list(\n\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"foo\": prim(\"a\"),\n\t\t\t\t\t\t}),\n\n\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"foo\": prim(\"b\"),\n\t\t\t\t\t\t})),\n\t\t\t\t}),\n\n\t\t\t\t\"map\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"one\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"list\": list(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"foo\": prim(\"a\"),\n\t\t\t\t\t\t\t}),\n\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"foo\": prim(\"b\"),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\n\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\"single\": object(map[string]valueBuilder{\n\t\t\t\t\t\"list\": list(\n\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"foo\": prim(\"a\"),\n\t\t\t\t\t\t}),\n\n\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\"foo\": prim(\"b\"),\n\t\t\t\t\t\t})),\n\t\t\t\t}),\n\n\t\t\t\t\"map\": mapv(map[string]valueBuilder{\n\t\t\t\t\t\"one\": object(map[string]valueBuilder{\n\t\t\t\t\t\t\"list\": list(\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"foo\": prim(\"a\"),\n\t\t\t\t\t\t\t}),\n\n\t\t\t\t\t\t\tobject(map[string]valueBuilder{\n\t\t\t\t\t\t\t\t\"foo\": prim(\"b\"),\n\t\t\t\t\t\t\t})),\n\t\t\t\t\t}),\n\t\t\t\t}),\n\t\t\t}),\n\t\t},\n\n\t\t// data sources are planned with an unknown value\n\t\t\"unknown prior nested objects\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"list\": 
rschema.ListNestedAttribute{\n\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\"list\": rschema.ListNestedAttribute{\n\t\t\t\t\t\t\t\t\tNestedObject: rschema.NestedAttributeObject{\n\t\t\t\t\t\t\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\t\t\t\t\t\t\"foo\": rschema.StringAttribute{},\n\t\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t\tComputed: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tunk(),\n\n\t\t\tprim(nil),\n\n\t\t\tunk(),\n\t\t},\n\n\t\t// This test is simple but regresses panis in helper code around sets.\n\t\t\"simple set attribute\": {\n\t\t\tFromResourceSchema(rschema.Schema{\n\t\t\t\tAttributes: map[string]rschema.Attribute{\n\t\t\t\t\t\"set_optional\": rschema.SetAttribute{\n\t\t\t\t\t\tElementType: basetypes.StringType{},\n\t\t\t\t\t\tOptional: true,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}),\n\t\t\tobj(field(\"set_optional\", set(prim(\"input1\")))),\n\t\t\tobj(field(\"set_optional\", set(prim(\"a\"), prim(\"b\"), prim(\"c\")))),\n\t\t\tobj(field(\"set_optional\", set(prim(\"a\"), prim(\"b\"), prim(\"c\")))),\n\t\t},\n\t}\n\n\tfor name, test := range tests {\n\t\tcheckTestCase(t, name, test)\n\t}\n}", "func TestBacktrack(t *testing.T) {\n\tcases := []solverTestCase{\n\t\t{\n\t\t\tdesc: \"Two vars sat (1 1)\",\n\t\t\tinputLines: []string{\n\t\t\t\t\"p cnf 2 3\",\n\t\t\t\t// X1 => X2 and X2 => X1 (so X1 <==> X2)\n\t\t\t\t\"-1 2 0\", \"-2 1 0\", \"1 2 0\",\n\t\t\t},\n\t\t\texpectedSolution: sat([]int{1, 1}),\n\t\t},\n\t\t{\n\t\t\tdesc: \"Two vars sat (0 0)\",\n\t\t\tinputLines: []string{\n\t\t\t\t\"p cnf 2 3\",\n\t\t\t\t\"-1 2 0\", \"-2 1 0\", \"-2 -1 0\",\n\t\t\t},\n\t\t\texpectedSolution: sat([]int{0, 0}),\n\t\t},\n\t\t{\n\t\t\tdesc: \"Two vars sat (0 1)\",\n\t\t\tinputLines: []string{\n\t\t\t\t\"p cnf 2 3\",\n\t\t\t\t\"1 2 0\", \"-1 -2 0\", \"-1 2 0\",\n\t\t\t},\n\t\t\texpectedSolution: sat([]int{0, 1}),\n\t\t},\n\t\t{\n\t\t\tdesc: \"Two vars sat (1 0)\",\n\t\t\tinputLines: []string{\n\t\t\t\t\"p cnf 2 3\",\n\t\t\t\t\"1 2 0\", \"-1 -2 0\", \"1 -2 0\",\n\t\t\t},\n\t\t\texpectedSolution: sat([]int{1, 0}),\n\t\t},\n\t\t{\n\t\t\tdesc: \"2-towers sat\",\n\t\t\tinputLines: []string{\n\t\t\t\t\"c 1 == x11, 2 == x12, 3 == x21, 4 == x22\",\n\t\t\t\t\"c x11 => -x12, x11 => -x21, x12 => -x11, x12 => -x22, etc.\",\n\t\t\t\t\"p cnf 4 10\",\n\t\t\t\t\"-1 -2 0\",\n\t\t\t\t\"-1 -3 0\",\n\t\t\t\t\"-2 -1 0\",\n\t\t\t\t\"-2 -4 0\",\n\t\t\t\t\"-3 -1 0\",\n\t\t\t\t\"-3 -4 0\",\n\t\t\t\t\"-4 -2 0\",\n\t\t\t\t\"-4 -3 0\",\n\t\t\t\t\"1 2 0\",\n\t\t\t\t\"3 4 0\",\n\t\t\t},\n\t\t\texpectedSolution: sat([]int{1, 0, 0, 1}), // {0, 1, 1, 0} also possible\n\t\t},\n\t}\n\tfor _, c := range cases {\n\t\tproblem := inputToProblem(c.inputLines, t)\n\t\tt.Logf(\"Testing problem %v: %v\\n\", c.desc, problem)\n\t\tsolution := Solve(problem)\n\t\tif !equalSolution(solution, c.expectedSolution) {\n\t\t\tt.Errorf(\"Case %q, expected solution %v, but got %v\",\n\t\t\t\tc.desc, c.expectedSolution, solution)\n\t\t}\n\t}\n}", "func TestReferences(t *testing.T) {\n\tt.Parallel()\n\n\ttree := writeTree(t, `\n-- go.mod --\nmodule example.com\ngo 1.18\n\n-- a.go --\npackage a\nimport \"fmt\"\nfunc f() {\n\tfmt.Println()\n}\n\n-- b.go --\npackage a\nimport \"fmt\"\nfunc g() {\n\tfmt.Println()\n}\n`)\n\t// no arguments\n\t{\n\t\tres := gopls(t, tree, \"references\")\n\t\tres.checkExit(false)\n\t\tres.checkStderr(\"expects 1 
argument\")\n\t}\n\t// fmt.Println\n\t{\n\t\tres := gopls(t, tree, \"references\", \"a.go:4:10\")\n\t\tres.checkExit(true)\n\t\tres.checkStdout(\"a.go:4:6-13\")\n\t\tres.checkStdout(\"b.go:4:6-13\")\n\t}\n}", "func shouldSwapWithChildMinUtil(hasRC bool, leftDiff, rightDiff, lcIndex, rcIndex int) (bool, int, error) {\n\tif hasRC {\n\t\tif leftDiff < 0 && rightDiff < 0 {\n\t\t\treturn false, invalidIndex, nil\n\t\t}\n\n\t\tif leftDiff > rightDiff {\n\t\t\treturn true, lcIndex, nil\n\t\t}\n\n\t\treturn true, rcIndex, nil\n\t}\n\n\tif leftDiff < 0 {\n\t\treturn false, invalidIndex, nil\n\t}\n\n\treturn true, lcIndex, nil\n}", "func TestRouterOrder(t *testing.T) {\n\tprefixrouter := v2.Router{}\n\tprefixrouter.Match = v2.RouterMatch{\n\t\tPrefix: \"/foo\",\n\t}\n\tprefixrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"prefix\",\n\t\t},\n\t}\n\tpathrouter := v2.Router{}\n\tpathrouter.Match = v2.RouterMatch{\n\t\tPath: \"/foo1\",\n\t}\n\tpathrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"path\",\n\t\t},\n\t}\n\tregrouter := v2.Router{}\n\tregrouter.Match = v2.RouterMatch{\n\t\tRegex: \"/foo[0-9]+\",\n\t}\n\tregrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"regexp\",\n\t\t},\n\t}\n\t// path \"/foo1\" match all of the router, the path router should be matched\n\t// path \"/foo11\" match prefix and regexp router, the regexp router should be matched\n\t// path \"/foo\" match prefix router only\n\ttestCases := []struct {\n\t\tpath string\n\t\tclustername string\n\t}{\n\t\t{\"/foo1\", \"path\"},\n\t\t{\"/foo11\", \"regexp\"},\n\t\t{\"/foo\", \"prefix\"},\n\t}\n\tvirtualHost, _ := NewVirtualHostImpl(&v2.VirtualHost{\n\t\tName: \"test\",\n\t\tDomains: []string{\"*\"},\n\t\tRouters: []v2.Router{pathrouter, regrouter, prefixrouter},\n\t})\n\tfor i, tc := range testCases {\n\t\theaders := protocol.CommonHeader(map[string]string{\n\t\t\tstrings.ToLower(protocol.MosnHeaderPathKey): tc.path,\n\t\t})\n\t\trt := virtualHost.GetRouteFromEntries(headers, 1)\n\t\tif rt == nil || rt.RouteRule().ClusterName() != tc.clustername {\n\t\t\tt.Errorf(\"#%d route unexpected result\\n\", i)\n\t\t}\n\t}\n\t//prefix router first, only prefix will be matched\n\tprefixVirtualHost, _ := NewVirtualHostImpl(&v2.VirtualHost{\n\t\tName: \"test\",\n\t\tDomains: []string{\"*\"},\n\t\tRouters: []v2.Router{prefixrouter, regrouter, pathrouter},\n\t})\n\tfor i, tc := range testCases {\n\t\theaders := protocol.CommonHeader(map[string]string{\n\t\t\tstrings.ToLower(protocol.MosnHeaderPathKey): tc.path,\n\t\t})\n\t\trt := prefixVirtualHost.GetRouteFromEntries(headers, 1)\n\t\tif rt == nil || rt.RouteRule().ClusterName() != \"prefix\" {\n\t\t\tt.Errorf(\"#%d route unexpected result\\n\", i)\n\t\t}\n\t}\n\n}", "func TestProxyArp(t *testing.T) {\n\tctx, ifIndexes, pArpHandler := pArpTestSetup(t)\n\tdefer ctx.TeardownTestCtx()\n\n\tifIndexes.Put(\"if1\", &ifaceidx.IfaceMetadata{SwIfIndex: 1})\n\n\tctx.MockVpp.MockReply(&vpp_arp.ProxyArpIntfcEnableDisableReply{})\n\terr := pArpHandler.EnableProxyArpInterface(\"if1\")\n\tExpect(err).To(Succeed())\n\n\tctx.MockVpp.MockReply(&vpp_arp.ProxyArpIntfcEnableDisableReply{})\n\terr = pArpHandler.DisableProxyArpInterface(\"if1\")\n\tExpect(err).To(Succeed())\n\n\tctx.MockVpp.MockReply(&vpp_arp.ProxyArpIntfcEnableDisableReply{Retval: 1})\n\terr = pArpHandler.DisableProxyArpInterface(\"if1\")\n\tExpect(err).NotTo(BeNil())\n}", "func 
TestPreVoteWithSplitVote(t *testing.T) {\n\tn1 := newTestRaft(1, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\tn2 := newTestRaft(2, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\tn3 := newTestRaft(3, []uint64{1, 2, 3}, 10, 1, NewMemoryStorage())\n\tdefer closeAndFreeRaft(n1)\n\tdefer closeAndFreeRaft(n2)\n\tdefer closeAndFreeRaft(n3)\n\n\tn1.becomeFollower(1, None)\n\tn2.becomeFollower(1, None)\n\tn3.becomeFollower(1, None)\n\n\tn1.preVote = true\n\tn2.preVote = true\n\tn3.preVote = true\n\n\tnt := newNetwork(n1, n2, n3)\n\tnt.send(pb.Message{From: 1, To: 1, Type: pb.MsgHup})\n\n\t// simulate leader down. followers start split vote.\n\tnt.isolate(1)\n\tnt.send([]pb.Message{\n\t\t{From: 2, To: 2, Type: pb.MsgHup},\n\t\t{From: 3, To: 3, Type: pb.MsgHup},\n\t}...)\n\n\t// check whether the term values are expected\n\t// n2.Term == 3\n\t// n3.Term == 3\n\tsm := nt.peers[2].(*raft)\n\tif sm.Term != 3 {\n\t\tt.Errorf(\"peer 2 term: %d, want %d\", sm.Term, 3)\n\t}\n\tsm = nt.peers[3].(*raft)\n\tif sm.Term != 3 {\n\t\tt.Errorf(\"peer 3 term: %d, want %d\", sm.Term, 3)\n\t}\n\n\t// check state\n\t// n2 == candidate\n\t// n3 == candidate\n\tsm = nt.peers[2].(*raft)\n\tif sm.state != StateCandidate {\n\t\tt.Errorf(\"peer 2 state: %s, want %s\", sm.state, StateCandidate)\n\t}\n\tsm = nt.peers[3].(*raft)\n\tif sm.state != StateCandidate {\n\t\tt.Errorf(\"peer 3 state: %s, want %s\", sm.state, StateCandidate)\n\t}\n\n\t// node 2 election timeout first\n\tnt.send(pb.Message{From: 2, To: 2, Type: pb.MsgHup})\n\n\t// check whether the term values are expected\n\t// n2.Term == 4\n\t// n3.Term == 4\n\tsm = nt.peers[2].(*raft)\n\tif sm.Term != 4 {\n\t\tt.Errorf(\"peer 2 term: %d, want %d\", sm.Term, 4)\n\t}\n\tsm = nt.peers[3].(*raft)\n\tif sm.Term != 4 {\n\t\tt.Errorf(\"peer 3 term: %d, want %d\", sm.Term, 4)\n\t}\n\n\t// check state\n\t// n2 == leader\n\t// n3 == follower\n\tsm = nt.peers[2].(*raft)\n\tif sm.state != StateLeader {\n\t\tt.Errorf(\"peer 2 state: %s, want %s\", sm.state, StateLeader)\n\t}\n\tsm = nt.peers[3].(*raft)\n\tif sm.state != StateFollower {\n\t\tt.Errorf(\"peer 3 state: %s, want %s\", sm.state, StateFollower)\n\t}\n}", "func verifyPointerCaptureRelativeMovement(ctx context.Context, s *testing.State, t pointerCaptureSubtestState) {\n\tif err := ensureRelativeMovement(ctx, t, coords.NewPoint(10, 10)); err != nil {\n\t\ts.Fatal(\"Failed to verify relative movement: \", err)\n\t}\n}", "func main() {\n\ttest32()\n\ttest64()\n}", "func TestEvaluateRPNArithmeticOperation(t *testing.T) {\n\t// tokens to be tokenized\n\ttokens := []evaluator.TokenWithValue{\n\t\t// RPN order (postfix)\n\t\tevaluator.ValueToken(token.INT, 1),\n\t\tevaluator.ValueToken(token.INT, 2),\n\t\tevaluator.OperatorToken(token.ADD),\n\t}\n\n\t// value map used during evaluation\n\tvar values = make(map[string]int)\n\n\t// evaluate expression represented as sequence of tokens in RPN order\n\tstack, err := evaluator.EvaluateRPN(tokens, values)\n\n\t// check the output\n\tassert.NoError(t, err)\n\tassert.False(t, stack.Empty())\n\tassert.Equal(t, stack.Size(), 1)\n\n\tvalue, err := stack.Pop()\n\tassert.NoError(t, err)\n\tassert.Equal(t, value, 3)\n}", "func canMakeTwoLeftOneTop(p int) bool {\n\tif p <= 16 {\n\t\treturn false\n\t}\n\n\tswitch p {\n\tcase 24, 32, 40, 48, 56, 64:\n\t\treturn false\n\t}\n\treturn true\n}", "func TestToRelative(t *testing.T) {\n\tinput := PermuSeq{\n\t\ttrue,\n\t\ttrue, false,\n\t\ttrue, false, true, false,\n\t}\n\texpected := PermuSeq{\n\t\ttrue,\n\t\tfalse, true,\n\t\ttrue, false, false, 
true,\n\t}\n\tresult, err := input.ToRelative()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif len(result) != len(expected) {\n\t\tt.Fatal(\"len != len\")\n\t}\n\n\tfor i, v := range result {\n\t\tif v != expected[i] {\n\t\t\tt.Errorf(\"index %d fails\", i)\n\t\t}\n\t}\n}", "func TestDirectConnectionFlags(t *testing.T) {\n // Test with the equivalent of a single IP address in the -d arg: -d 1.2.3.4\n gDirects = \"1.2.3.4\"\n dirFuncs := buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n ipv4 := \"1.2.3.4\"\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n\n // now make sure an address that should be proxied still works\n ipv4 = \"4.5.6.7\"\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n\n\n // Test with the equivalent of a multiple IP addresses in the -d arg: -d 1.2.3.4,2.3.4.5\n gDirects = \"1.2.3.4,2.3.4.5\"\n dirFuncs = buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n addrsToTest := []string{\"1.2.3.4\", \"2.3.4.5\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n }\n\n // now make sure an address that should be proxied still works\n ipv4 = \"4.5.6.7\"\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n\n\n // Test with the equivalent of multiple IP address specs in the -d arg: -d 1.2.3.0/24,2.3.4.0/25,4.4.4.4\"\n gDirects = \"1.2.3.0/24,2.3.4.0/25,4.4.4.4\"\n dirFuncs = buildDirectors(gDirects)\n director = getDirector(dirFuncs)\n \n addrsToTest = []string{\"1.2.3.4\", \"1.2.3.254\", \"2.3.4.5\", \"4.4.4.4\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ := director(ipv4)\n if wentDirect == false {\n t.Errorf(\"The IP address %s should have been sent direct, but instead was proxied\", ipv4)\n }\n }\n\n // now make sure an address that should be proxied still works\n addrsToTest = []string{\"4.5.6.7\", \"2.3.4.254\"}\n for _,ipv4 = range addrsToTest {\n wentDirect,_ = director(ipv4)\n if wentDirect == true {\n t.Errorf(\"The IP address %s should have been sent to an upstream proxy, but instead was sent directly\", ipv4)\n }\n }\n}", "func (p *Chip) iPLA() (bool, error) {\n\tswitch {\n\tcase p.opTick <= 1 || p.opTick > 4:\n\t\treturn true, InvalidCPUState{fmt.Sprintf(\"PLA invalid opTick %d\", p.opTick)}\n\tcase p.opTick == 2:\n\t\t// Nothing else happens here\n\t\treturn false, nil\n\tcase p.opTick == 3:\n\t\t// A read of the current stack happens while the CPU is incrementing S.\n\t\t// Since our popStack does both of these together on this cycle it's just\n\t\t// a throw away read.\n\t\tp.S--\n\t\t_ = p.popStack()\n\t\treturn false, nil\n\t}\n\t// case p.opTick == 4:\n\t// The real read\n\treturn p.loadRegister(&p.A, p.popStack())\n}", "func TestCheckBinaryExprIntRhlInt(t *testing.T) {\n\tenv := MakeSimpleEnv()\n\n\texpectConst(t, `4 >> 4`, env, NewConstInt64(4 >> 4), ConstInt)\n}", "func TestGraphQLOnCallAssignments(t *testing.T) {\n\tt.Parallel()\n\n\th := harness.NewHarness(t, \"\", \"escalation-policy-step-reorder\")\n\tdefer h.Close()\n\n\tdoQL := func(t *testing.T, silent bool, query string, res interface{}) {\n\t\tg := h.GraphQLQueryT(t, 
query, \"/v1/graphql\")\n\t\tfor _, err := range g.Errors {\n\t\t\tt.Error(\"GraphQL Error:\", err.Message)\n\t\t}\n\t\tif len(g.Errors) > 0 {\n\t\t\tt.Fatal(\"errors returned from GraphQL\")\n\t\t}\n\t\tif !silent {\n\t\t\tt.Log(\"Response:\", string(g.Data))\n\t\t}\n\n\t\tif res == nil {\n\t\t\treturn\n\t\t}\n\t\terr := json.Unmarshal(g.Data, &res)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\ttype asnID struct {\n\t\tSvc, EP, Rot, Sched string\n\t\tStep int\n\t}\n\n\tgetID := func(a resolver.OnCallAssignment) asnID {\n\t\treturn asnID{\n\t\t\tSvc: a.ServiceName,\n\t\t\tEP: a.EPName,\n\t\t\tRot: a.RotationName,\n\t\t\tSched: a.ScheduleName,\n\t\t\tStep: a.Level,\n\t\t}\n\t}\n\n\tvar idCounter int\n\tcheck := func(name, input string, user1OnCall, user2OnCall []resolver.OnCallAssignment) {\n\t\tu1 := h.CreateUser()\n\t\tu2 := h.CreateUser()\n\t\trep := strings.NewReplacer(\n\t\t\t\"generatedA\", fmt.Sprintf(\"generatedA%d\", idCounter),\n\t\t\t\"generatedB\", fmt.Sprintf(\"generatedB%d\", idCounter),\n\t\t)\n\t\tidCounter++\n\n\t\tfor i, oc := range user1OnCall {\n\t\t\toc.EPName = rep.Replace(oc.EPName)\n\t\t\toc.RotationName = rep.Replace(oc.RotationName)\n\t\t\toc.ScheduleName = rep.Replace(oc.ScheduleName)\n\t\t\toc.ServiceName = rep.Replace(oc.ServiceName)\n\t\t\tuser1OnCall[i] = oc\n\t\t}\n\n\t\tfor i, oc := range user2OnCall {\n\t\t\toc.EPName = rep.Replace(oc.EPName)\n\t\t\toc.RotationName = rep.Replace(oc.RotationName)\n\t\t\toc.ScheduleName = rep.Replace(oc.ScheduleName)\n\t\t\toc.ServiceName = rep.Replace(oc.ServiceName)\n\t\t\tuser2OnCall[i] = oc\n\t\t}\n\n\t\tinput = strings.Replace(input, \"u1\", u1.ID, -1)\n\t\tinput = strings.Replace(input, \"u2\", u2.ID, -1)\n\t\tinput = rep.Replace(input)\n\t\tquery := fmt.Sprintf(`\n\t\t\tmutation {\n\t\t\t\tcreateAll(input:{\n\t\t\t\t\t%s\n\t\t\t\t}) {\n\t\t\t\t\tservices {id}\n\t\t\t\t\tescalation_policies {id}\n\t\t\t\t\trotations {id}\n\t\t\t\t\tuser_overrides {id}\n\t\t\t\t\tschedules {id}\n\t\t\t\t}\n\t\t\t}\n\t\t\t`, input)\n\t\tt.Run(name, func(t *testing.T) {\n\n\t\t\tvar resp struct {\n\t\t\t\tCreateAll map[string][]struct{ ID string }\n\t\t\t}\n\t\t\tdoQL(t, false, query, &resp)\n\t\t\th.Trigger()\n\n\t\t\tvar onCall struct {\n\t\t\t\tUser struct {\n\t\t\t\t\tOnCallAssignments []resolver.OnCallAssignment `json:\"on_call_assignments\"`\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tvar hasFailure bool\n\n\t\t\tcheckUser := func(name, uid string) {\n\n\t\t\t\tt.Run(\"User_\"+name, func(t *testing.T) {\n\t\t\t\t\tdoQL(t, false, fmt.Sprintf(`\n\t\t\t\t\t\tquery {\n\t\t\t\t\t\t\tuser(id: \"%s\") { on_call_assignments{\n\t\t\t\t\t\t\t\tescalation_policy_name\n\t\t\t\t\t\t\t\tescalation_policy_step_number\n\t\t\t\t\t\t\t\tis_active\n\t\t\t\t\t\t\t\trotation_name\n\t\t\t\t\t\t\t\tschedule_name\n\t\t\t\t\t\t\t\tservice_name\n\t\t\t\t\t\t\t\tuser_id\n\t\t\t\t\t\t\t} }\n\t\t\t\t\t\t\n\t\t\t\t\t\t}\n\t\t\t\t\t`, uid), &onCall)\n\n\t\t\t\t\tm := make(map[asnID]resolver.OnCallAssignment, len(onCall.User.OnCallAssignments))\n\t\t\t\t\tchecked := make(map[asnID]bool)\n\t\t\t\t\tfor _, a := range onCall.User.OnCallAssignments {\n\t\t\t\t\t\tm[getID(a)] = a\n\t\t\t\t\t}\n\t\t\t\t\tvar asn []resolver.OnCallAssignment\n\t\t\t\t\tswitch name {\n\t\t\t\t\tcase \"u1\":\n\t\t\t\t\t\tasn = user1OnCall\n\t\t\t\t\tcase \"u2\":\n\t\t\t\t\t\tasn = user2OnCall\n\t\t\t\t\t}\n\n\t\t\t\t\tfor _, a := range asn {\n\t\t\t\t\t\tid := getID(a)\n\t\t\t\t\t\tchecked[id] = true\n\t\t\t\t\t\tresp, ok := m[id]\n\t\t\t\t\t\tif !ok {\n\t\t\t\t\t\t\thasFailure = 
true\n\t\t\t\t\t\t\tt.Errorf(\"got nil, want assignment %+v\", id)\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif resp.UserID != uid {\n\t\t\t\t\t\t\thasFailure = true\n\t\t\t\t\t\t\tt.Errorf(\"Bad UserID for %+v: got %s; want %s\", id, resp.UserID, uid)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif resp.IsActive != a.IsActive {\n\t\t\t\t\t\t\thasFailure = true\n\t\t\t\t\t\t\tt.Errorf(\"Wrong active state for %+v: got %t; want %t\", id, resp.IsActive, a.IsActive)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tfor aID := range m {\n\t\t\t\t\t\tif checked[aID] {\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t}\n\t\t\t\t\t\thasFailure = true\n\t\t\t\t\t\tt.Errorf(\"got unexpected assignment: %+v\", aID)\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t}\n\n\t\t\tcheckUser(\"u1\", u1.ID)\n\t\t\tcheckUser(\"u2\", u2.ID)\n\n\t\t\tif hasFailure {\n\t\t\t\tt.Fatal()\n\t\t\t}\n\n\t\t})\n\t}\n\n\t// User directly on EP is always on call\n\tcheck(\"User EP Direct\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: user, target_id: \"u1\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\tnil,\n\t)\n\n\t// Active participant directly on EP is always on call\n\tcheck(\"User EP Rotation Direct\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: rotation, target_id: \"rot\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u1\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", RotationName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\tnil,\n\t)\n\n\t// Active participant directly on EP is always on call, rotation directly on EP but no participant, user has no assignments\n\tcheck(\"Only One User EP Rotation Direct\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: rotation, target_id: \"rot\" }, {target_type: rotation, target_id: \"rot2\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"3006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"},\n\t\t\t\t\t\t{id_placeholder: \"rot2\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2016-01-02T15:04:05Z\", name: \"generatedB\", description: \"2\"} ]\n\t\t\trotation_participants: [{rotation_id: \"rot2\", user_id: \"u2\"}]\n\t\t`,\n\t\tnil,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", RotationName: \"generatedB\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// Different users on different rotations, users 
are on call but with different assignment rotations\n\tcheck(\"Multiple Users EP Rotation Direct\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: rotation, target_id: \"rot\" }, {target_type: rotation, target_id: \"rot2\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"},\n\t\t\t\t\t\t{id_placeholder: \"rot2\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2016-01-02T15:04:05Z\", name: \"generatedB\", description: \"2\"} ]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u1\"}, {rotation_id: \"rot2\", user_id: \"u2\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", RotationName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", RotationName: \"generatedB\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// EP -> Schedule, where there is an active ADD for a user\n\tcheck(\"User EP Schedule Add Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\tnil,\n\t)\n\n\t// EP -> Schedule, where there is an inactive ADD for a user\n\tcheck(\"User EP Schedule Inactive Add Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", start_time: \"3006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t\tnil,\n\t)\n\n\t// Active schedule rule, user is replaced\n\tcheck(\"User EP Schedule Replace Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: 
\"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, user is replaced but in the future (inactive replacement)\n\tcheck(\"User EP Schedule Inactive Replace Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"3006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// Same scenario, user is NOT replaced (no override)\n\tcheck(\"User EP Schedule Replace Override Absent\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// Same scenario, user is NOT replaced (no override), inactive schedule rule\n\tcheck(\"User EP Schedule No Days Replace Override Absent\", `\n\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\tescalation_policy_steps: 
[{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: false, monday:false, tuesday:false, wednesday: false, thursday: false, friday: false, saturday: false}]\n\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, active rotation participant is replaced\n\tcheck(\"User EP Schedule Replace Rotation Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, active rotation participant is replaced\n\tcheck(\"User EP Schedule Replace Rotation Override\", `\n\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, 
target_id: \"s\"}]\n`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, active rotation participant is replaced with an inactive replace override\n\tcheck(\"User EP Schedule Replace Rotation Override (Inactive)\", `\n\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"3006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// Same as above, but no service assignment\n\tcheck(\"User EP Schedule Replace Rotation Override No Service\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{},\n\t)\n\n\t// Same as above, but 2 service assignments\n\tcheck(\"User EP Schedule Replace Rotation Override Double Service\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: 
[{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"},{description: \"ok\", name: \"generatedB\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t\t{ServiceName: \"generatedB\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t\t{ServiceName: \"generatedB\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, active rotation participant is NOT replaced (no override)\n\tcheck(\"User EP Schedule Replace Rotation Override Absent\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n\t// Active schedule rule, active rotation participant is removed\n\tcheck(\"User EP Schedule Remove Rotation Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:rotation, target_id:\"rot\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, 
monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\trotations: [{id_placeholder: \"rot\", time_zone: \"UTC\", shift_length: 1, type: weekly, start: \"2006-01-02T15:04:05Z\", name: \"generatedA\", description: \"1\"}]\n\t\t\trotation_participants: [{rotation_id: \"rot\", user_id: \"u2\"}]\n\t\t\tuser_overrides: [{ remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Active schedule rule, user is removed\n\tcheck(\"User EP Schedule Remove Override\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\tuser_overrides: [{remove_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: false},\n\t\t},\n\t)\n\n\t// Multiple add overrides, active schedule rules\n\tcheck(\"User EP Schedule Multiple Overrides\", `\n\t\t\tescalation_policies: [{ id_placeholder: \"ep\", name: \"generatedA\", description: \"1\"}]\n\t\t\tescalation_policy_steps: [{escalation_policy_id: \"ep\", delay_minutes: 1, targets: [{target_type: schedule, target_id: \"s\" }] }]\n\t\t\tservices: [{id_placeholder: \"svc\", description: \"ok\", name: \"generatedA\", escalation_policy_id: \"ep\"}]\n\t\t\tschedules: [{id_placeholder: \"s\", time_zone: \"UTC\", name: \"generatedA\", description: \"1\"}]\n\t\t\tschedule_rules: [{target:{target_type:user, target_id:\"u2\"}, start:\"00:00\", end:\"23:59\", schedule_id: \"s\", sunday: true, monday:true, tuesday:true, wednesday: true, thursday: true, friday: true, saturday: true}]\n\t\t\tuser_overrides: [{add_user_id: \"u1\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"},\n\t\t\t\t\t\t\t {add_user_id: \"u2\", start_time: \"1006-01-02T15:04:05Z\", end_time: \"4006-01-02T15:04:05Z\", target_type: schedule, target_id: \"s\"}]\n\t\t`,\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t\t[]resolver.OnCallAssignment{\n\t\t\t{ServiceName: \"generatedA\", EPName: \"generatedA\", ScheduleName: \"generatedA\", Level: 0, IsActive: true},\n\t\t},\n\t)\n\n}", "func canMakeTwoRightOneTop(p int) bool {\n\tif p >= 49 {\n\t\treturn false\n\t}\n\n\tswitch p {\n\tcase 8, 16, 24, 32, 40, 48, 56, 64:\n\t\treturn false\n\t}\n\treturn true\n}", "func LogicalRuleTestHelper(t *testing.T, tc 
*RuleTestCase, options ...cmp.Option) {\n\tt.Helper()\n\n\tbefore := CreatePlanSpec(tc.Before)\n\tvar after *plan.Spec\n\tif tc.NoChange {\n\t\tafter = CreatePlanSpec(tc.Before.Copy())\n\t} else {\n\t\tafter = CreatePlanSpec(tc.After)\n\t}\n\n\tlogicalPlanner := plan.NewLogicalPlanner(\n\t\tplan.OnlyLogicalRules(tc.Rules...),\n\t)\n\n\tctx := tc.Context\n\tif ctx == nil {\n\t\tctx = context.Background()\n\t}\n\n\tpp, err := logicalPlanner.Plan(ctx, before)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttype testAttrs struct {\n\t\tID plan.NodeID\n\t\tSpec plan.ProcedureSpec\n\t}\n\twant := make([]testAttrs, 0)\n\tafter.BottomUpWalk(func(node plan.Node) error {\n\t\twant = append(want, testAttrs{\n\t\t\tID: node.ID(),\n\t\t\tSpec: node.ProcedureSpec(),\n\t\t})\n\t\treturn nil\n\t})\n\n\tgot := make([]testAttrs, 0)\n\tpp.BottomUpWalk(func(node plan.Node) error {\n\t\tgot = append(got, testAttrs{\n\t\t\tID: node.ID(),\n\t\t\tSpec: node.ProcedureSpec(),\n\t\t})\n\t\treturn nil\n\t})\n\n\ttempOptions := make([]cmp.Option, 0, len(CmpOptions)+len(options))\n\ttempOptions = append(tempOptions, CmpOptions...)\n\ttempOptions = append(tempOptions, options...)\n\tif !cmp.Equal(want, got, tempOptions...) {\n\t\tt.Errorf(\"transformed plan not as expected, -want/+got:\\n%v\",\n\t\t\tcmp.Diff(want, got, tempOptions...))\n\t}\n}", "func testNSCAndICMP(t *testing.T, nodesCount int, nscPodFactory func(*v1.Node) *v1.Pod) {\n\tk8s, err := kube_testing.NewK8s()\n\tdefer k8s.Cleanup()\n\n\tExpect(err).To(BeNil())\n\n\ts1 := time.Now()\n\tk8s.Prepare(\"nsmd\", \"nsc\", \"nsmd-dataplane\", \"icmp-responder-nse\")\n\tlogrus.Printf(\"Cleanup done: %v\", time.Since(s1))\n\n\tnodes_setup := nsmd_test_utils.SetupNodes(k8s, nodesCount, defaultTimeout)\n\n\t// Run ICMP on latest node\n\t_ = nsmd_test_utils.DeployIcmp(k8s, nodes_setup[nodesCount-1].Node, \"icmp-responder-nse1\", defaultTimeout)\n\n\tnscPodNode := nsmd_test_utils.DeployNsc(k8s, nodes_setup[0].Node, \"nsc1\", defaultTimeout)\n\n\tvar nscInfo *nsmd_test_utils.NSCCheckInfo\n\n\tfailures := InterceptGomegaFailures(func() {\n\t\tnscInfo = nsmd_test_utils.CheckNSC(k8s, t, nscPodNode)\n\t})\n\t// Do dumping of container state to dig into what is happened.\n\tif len(failures) > 0 {\n\t\tlogrus.Errorf(\"Failues: %v\", failures)\n\t\tnsmd_test_utils.PrintLogs(k8s, nodes_setup)\n\t\tnscInfo.PrintLogs()\n\n\t\tt.Fail()\n\t}\n}", "func TestRestrictedSuggestions(t *testing.T) {\n\tvar (\n\t\tfailedWithinTimeout = &loopdb.LoopEvent{\n\t\t\tSwapStateData: loopdb.SwapStateData{\n\t\t\t\tState: loopdb.StateFailOffchainPayments,\n\t\t\t},\n\t\t\tTime: testTime,\n\t\t}\n\n\t\tfailedBeforeBackoff = &loopdb.LoopEvent{\n\t\t\tSwapStateData: loopdb.SwapStateData{\n\t\t\t\tState: loopdb.StateFailOffchainPayments,\n\t\t\t},\n\t\t\tTime: testTime.Add(\n\t\t\t\tdefaultFailureBackoff * -1,\n\t\t\t),\n\t\t}\n\n\t\t// failedTemporary is a swap that failed outside of our backoff\n\t\t// period, but we still want to back off because the swap is\n\t\t// considered pending.\n\t\tfailedTemporary = &loopdb.LoopEvent{\n\t\t\tSwapStateData: loopdb.SwapStateData{\n\t\t\t\tState: loopdb.StateFailTemporary,\n\t\t\t},\n\t\t\tTime: testTime.Add(\n\t\t\t\tdefaultFailureBackoff * -3,\n\t\t\t),\n\t\t}\n\n\t\tchanRules = map[lnwire.ShortChannelID]*SwapRule{\n\t\t\tchanID1: chanRule,\n\t\t\tchanID2: chanRule,\n\t\t}\n\t)\n\n\ttests := []struct {\n\t\tname string\n\t\tchannels []lndclient.ChannelInfo\n\t\tloopOut []*loopdb.LoopOut\n\t\tloopIn []*loopdb.LoopIn\n\t\tchanRules 
map[lnwire.ShortChannelID]*SwapRule\n\t\tpeerRules map[route.Vertex]*SwapRule\n\t\texpected *Suggestions\n\t}{\n\t\t{\n\t\t\tname: \"no existing swaps\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopOut: nil,\n\t\t\tloopIn: nil,\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"unrestricted loop out\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: &loopdb.LoopOutContract{\n\t\t\t\t\t\tOutgoingChanSet: nil,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"unrestricted loop in\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopIn: []*loopdb.LoopIn{\n\t\t\t\t{\n\t\t\t\t\tContract: &loopdb.LoopInContract{\n\t\t\t\t\t\tLastHop: nil,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"restricted loop out\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1, channel2,\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: chan1Out,\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan2Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: map[lnwire.ShortChannelID]Reason{\n\t\t\t\t\tchanID1: ReasonLoopOut,\n\t\t\t\t},\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"restricted loop in\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1, channel2,\n\t\t\t},\n\t\t\tloopIn: []*loopdb.LoopIn{\n\t\t\t\t{\n\t\t\t\t\tContract: &loopdb.LoopInContract{\n\t\t\t\t\t\tLastHop: &peer2,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: map[lnwire.ShortChannelID]Reason{\n\t\t\t\t\tchanID2: ReasonLoopIn,\n\t\t\t\t},\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"swap failed recently\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: chan1Out,\n\t\t\t\t\tLoop: loopdb.Loop{\n\t\t\t\t\t\tEvents: []*loopdb.LoopEvent{\n\t\t\t\t\t\t\tfailedWithinTimeout,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tDisqualifiedChans: map[lnwire.ShortChannelID]Reason{\n\t\t\t\t\tchanID1: ReasonFailureBackoff,\n\t\t\t\t},\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"swap failed before cutoff\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: chan1Out,\n\t\t\t\t\tLoop: loopdb.Loop{\n\t\t\t\t\t\tEvents: 
[]*loopdb.LoopEvent{\n\t\t\t\t\t\t\tfailedBeforeBackoff,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tOutSwaps: []loop.OutRequest{\n\t\t\t\t\tchan1Rec,\n\t\t\t\t},\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"temporary failure\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: chan1Out,\n\t\t\t\t\tLoop: loopdb.Loop{\n\t\t\t\t\t\tEvents: []*loopdb.LoopEvent{\n\t\t\t\t\t\t\tfailedTemporary,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tchanRules: chanRules,\n\t\t\texpected: &Suggestions{\n\t\t\t\tDisqualifiedChans: map[lnwire.ShortChannelID]Reason{\n\t\t\t\t\tchanID1: ReasonLoopOut,\n\t\t\t\t},\n\t\t\t\tDisqualifiedPeers: noPeersDisqualified,\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"existing on peer's channel\",\n\t\t\tchannels: []lndclient.ChannelInfo{\n\t\t\t\tchannel1,\n\t\t\t\t{\n\t\t\t\t\tChannelID: chanID3.ToUint64(),\n\t\t\t\t\tPubKeyBytes: peer1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tloopOut: []*loopdb.LoopOut{\n\t\t\t\t{\n\t\t\t\t\tContract: chan1Out,\n\t\t\t\t},\n\t\t\t},\n\t\t\tpeerRules: map[route.Vertex]*SwapRule{\n\t\t\t\tpeer1: {\n\t\t\t\t\tThresholdRule: NewThresholdRule(0, 50),\n\t\t\t\t\tType: swap.TypeOut,\n\t\t\t\t},\n\t\t\t},\n\t\t\texpected: &Suggestions{\n\t\t\t\tDisqualifiedChans: noneDisqualified,\n\t\t\t\tDisqualifiedPeers: map[route.Vertex]Reason{\n\t\t\t\t\tpeer1: ReasonLoopOut,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, testCase := range tests {\n\t\ttestCase := testCase\n\n\t\tt.Run(testCase.name, func(t *testing.T) {\n\t\t\t// Create a manager config which will return the test\n\t\t\t// case's set of existing swaps.\n\t\t\tcfg, lnd := newTestConfig()\n\t\t\tcfg.ListLoopOut = func(context.Context) ([]*loopdb.LoopOut, error) {\n\t\t\t\treturn testCase.loopOut, nil\n\t\t\t}\n\t\t\tcfg.ListLoopIn = func(context.Context) ([]*loopdb.LoopIn, error) {\n\t\t\t\treturn testCase.loopIn, nil\n\t\t\t}\n\n\t\t\tlnd.Channels = testCase.channels\n\n\t\t\tparams := defaultParameters\n\t\t\tparams.AutoloopBudgetLastRefresh = testBudgetStart\n\t\t\tif testCase.chanRules != nil {\n\t\t\t\tparams.ChannelRules = testCase.chanRules\n\t\t\t}\n\n\t\t\tif testCase.peerRules != nil {\n\t\t\t\tparams.PeerRules = testCase.peerRules\n\t\t\t}\n\n\t\t\ttestSuggestSwaps(\n\t\t\t\tt, newSuggestSwapsSetup(cfg, lnd, params),\n\t\t\t\ttestCase.expected, nil,\n\t\t\t)\n\t\t})\n\t}\n}", "func (p *Chip) addrIndirectX(mode instructionMode) (bool, error) {\n\tswitch {\n\tcase p.opTick <= 1 || p.opTick > 7:\n\t\treturn true, InvalidCPUState{fmt.Sprintf(\"addrIndirectX invalid opTick: %d\", p.opTick)}\n\tcase p.opTick == 2:\n\t\t// Already read the value but need to bump the PC\n\t\tp.opAddr = uint16(0x00FF & p.opVal)\n\t\tp.PC++\n\t\treturn false, nil\n\tcase p.opTick == 3:\n\t\t// Read from the ZP addr. 
We'll add the X register as well for the real read next.\n\t\t_ = p.ram.Read(p.opAddr)\n\t\t// Does this as a uint8 so it wraps as needed.\n\t\tp.opAddr = uint16(uint8(p.opVal + p.X))\n\t\treturn false, nil\n\tcase p.opTick == 4:\n\t\t// Read effective addr low byte.\n\t\tp.opVal = p.ram.Read(p.opAddr)\n\t\t// Setup opAddr for next read and handle wrapping\n\t\tp.opAddr = uint16(uint8(p.opAddr&0x00FF) + 1)\n\t\treturn false, nil\n\tcase p.opTick == 5:\n\t\tp.opAddr = (uint16(p.ram.Read(p.opAddr)) << 8) + uint16(p.opVal)\n\t\tdone := false\n\t\t// For a store we're done since we have the address needed.\n\t\tif mode == kSTORE_INSTRUCTION {\n\t\t\tdone = true\n\t\t}\n\t\treturn done, nil\n\tcase p.opTick == 6:\n\t\tp.opVal = p.ram.Read(p.opAddr)\n\t\tdone := true\n\t\tif mode == kRMW_INSTRUCTION {\n\t\t\tdone = false\n\t\t}\n\t\treturn done, nil\n\t}\n\t// case p.opTick == 7:\n\tp.ram.Write(p.opAddr, p.opVal)\n\treturn true, nil\n}", "func TestSolution(t *testing.T) {\n\tast := assert.New(t)\n\n\tfor _, f := range SolutionFuncList {\n\t\tfor _, c := range cases {\n\t\t\tt.Run(c.name, func(t *testing.T) {\n\t\t\t\tgot := f(c.nums, c.target)\n\t\t\t\tast.Equal(c.expect, got,\n\t\t\t\t\t\"func: %v case: %v \",\n\t\t\t\t\truntime.FuncForPC(reflect.ValueOf(f).Pointer()).Name(), c.name)\n\t\t\t})\n\t\t}\n\t}\n}", "func getForkPrecedence(forkId int) int {\n\tleftPhil := leftPhilId(forkId)\n\trightPhil := rightPhilId(forkId)\n\tforkOwner := forkOwners[forkId]\n\tisClean := cleanForks[forkId]\n\n\t// let u and v be left or right philosophers and u != v\n\t// u holds clean fork: u has precedence\n\t// u holds dirty fork: v has precedence\n\tif (leftPhil == forkOwner) == isClean {\n\t\treturn leftPhil\n\t} else {\n\t\treturn rightPhil\n\t}\n}", "func ROUNDPS(i, mx, x operand.Op) { ctx.ROUNDPS(i, mx, x) }", "func (tok Token) Precedence() int {\n\tswitch tok {\n\tcase LOR:\n\t\treturn 1\n\tcase LAND:\n\t\treturn 2\n\tcase EQL, NEQ, LSS, LEQ, GTR, GEQ:\n\t\treturn 3\n\tcase ADD, SUB, OR, XOR:\n\t\treturn 4\n\tcase MUL, QUO, REM, SHL, SHR, AND, AND_NOT:\n\t\treturn 5\n\t}\n\treturn LowestPrec\n}", "func applyPrecdence(rootDir string, names []string, precedence []string) []string {\n\n\t// create a map so we can group names that match precedence prefixes and\n\t// then apply extra logic\n\tmatchMap := make(map[string][]string, 0)\n\n\t// dedupe the precedence list\n\tdedupedPrecedence := make([]string, 0)\n\tfor _, rule := range precedence {\n\t\tif !InStringArray(dedupedPrecedence, rule) {\n\t\t\tdedupedPrecedence = append(dedupedPrecedence, rule)\n\t\t}\n\t}\n\n\t// build an array of all names in preferential order\n\tvar matches []string\n\tvar ok bool\n\tfor _, rule := range dedupedPrecedence {\n\t\tfor _, name := range names {\n\t\t\t// append the match to an array keyed by precedence rule\n\t\t\tif rule == StripExtension(name) {\n\t\t\t\tmatches, ok = matchMap[rule]\n\t\t\t\tif !ok {\n\t\t\t\t\tmatches = make([]string, 0)\n\t\t\t\t}\n\n\t\t\t\tmatches = append(matches, name)\n\t\t\t\tmatchMap[rule] = matches\n\t\t\t}\n\t\t}\n\t}\n\n\t// apply extra logic to each match - favour files over directories\n\tfor rule := range matchMap {\n\t\tmatches := matchMap[rule]\n\t\t// the bool is true if i < j\n\t\tsort.SliceStable(matches, func(i, j int) bool {\n\t\t\tleft := matches[i]\n\t\t\tright := matches[j]\n\n\t\t\tleftExtension := filepath.Ext(left)\n\t\t\trightExtension := filepath.Ext(right)\n\n\t\t\tleftBaseName := StripExtension(left)\n\t\t\trightBaseName := StripExtension(right)\n\n\t\t\tabsLeft := 
filepath.Join(rootDir, left)\n\t\t\tabsRight := filepath.Join(rootDir, right)\n\n\t\t\t// if both basenames match exactly, favour a file over a directory.\n\t\t\t// if both are files, or both are directories, sort by extension\n\t\t\tif leftBaseName == rule && rightBaseName == rule {\n\t\t\t\t// if only one is a file, favour it\n\t\t\t\tif isFile(absLeft) && !isFile(absRight) || !isFile(absLeft) && isFile(absRight) {\n\t\t\t\t\treturn isFile(absLeft)\n\t\t\t\t} else if isFile(absLeft) && isFile(absRight) {\n\t\t\t\t\t// both are files. Return based on the extensions\n\t\t\t\t\treturn leftExtension < rightExtension\n\t\t\t\t} else {\n\t\t\t\t\t// the same, so return false to cover all branches\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// if one is an exact match, favour it\n\t\t\t\treturn leftBaseName == rule\n\t\t\t}\n\t\t})\n\n\t\tmatchMap[rule] = matches\n\t}\n\n\tintermediateResults := make([]string, 0)\n\n\t// populate the final results array\n\tfor _, prefix := range dedupedPrecedence {\n\t\tmatches, ok := matchMap[prefix]\n\t\tif ok {\n\t\t\tintermediateResults = append(intermediateResults, matches...)\n\t\t}\n\t}\n\n\t// now perform another pass hoisting files over directories so the traversal\n\t// is breadth first\n\tfiles := make([]string, 0)\n\tdirs := make([]string, 0)\n\n\tfor _, path := range intermediateResults {\n\t\tabsPath := filepath.Join(rootDir, path)\n\t\tif isFile(absPath) {\n\t\t\tfiles = append(files, path)\n\t\t} else {\n\t\t\tdirs = append(dirs, path)\n\t\t}\n\t}\n\n\tresults := append(files, dirs...)\n\n\tlog.Logger.Tracef(\"Sorted input names: %#v by precedence to: %#v\",\n\t\tnames, results)\n\n\treturn results\n}", "func TestBitFlags(t *testing.T){\n\tvar bitflag8 BitFlag8 = 16;\n\tvar bitflag16 BitFlag16 = 256;\n\tvar bitflag32 BitFlag32 = 65536;\n\tvar bitflag64 BitFlag64 = 4294967296;\n\tvar bitflag8_p *BitFlag8 = &(bitflag8);\n\tvar bitflag16_p *BitFlag16 = &(bitflag16);\n\tvar bitflag32_p *BitFlag32 = &(bitflag32);\n\tvar bitflag64_p *BitFlag64 = &(bitflag64);\n\t//BitFlag8 test\n\t_ = bitflag8;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag8, bitflag8, bitflag8_p, *bitflag8_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag8.GetSize(), bitflag8_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag8.HasFlagBit(uint64(8)), bitflag8_p.HasFlagBit(uint64(8)), bitflag8.HasFlagBit(uint64(16)), bitflag8_p.HasFlagBit(uint64(16)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag8.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag8.UnsetFlagBit(uint64(16));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag8.GetBit(uint8(2)), bitflag8_p.GetBit(uint8(2)), bitflag8.GetBit(uint8(5)), bitflag8_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag8.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag8.GetBit(uint8(2)), bitflag8_p.GetBit(uint8(2)), bitflag8.GetBit(uint8(5)), 
bitflag8_p.GetBit(uint8(5)));\n\t//BitFlag16 test\n\t_ = bitflag16;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag16, bitflag16, bitflag16_p, *bitflag16_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag16.GetSize(), bitflag16_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag16.HasFlagBit(uint64(8)), bitflag16_p.HasFlagBit(uint64(8)), bitflag16.HasFlagBit(uint64(256)), bitflag16_p.HasFlagBit(uint64(256)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag16.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag16.UnsetFlagBit(uint64(256));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag16.GetBit(uint8(2)), bitflag16_p.GetBit(uint8(2)), bitflag16.GetBit(uint8(5)), bitflag16_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag16.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag16.GetBit(uint8(2)), bitflag16_p.GetBit(uint8(2)), bitflag16.GetBit(uint8(5)), bitflag16_p.GetBit(uint8(5)));\n\t//BitFlag32 test\n\t_ = bitflag32;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag32, bitflag32, bitflag32_p, *bitflag32_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag32.GetSize(), bitflag32_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag32.HasFlagBit(uint64(8)), bitflag32_p.HasFlagBit(uint64(8)), bitflag32.HasFlagBit(uint64(65536)), bitflag32_p.HasFlagBit(uint64(65536)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag32.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag32.UnsetFlagBit(uint64(65536));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag32.GetBit(uint8(2)), bitflag32_p.GetBit(uint8(2)), bitflag32.GetBit(uint8(5)), bitflag32_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag32.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag32.GetBit(uint8(2)), bitflag32_p.GetBit(uint8(2)), bitflag32.GetBit(uint8(5)), bitflag32_p.GetBit(uint8(5)));\n\t//BitFlag64 test\n\t_ = bitflag64;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag64, bitflag64, bitflag64_p, *bitflag64_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag64.GetSize(), bitflag64_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag64.HasFlagBit(uint64(8)), bitflag64_p.HasFlagBit(uint64(8)), bitflag64.HasFlagBit(uint64(4294967296)), 
bitflag64_p.HasFlagBit(uint64(4294967296)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag64.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag64.UnsetFlagBit(uint64(4294967296));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag64.GetBit(uint8(2)), bitflag64_p.GetBit(uint8(2)), bitflag64.GetBit(uint8(5)), bitflag64_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag64.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag64.GetBit(uint8(2)), bitflag64_p.GetBit(uint8(2)), bitflag64.GetBit(uint8(5)), bitflag64_p.GetBit(uint8(5)));\n\t//arraylist test\n\tarray_list := arraylist.New();\n\tarray_list.Add( bitflag8 );\n\tt.Logf(\"%T: %v Size: %d\", array_list, array_list, array_list.Size());\n\tarray_list.Add( bitflag16, bitflag32, bitflag64 );\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Insert(2, bitflag32_p);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tbitflag32.SetBit(uint8(7), uint8(1));\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Swap(2, 3);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Remove(2);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Remove(2);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Insert(2, *bitflag32_p);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Clear();\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\n\treturn;\n}", "func xopJPnZnn(cpu *CPU, l, h uint8) {\n\tif cpu.AF.Lo&maskZ == 0 {\n\t\tcpu.PC = toU16(l, h)\n\t}\n}", "func TestNodeAffinityAnnotationsPriority(t *testing.T) {\n\tutilfeature.DefaultFeatureGate.Set(\"AffinityInAnnotations=true\")\n\tlabel1 := map[string]string{\"foo\": \"bar\"}\n\tlabel2 := map[string]string{\"key\": \"value\"}\n\tlabel3 := map[string]string{\"az\": \"az1\"}\n\tlabel4 := map[string]string{\"abc\": \"az11\", \"def\": \"az22\"}\n\tlabel5 := map[string]string{\"foo\": \"bar\", \"key\": \"value\", \"az\": \"az1\"}\n\n\taffinity1 := map[string]string{\n\t\tv1.AffinityAnnotationKey: `\n\t\t{\"nodeAffinity\": {\"preferredDuringSchedulingIgnoredDuringExecution\": [\n\t\t\t{\n\t\t\t\t\"weight\": 2,\n\t\t\t\t\"preference\": {\n\t\t\t\t\t\"matchExpressions\": [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\"key\": \"foo\",\n\t\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"bar\"]\n\t\t\t\t\t\t}\n\t\t\t\t\t]\n\t\t\t\t}\n\t\t\t}\n\t\t]}}`,\n\t}\n\n\taffinity2 := map[string]string{\n\t\tv1.AffinityAnnotationKey: `\n\t\t{\"nodeAffinity\": {\"preferredDuringSchedulingIgnoredDuringExecution\": [\n\t\t\t{\n\t\t\t\t\"weight\": 2,\n\t\t\t\t\"preference\": {\"matchExpressions\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"key\": \"foo\",\n\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"bar\"]\n\t\t\t\t\t}\n\t\t\t\t]}\n\t\t\t},\n\t\t\t{\n\t\t\t\t\"weight\": 
4,\n\t\t\t\t\"preference\": {\"matchExpressions\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"key\": \"key\",\n\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"value\"]\n\t\t\t\t\t}\n\t\t\t\t]}\n\t\t\t},\n\t\t\t{\n\t\t\t\t\"weight\": 5,\n\t\t\t\t\"preference\": {\"matchExpressions\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"key\": \"foo\",\n\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"bar\"]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t\"key\": \"key\",\n\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"value\"]\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t\"key\": \"az\",\n\t\t\t\t\t\t\"operator\": \"In\", \"values\": [\"az1\"]\n\t\t\t\t\t}\n\t\t\t\t]}\n\t\t\t}\n\t\t]}}`,\n\t}\n\n\ttests := []struct {\n\t\tpod *v1.Pod\n\t\tnodes []*v1.Node\n\t\texpectedList schedulerapi.HostPriorityList\n\t\ttest string\n\t}{\n\t\t{\n\t\t\tpod: &v1.Pod{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t},\n\t\t\tnodes: []*v1.Node{\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine1\", Labels: label1}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine2\", Labels: label2}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine3\", Labels: label3}},\n\t\t\t},\n\t\t\texpectedList: []schedulerapi.HostPriority{{Host: \"machine1\", Score: 0}, {Host: \"machine2\", Score: 0}, {Host: \"machine3\", Score: 0}},\n\t\t\ttest: \"all machines are same priority as NodeAffinity is nil\",\n\t\t},\n\t\t{\n\t\t\tpod: &v1.Pod{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tAnnotations: affinity1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tnodes: []*v1.Node{\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine1\", Labels: label4}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine2\", Labels: label2}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine3\", Labels: label3}},\n\t\t\t},\n\t\t\texpectedList: []schedulerapi.HostPriority{{Host: \"machine1\", Score: 0}, {Host: \"machine2\", Score: 0}, {Host: \"machine3\", Score: 0}},\n\t\t\ttest: \"no machine macthes preferred scheduling requirements in NodeAffinity of pod so all machines' priority is zero\",\n\t\t},\n\t\t{\n\t\t\tpod: &v1.Pod{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tAnnotations: affinity1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tnodes: []*v1.Node{\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine1\", Labels: label1}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine2\", Labels: label2}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine3\", Labels: label3}},\n\t\t\t},\n\t\t\texpectedList: []schedulerapi.HostPriority{{Host: \"machine1\", Score: 10}, {Host: \"machine2\", Score: 0}, {Host: \"machine3\", Score: 0}},\n\t\t\ttest: \"only machine1 matches the preferred scheduling requirements of pod\",\n\t\t},\n\t\t{\n\t\t\tpod: &v1.Pod{\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tAnnotations: affinity2,\n\t\t\t\t},\n\t\t\t},\n\t\t\tnodes: []*v1.Node{\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine1\", Labels: label1}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine5\", Labels: label5}},\n\t\t\t\t{ObjectMeta: metav1.ObjectMeta{Name: \"machine2\", Labels: label2}},\n\t\t\t},\n\t\t\texpectedList: []schedulerapi.HostPriority{{Host: \"machine1\", Score: 1}, {Host: \"machine5\", Score: 10}, {Host: \"machine2\", Score: 3}},\n\t\t\ttest: \"all machines matches the preferred scheduling requirements of pod but with different priorities \",\n\t\t},\n\t}\n\n\tfor _, test := range tests {\n\t\tnodeNameToInfo := schedulercache.CreateNodeNameToInfoMap(nil, test.nodes)\n\t\tnap := 
priorityFunction(CalculateNodeAffinityPriorityMap, CalculateNodeAffinityPriorityReduce)\n\t\tlist, err := nap(test.pod, nodeNameToInfo, test.nodes)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"unexpected error: %v\", err)\n\t\t}\n\t\tif !reflect.DeepEqual(test.expectedList, list) {\n\t\t\tt.Errorf(\"%s: \\nexpected %#v, \\ngot %#v\", test.test, test.expectedList, list)\n\t\t}\n\t}\n}", "func TestCalcPriority(t *testing.T) {\n\t// commonSourceTx1 is a valid transaction used in the tests below as an\n\t// input to transactions that are having their priority calculated.\n\t//\n\t// From block 7 in main blockchain.\n\t// tx 0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9\n\tcommonSourceTx1 := &wire.MsgTx{\n\t\tVersion: 1,\n\t\tTxIn: []*wire.TxIn{{\n\t\t\tPreviousOutPoint: wire.OutPoint{\n\t\t\t\tHash: chainhash.Hash{},\n\t\t\t\tIndex: wire.MaxPrevOutIndex,\n\t\t\t},\n\t\t\tSignatureScript: hexToBytes(\"04ffff001d0134\"),\n\t\t\tSequence: 0xffffffff,\n\t\t}},\n\t\tTxOut: []*wire.TxOut{{\n\t\t\tValue: 5000000000,\n\t\t\tPkScript: hexToBytes(\"410411db93e1dcdb8a016b49840f8c5\" +\n\t\t\t\t\"3bc1eb68a382e97b1482ecad7b148a6909a5cb2e0ead\" +\n\t\t\t\t\"dfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8\" +\n\t\t\t\t\"643f656b412a3ac\"),\n\t\t}},\n\t\tLockTime: 0,\n\t}\n\n\t// commonRedeemTx1 is a valid transaction used in the tests below as the\n\t// transaction to calculate the priority for.\n\t//\n\t// It originally came from block 170 in main blockchain.\n\tcommonRedeemTx1 := &wire.MsgTx{\n\t\tVersion: 1,\n\t\tTxIn: []*wire.TxIn{{\n\t\t\tPreviousOutPoint: wire.OutPoint{\n\t\t\t\tHash: *newHashFromStr(\"0437cd7f8525ceed232435\" +\n\t\t\t\t\t\"9c2d0ba26006d92d856a9c20fa0241106ee5\" +\n\t\t\t\t\t\"a597c9\"),\n\t\t\t\tIndex: 0,\n\t\t\t},\n\t\t\tSignatureScript: hexToBytes(\"47304402204e45e16932b8af\" +\n\t\t\t\t\"514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5f\" +\n\t\t\t\t\"b8cd410220181522ec8eca07de4860a4acdd12909d83\" +\n\t\t\t\t\"1cc56cbbac4622082221a8768d1d0901\"),\n\t\t\tSequence: 0xffffffff,\n\t\t}},\n\t\tTxOut: []*wire.TxOut{{\n\t\t\tValue: 1000000000,\n\t\t\tPkScript: hexToBytes(\"4104ae1a62fe09c5f51b13905f07f06\" +\n\t\t\t\t\"b99a2f7159b2225f374cd378d71302fa28414e7aab37\" +\n\t\t\t\t\"397f554a7df5f142c21c1b7303b8a0626f1baded5c72\" +\n\t\t\t\t\"a704f7e6cd84cac\"),\n\t\t}, {\n\t\t\tValue: 4000000000,\n\t\t\tPkScript: hexToBytes(\"410411db93e1dcdb8a016b49840f8c5\" +\n\t\t\t\t\"3bc1eb68a382e97b1482ecad7b148a6909a5cb2e0ead\" +\n\t\t\t\t\"dfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8\" +\n\t\t\t\t\"643f656b412a3ac\"),\n\t\t}},\n\t\tLockTime: 0,\n\t}\n\n\ttests := []struct {\n\t\tname string // test description\n\t\ttx *wire.MsgTx // tx to calc priority for\n\t\tutxoView *blockchain.UtxoViewpoint // inputs to tx\n\t\tnextHeight int32 // height for priority calc\n\t\twant float64 // expected priority\n\t}{\n\t\t{\n\t\t\tname: \"one height 7 input, prio tx height 169\",\n\t\t\ttx: commonRedeemTx1,\n\t\t\tutxoView: newUtxoViewpoint([]*wire.MsgTx{commonSourceTx1},\n\t\t\t\t[]int32{7}),\n\t\t\tnextHeight: 169,\n\t\t\twant: 5e9,\n\t\t},\n\t\t{\n\t\t\tname: \"one height 100 input, prio tx height 169\",\n\t\t\ttx: commonRedeemTx1,\n\t\t\tutxoView: newUtxoViewpoint([]*wire.MsgTx{commonSourceTx1},\n\t\t\t\t[]int32{100}),\n\t\t\tnextHeight: 169,\n\t\t\twant: 2129629629.6296296,\n\t\t},\n\t\t{\n\t\t\tname: \"one height 7 input, prio tx height 100000\",\n\t\t\ttx: commonRedeemTx1,\n\t\t\tutxoView: newUtxoViewpoint([]*wire.MsgTx{commonSourceTx1},\n\t\t\t\t[]int32{7}),\n\t\t\tnextHeight: 100000,\n\t\t\twant: 
3086203703703.7036,\n\t\t},\n\t\t{\n\t\t\tname: \"one height 100 input, prio tx height 100000\",\n\t\t\ttx: commonRedeemTx1,\n\t\t\tutxoView: newUtxoViewpoint([]*wire.MsgTx{commonSourceTx1},\n\t\t\t\t[]int32{100}),\n\t\t\tnextHeight: 100000,\n\t\t\twant: 3083333333333.3335,\n\t\t},\n\t}\n\n\tfor i, test := range tests {\n\t\tgot := CalcPriority(test.tx, test.utxoView, test.nextHeight)\n\t\tif got != test.want {\n\t\t\tt.Errorf(\"CalcPriority #%d (%q): unexpected priority \"+\n\t\t\t\t\"got %v want %v\", i, test.name, got, test.want)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func TestMatchesByPrefix(t *testing.T) {\n\tdefer leaktest.AfterTest(t)\n\tpcc := buildTestPrefixConfigMap()\n\ttestData := []struct {\n\t\tkey proto.Key\n\t\texpConfigs []ConfigUnion\n\t}{\n\t\t{proto.KeyMin, []ConfigUnion{config1}},\n\t\t{proto.Key(\"\\x01\"), []ConfigUnion{config1}},\n\t\t{proto.Key(\"/db\"), []ConfigUnion{config1}},\n\t\t{proto.Key(\"/db1\"), []ConfigUnion{config2, config1}},\n\t\t{proto.Key(\"/db1/a\"), []ConfigUnion{config2, config1}},\n\t\t{proto.Key(\"/db1/table1\"), []ConfigUnion{config3, config2, config1}},\n\t\t{proto.Key(\"/db1/table\\xff\"), []ConfigUnion{config3, config2, config1}},\n\t\t{proto.Key(\"/db2\"), []ConfigUnion{config1}},\n\t\t{proto.Key(\"/db3\"), []ConfigUnion{config4, config1}},\n\t\t{proto.Key(\"/db3\\xff\"), []ConfigUnion{config4, config1}},\n\t\t{proto.Key(\"/db5\"), []ConfigUnion{config1}},\n\t\t{proto.Key(\"/xfe\"), []ConfigUnion{config1}},\n\t\t{proto.Key(\"/xff\"), []ConfigUnion{config1}},\n\t}\n\tfor i, test := range testData {\n\t\tpcs := pcc.MatchesByPrefix(test.key)\n\t\tif len(pcs) != len(test.expConfigs) {\n\t\t\tt.Errorf(\"%d: expected %d matches, got %d\", i, len(test.expConfigs), len(pcs))\n\t\t\tcontinue\n\t\t}\n\t\tfor j, pc := range pcs {\n\t\t\tif pc.Config != test.expConfigs[j] {\n\t\t\t\tt.Errorf(\"%d: expected \\\"%d\\\"th config %v for %q; got %v\", i, j, test.expConfigs[j], test.key, pc.Config)\n\t\t\t}\n\t\t}\n\t}\n}", "func (tok Token) Precedence() int {\n\tswitch tok {\n\tcase OR:\n\t\treturn 1\n\tcase AND:\n\t\treturn 2\n\tcase EQ, NEQ, EQREGEX, NEQREGEX, LT, LTE, GT, GTE:\n\t\treturn 3\n\tcase ADD, SUB, BITWISEOR, BITWISEXOR:\n\t\treturn 4\n\tcase MUL, DIV, MOD, BITWISEAND:\n\t\treturn 5\n\t}\n\treturn 0\n}", "func TestConnectednessCorrect(t *testing.T) {\n\tnets := make([]network.Network, 4)\n\tfor i := 0; i < 4; i++ {\n\t\tnets[i] = GenSwarm(t)\n\t}\n\n\t// connect 0-1, 0-2, 0-3, 1-2, 2-3\n\n\tdial := func(a, b network.Network) {\n\t\tDivulgeAddresses(b, a)\n\t\tif _, err := a.DialPeer(context.Background(), b.LocalPeer()); err != nil {\n\t\t\tt.Fatalf(\"Failed to dial: %s\", err)\n\t\t}\n\t}\n\n\tdial(nets[0], nets[1])\n\tdial(nets[0], nets[3])\n\tdial(nets[1], nets[2])\n\tdial(nets[3], nets[2])\n\n\t// The notifications for new connections get sent out asynchronously.\n\t// There is the potential for a race condition here, so we sleep to ensure\n\t// that they have been received.\n\ttime.Sleep(time.Millisecond * 100)\n\n\t// test those connected show up correctly\n\n\t// test connected\n\texpectConnectedness(t, nets[0], nets[1], network.Connected)\n\texpectConnectedness(t, nets[0], nets[3], network.Connected)\n\texpectConnectedness(t, nets[1], nets[2], network.Connected)\n\texpectConnectedness(t, nets[3], nets[2], network.Connected)\n\n\t// test not connected\n\texpectConnectedness(t, nets[0], nets[2], network.NotConnected)\n\texpectConnectedness(t, nets[1], nets[3], network.NotConnected)\n\n\trequire.Len(t, nets[0].Peers(), 2, \"expected net 0 to 
have two peers\")\n\trequire.Len(t, nets[2].Peers(), 2, \"expected net 2 to have two peers\")\n\trequire.NotZerof(t, nets[1].ConnsToPeer(nets[3].LocalPeer()), \"net 1 should have no connections to net 3\")\n\trequire.NoError(t, nets[2].ClosePeer(nets[1].LocalPeer()))\n\n\ttime.Sleep(time.Millisecond * 50)\n\texpectConnectedness(t, nets[2], nets[1], network.NotConnected)\n\n\tfor _, n := range nets {\n\t\tn.Close()\n\t}\n}", "func Test_OtpTestHotpAtCount(t *testing.T) {\n\thotp := testGetHotp(t)\n\tcounterOffset := []int64{0, -1, 1, 12345}\n\n\tfor _, offset := range counterOffset {\n\t\tfor _, data := range referenceRunsHotp {\n\t\t\tsetStateHotp(hotp, data)\n\t\t\tval, err := data.hotp.AtCount(data.counter + offset)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(\"Test fail before running, illigal parameters:\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif offset == 0 {\n\t\t\t\tif val != data.result {\n\t\t\t\t\tt.Error(\"OTP did not change after incrementing the counter even though all other parameters, including the seed counter, are identical:\", data,\n\t\t\t\t\t\t\"original seed -\", data.counter, \", curent seed -\", data.counter+offset,\n\t\t\t\t\t\t\"but the OTP -\", data.result, \"is different from the resultant OPT -\", val)\n\t\t\t\t} else {\n\t\t\t\t\tt.Log(\"OTP calculated value for the same seed stays the same (as expected)\")\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tif offset != 0 {\n\t\t\t\tif val == data.result {\n\t\t\t\t\tt.Error(\"OTP value did not change when the seed counter changed (all other parameters remained identical) :\", data,\n\t\t\t\t\t\t\"original seed -\", data.counter, \", curent seed -\", data.counter+offset,\n\t\t\t\t\t\t\"but the OTP calculated value was the same\", val)\n\t\t\t\t} else {\n\t\t\t\t\tt.Log(\"OTP calculated value for different seed changed (as expected)\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func TestMinimumSwaps(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\tinput []int32\n\t\twant int32\n\t}{\n\t\t{\n\t\t\tname: \"test case #1\",\n\t\t\tinput: []int32{7, 1, 3, 2, 4, 5, 6},\n\t\t\twant: 5,\n\t\t},\n\t\t{\n\t\t\tname: \"test case #2\",\n\t\t\tinput: []int32{2, 3, 4, 1, 5},\n\t\t\twant: 3,\n\t\t},\n\t\t{\n\t\t\tname: \"test case #3\",\n\t\t\tinput: []int32{1, 3, 5, 2, 4, 6, 7},\n\t\t\twant: 3,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot := minimumSwaps(tt.input)\n\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"got %d, want %d\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}", "func testPortsPolicies() []*TestStep {\n\tbuilder := &NetworkPolicySpecBuilder{}\n\tbuilder = builder.SetName(\"x\", \"allow-port-81-not-port-80\").SetPodSelector(map[string]string{\"pod\": \"a\"})\n\tbuilder.SetTypeIngress()\n\t// anyone on port 81 is ok...\n\tbuilder.AddIngress(v1.ProtocolTCP, &p81, nil, nil, nil, nil, nil, nil, nil)\n\n\t// disallow port 80\n\treachability1 := func() *Reachability {\n\t\treachability := NewReachability(allPods, true)\n\t\treachability.ExpectAllIngress(Pod(\"x/a\"), false)\n\t\treachability.Expect(Pod(\"x/a\"), Pod(\"x/a\"), true)\n\t\treturn reachability\n\t}\n\n\t// allow port 81\n\treachability2 := func() *Reachability {\n\t\treachability := NewReachability(allPods, true)\n\t\treachability.ExpectAllIngress(Pod(\"x/a\"), true)\n\t\treturn reachability\n\t}\n\n\treturn []*TestStep{\n\t\t{\n\t\t\t\"Port 80\",\n\t\t\treachability1(),\n\t\t\tbuilder.Get(),\n\t\t\t80,\n\t\t\t0,\n\t\t},\n\t\t{\n\t\t\t\"Port 81\",\n\t\t\t// Applying the same nw policy to test a different 
port\n\t\t\treachability2(),\n\t\t\tbuilder.Get(),\n\t\t\t81,\n\t\t\t0,\n\t\t},\n\t}\n}", "func opprec(op ast.Expr) int {\n\tswitch n := op.(type) {\n\tcase *ast.TernaryExpr, *ast.ParenExpr:\n\t\treturn 1\n\tcase *ast.BinaryExpr:\n\t\tswitch n.Op {\n\t\tcase token.OROR:\n\t\t\treturn 2\n\t\tcase token.ANDAND:\n\t\t\treturn 3\n\t\tcase token.EQEQ,\n\t\t\ttoken.EQEQCI,\n\t\t\ttoken.EQEQCS,\n\t\t\ttoken.NEQ,\n\t\t\ttoken.NEQCI,\n\t\t\ttoken.NEQCS,\n\t\t\ttoken.GT,\n\t\t\ttoken.GTCI,\n\t\t\ttoken.GTCS,\n\t\t\ttoken.GTEQ,\n\t\t\ttoken.GTEQCI,\n\t\t\ttoken.GTEQCS,\n\t\t\ttoken.LT,\n\t\t\ttoken.LTCI,\n\t\t\ttoken.LTCS,\n\t\t\ttoken.LTEQ,\n\t\t\ttoken.LTEQCI,\n\t\t\ttoken.LTEQCS,\n\t\t\ttoken.MATCHCS,\n\t\t\ttoken.NOMATCH,\n\t\t\ttoken.NOMATCHCI,\n\t\t\ttoken.NOMATCHCS,\n\t\t\ttoken.IS,\n\t\t\ttoken.ISCI,\n\t\t\ttoken.ISCS,\n\t\t\ttoken.ISNOT,\n\t\t\ttoken.ISNOTCI,\n\t\t\ttoken.ISNOTCS:\n\t\t\treturn 4\n\t\tcase token.PLUS, token.MINUS, token.DOT:\n\t\t\treturn 5\n\t\tcase token.STAR, token.SLASH, token.PERCENT:\n\t\t\treturn 6\n\t\tdefault:\n\t\t\tpanic(fmt.Errorf(\"unexpected token of BinaryExpr: %v\", n.Op))\n\t\t}\n\tcase *ast.UnaryExpr:\n\t\tswitch n.Op {\n\t\tcase token.NOT, token.MINUS, token.PLUS:\n\t\t\treturn 7\n\t\tdefault:\n\t\t\tpanic(fmt.Errorf(\"unexpected token of UnaryExpr: %v\", n.Op))\n\t\t}\n\tcase *ast.SubscriptExpr, *ast.SliceExpr, *ast.CallExpr, *ast.DotExpr, *ast.MethodExpr:\n\t\treturn 8\n\tcase *ast.BasicLit, *ast.Ident, *ast.List, *ast.Dict, *ast.CurlyName, *ast.HeredocExpr:\n\t\treturn 9\n\tcase *ast.CurlyNameExpr, *ast.CurlyNameLit, *ast.LambdaExpr:\n\t\tpanic(fmt.Errorf(\"precedence is undefined for expr: %T\", n))\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected expr: %T\", n))\n\t}\n}", "func UNPCKLPS(mx, x operand.Op) { ctx.UNPCKLPS(mx, x) }", "func check_prohibits(p int, rs []rune) (err error) {\n\tif rs == nil {\n\t\treturn\n\t}\n\tvar dir int\n var v rune = get_bst(prohibits, rs[0], 0, prohibits_max_idx)\n if v == -1 {\n\t\tdir = b_l\n\t} else if int(v) & p == p {\n return errors.New(\"prohibited symbol\")\n\t} else if int(v) & b_randal == b_randal {\n\t\tdir = b_randal\n\t} else {\n\t\tdir = b_l\n\t}\n\tlast_dir := dir\n\tfor _, x := range rs[1:] {\n v = get_bst(prohibits, x, 0, prohibits_max_idx)\n if v == -1 {\n\t\t\tlast_dir = b_l\n\t\t} else if int(v) & p == p {\n\t\t\treturn errors.New(\"prohibited symbol\")\n\t\t} else if int(v) & dir != dir && \n\t\t\t(v & b_randal == b_randal || v & b_l == b_l) {\n\t\t\treturn errors.New(\"invalid bidi\")\n }\n\t}\n\tif last_dir != dir {\n\t\terr = errors.New(\"invalid bidi\")\n\t}\n\treturn\n}", "func CMPPD(mx, x, i operand.Op) { ctx.CMPPD(mx, x, i) }", "func Test(t *testing.T) {\n\tl3 := InitNode(3, nil, nil)\n\tl2 := InitNode(2, l3, nil)\n\tl1 := InitNode(1, nil, l2)\n\tfmt.Println(preorderTraversal(l1))\n}", "func TestPiBenchmarks(t *testing.T) {\n\tfor _, test := range [...]struct {\n\t\tname string\n\t\tfn testFunc\n\t}{\n\t\t{\"decimal (Go)\", func(prec int) string {\n\t\t\treturn calcPiGo(prec).String()\n\t\t}},\n\t\t{\"decimal (GDA)\", func(prec int) string {\n\t\t\treturn calcPiGDA(prec).String()\n\t\t}},\n\t\t{\"apd\", func(prec int) string {\n\t\t\treturn calcPi_apd(uint32(prec)).String()\n\t\t}},\n\t\t{\"shopSpring\", func(prec int) string {\n\t\t\treturn calcPi_shopSpring(int32(prec)).String()\n\t\t}},\n\t\t{\"inf\", func(prec int) string {\n\t\t\treturn calcPi_inf(prec).String()\n\t\t}},\n\t} {\n\t\tvar ctx decimal.Context\n\t\tfor _, prec := range [...]int{9, 19, 38, 100} 
{\n\t\t\tt.Run(fmt.Sprintf(\"%s/%d\", test.name, prec), func(t *testing.T) {\n\t\t\t\tctx.Precision = prec\n\n\t\t\t\tstr := test.fn(prec)\n\t\t\t\tname := test.name\n\n\t\t\t\tvar x decimal.Big\n\t\t\t\tif _, ok := ctx.SetString(&x, str); !ok {\n\t\t\t\t\tt.Fatalf(\"%s (%d): bad input: %q\", name, prec, str)\n\t\t\t\t}\n\n\t\t\t\tvar act decimal.Big\n\t\t\t\tctx.SetString(&act, pi)\n\t\t\t\tif act.Cmp(&x) != 0 {\n\t\t\t\t\tt.Fatalf(`%s (%d): bad output:\nwant: %q\ngot : %q\n`, name, prec, &act, &x)\n\t\t\t\t}\n\t\t\t})\n\t\t}\n\t}\n}" ]
[ "0.6392197", "0.60636485", "0.5847384", "0.5095937", "0.50211644", "0.49825346", "0.48835298", "0.48553285", "0.48435774", "0.4832537", "0.47929558", "0.47858366", "0.4782585", "0.47379354", "0.47195414", "0.47043484", "0.4698812", "0.46964467", "0.46933302", "0.4665786", "0.46627158", "0.46623287", "0.46157795", "0.46074837", "0.4535003", "0.4533437", "0.451556", "0.4515528", "0.4511829", "0.45113418", "0.45080453", "0.45069817", "0.45044392", "0.44997153", "0.44945538", "0.4493189", "0.44779503", "0.446116", "0.4460667", "0.44542733", "0.44520557", "0.4444903", "0.4442975", "0.44416243", "0.44336393", "0.44318464", "0.4431793", "0.44316298", "0.4428044", "0.44209412", "0.44193745", "0.44102293", "0.4406741", "0.44064045", "0.44029465", "0.4370341", "0.43598828", "0.435587", "0.43530566", "0.43424645", "0.43413836", "0.43411943", "0.4340261", "0.4335893", "0.4333996", "0.4333381", "0.4331922", "0.43317032", "0.43308198", "0.4329993", "0.43223816", "0.43148634", "0.43053514", "0.4303781", "0.4301256", "0.4299457", "0.42844278", "0.42843908", "0.4279536", "0.42765382", "0.4275532", "0.42655122", "0.42593578", "0.4256084", "0.42531705", "0.42523265", "0.42501324", "0.42431265", "0.42329046", "0.42324406", "0.42290944", "0.42275572", "0.422373", "0.4219419", "0.42186043", "0.42161754", "0.42146224", "0.42114905", "0.42080888", "0.4206676" ]
0.88310105
0
assertPinCids verifies that the pins match the expected cids
func assertPinCids(t *testing.T, pins []iface.Pin, cids ...cidContainer) { t.Helper() if expected, actual := len(cids), len(pins); expected != actual { t.Fatalf("expected pin list to have len %d, was %d", expected, actual) } cSet := cid.NewSet() for _, c := range cids { cSet.Add(c.Cid()) } valid := true for _, p := range pins { c := p.Path().Cid() if cSet.Has(c) { cSet.Remove(c) } else { valid = false break } } valid = valid && cSet.Len() == 0 if !valid { pinStrs := make([]string, len(pins)) for i, p := range pins { pinStrs[i] = p.Path().Cid().String() } pathStrs := make([]string, len(cids)) for i, c := range cids { pathStrs[i] = c.Cid().String() } t.Fatalf("expected: %s \nactual: %s", strings.Join(pathStrs, ", "), strings.Join(pinStrs, ", ")) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func AssertCidsEqual(t *testing.T, m cid.Cid, n cid.Cid) {\n\tif !m.Equals(n) {\n\t\tassert.Fail(t, \"CIDs don't match\", \"not equal %v %v\", m, n)\n\t}\n}", "func assertPinLsAllConsistency(t *testing.T, ctx context.Context, api iface.CoreAPI) {\n\tt.Helper()\n\tallPins, err := accPins(api.Pin().Ls(ctx))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttype pinTypeProps struct {\n\t\t*cid.Set\n\t\topt.PinLsOption\n\t}\n\n\tall, recursive, direct, indirect := cid.NewSet(), cid.NewSet(), cid.NewSet(), cid.NewSet()\n\ttypeMap := map[string]*pinTypeProps{\n\t\t\"recursive\": {recursive, opt.Pin.Ls.Recursive()},\n\t\t\"direct\": {direct, opt.Pin.Ls.Direct()},\n\t\t\"indirect\": {indirect, opt.Pin.Ls.Indirect()},\n\t}\n\n\tfor _, p := range allPins {\n\t\tif !all.Visit(p.Path().Cid()) {\n\t\t\tt.Fatalf(\"pin ls returned the same cid multiple times\")\n\t\t}\n\n\t\ttypeStr := p.Type()\n\t\tif typeSet, ok := typeMap[p.Type()]; ok {\n\t\t\ttypeSet.Add(p.Path().Cid())\n\t\t} else {\n\t\t\tt.Fatalf(\"unknown pin type: %s\", typeStr)\n\t\t}\n\t}\n\n\tfor typeStr, pinProps := range typeMap {\n\t\tpins, err := accPins(api.Pin().Ls(ctx, pinProps.PinLsOption))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\n\t\tif expected, actual := len(pins), pinProps.Set.Len(); expected != actual {\n\t\t\tt.Fatalf(\"pin ls all has %d pins of type %s, but pin ls for the type has %d\", expected, typeStr, actual)\n\t\t}\n\n\t\tfor _, p := range pins {\n\t\t\tif pinType := p.Type(); pinType != typeStr {\n\t\t\t\tt.Fatalf(\"returned wrong pin type: expected %s, got %s\", typeStr, pinType)\n\t\t\t}\n\n\t\t\tif c := p.Path().Cid(); !pinProps.Has(c) {\n\t\t\t\tt.Fatalf(\"%s expected to be in pin ls all as type %s\", c.String(), typeStr)\n\t\t\t}\n\t\t}\n\t}\n}", "func AssertHaveSameCid(t *testing.T, m HasCid, n HasCid) {\n\tif !m.Cid().Equals(n.Cid()) {\n\t\tassert.Fail(t, \"CIDs don't match\", \"not equal %v %v\", m.Cid(), n.Cid())\n\t}\n}", "func (m *MockFullNode) StateActorCodeCIDs(arg0 context.Context, arg1 network.Version) (map[string]cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"StateActorCodeCIDs\", arg0, arg1)\n\tret0, _ := ret[0].(map[string]cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func AssertCitiesAreConnected(c []*cities.City) error {\n\tfor _, city := range c {\n\t\tif !cityHasNeighbors(city) {\n\t\t\treturn fmt.Errorf(\"city %s has no connections\", city.Name)\n\t\t}\n\n\t\tif ok, neighbor := neighborsAreConnectedBack(city); !ok {\n\t\t\treturn fmt.Errorf(\"city %s connected to %s, but %s is not connected to %s\", city.Name, neighbor.Name, neighbor.Name, city.Name)\n\t\t}\n\t}\n\treturn nil\n}", "func TestDropSectorsVerify(t *testing.T) {\n\ttests := []struct {\n\t\tnumDropped, oldNum uint64\n\t\terr error\n\t}{\n\t\t{0, 0, nil},\n\t\t{0, 1, nil},\n\t\t{1, 1, nil},\n\t\t{2, 1, fmt.Errorf(\"bad input: numSectors (%v) is greater than the number of sectors in the contract (%v)\", 2, 1)},\n\t}\n\tfor _, test := range tests {\n\t\terr := dropSectorsVerify(test.numDropped, test.oldNum)\n\t\tif err != test.err && err.Error() != test.err.Error() {\n\t\t\tt.Errorf(\"dropSectorsVerify(%v, %v): expected '%v', got '%v'\", test.numDropped, test.oldNum, test.err, err)\n\t\t}\n\t}\n}", "func TestValidateContractPerms(t *testing.T) {\n\ttests := []struct {\n\t\tcRef string\n\t\tcidRoles string\n\t\tallow bool\n\t\tmsg string\n\t}{\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tcidRoles: \"user\",\n\t\t\tallow: false,\n\t\t\tmsg: \"Should not allow\",\n\t\t},\n\t\t{\n\t\t\tcRef: 
contractCreateWallet,\n\t\t\tcidRoles: \"user\",\n\t\t\tallow: true,\n\t\t\tmsg: \"Should allow\",\n\t\t},\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tcidRoles: \"admin\",\n\t\t\tallow: true,\n\t\t\tmsg: \"Should allow\",\n\t\t},\n\t\t{\n\t\t\tcRef: contractCreateWallet,\n\t\t\tcidRoles: \"admin\",\n\t\t\tallow: false,\n\t\t\tmsg: \"Should not allow\",\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Logf(\"%v %v to invoke %v contract\", tt.msg, tt.cidRoles, tt.cRef)\n\n\t\tappAuth := simpleSetup(t, tt.cidRoles)\n\t\terr := appAuth.ValidateContractPerms(tt.cRef)\n\n\t\tif !tt.allow {\n\t\t\tassert.Error(t, err)\n\t\t} else {\n\t\t\tassert.NoError(t, err)\n\t\t}\n\t}\n}", "func verifyCnsVolumeMetadataAndCnsVSphereVolumeMigrationCrdForPvcs(ctx context.Context, client clientset.Interface, namespace string, pvcs []*v1.PersistentVolumeClaim) {\n\tfor _, pvc := range pvcs {\n\t\tvpath := getvSphereVolumePathFromClaim(ctx, client, namespace, pvc.Name)\n\t\tframework.Logf(\"Processing PVC: %s\", pvc.Name)\n\t\tpv := getPvFromClaim(client, namespace, pvc.Name)\n\t\tcrd, err := waitForCnsVSphereVolumeMigrationCrd(ctx, vpath)\n\t\tgomega.Expect(err).NotTo(gomega.HaveOccurred())\n\t\tpod := getPodTryingToUsePvc(ctx, client, namespace, pvc.Name)\n\t\terr = waitAndVerifyCnsVolumeMetadata(crd.Spec.VolumeID, pvc, pv, pod)\n\t\tgomega.Expect(err).NotTo(gomega.HaveOccurred())\n\t}\n}", "func TestACIFaultSeverityRaw_IDPositive(t *testing.T) {\n\tfor _, tr := range t2 {\n\t\tt.Run(tr.raw, func(t *testing.T) {\n\t\t\t// Prepare to assert multiple times.\n\t\t\tassert := assert.New(t)\n\t\t\t// Pass in Raw from table to make new instance.\n\t\t\traw, err := acigo.NewACIFaultSeverityRaw(tr.raw)\n\t\t\tassert.Nil(err)\n\t\t\tassert.Equal(tr.id, int(raw.ID()))\n\t\t})\n\t}\n}", "func TestSortCIDRs(t *testing.T) {\n\tRegisterTestingT(t)\n\tcidrs := []ip.CIDR{\n\t\tip.MustParseCIDROrIP(\"100.65.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"127.0.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"172.17.0.6/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.1.0/26\"),\n\t\tip.MustParseCIDROrIP(\"10.65.0.2/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.0.3/32\"),\n\t\tip.MustParseCIDROrIP(\"172.17.0.7/32\"),\n\t}\n\tsortCIDRs(cidrs)\n\texpectedResult := []ip.CIDR{\n\t\tip.MustParseCIDROrIP(\"10.65.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.0.2/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.0.3/32\"),\n\t\tip.MustParseCIDROrIP(\"10.65.1.0/26\"),\n\t\tip.MustParseCIDROrIP(\"100.65.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"127.0.0.1/32\"),\n\t\tip.MustParseCIDROrIP(\"172.17.0.6/32\"),\n\t\tip.MustParseCIDROrIP(\"172.17.0.7/32\"),\n\t}\n\tExpect(cidrs).To(Equal(expectedResult))\n}", "func AssertCertificateHasIPAddresses(t *testing.T, cert *x509.Certificate, IPAddresses ...net.IP) {\n\tfor _, IPAddress := range IPAddresses {\n\t\tfound := false\n\t\tfor _, val := range cert.IPAddresses {\n\t\t\tif val.Equal(IPAddress) {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif !found {\n\t\t\tt.Errorf(\"cert does not contain IPAddress %s\", IPAddress)\n\t\t}\n\t}\n}", "func TestAllInt(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\ts []int\n\t\texpected bool\n\t}{\n\t\t{[]int{0, 2, 4}, true},\n\t\t{[]int{}, true},\n\t\t{[]int{2, 4, 1}, false},\n\t\t{[]int{1}, false},\n\t\t{[]int{-2, 4}, true},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.AllInt(test.s, func(i int) bool {\n\t\t\treturn i%2 == 0\n\t\t})\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | 
actual : '%v'\", test.expected, actual)\n\t}\n}", "func verifyCIDRsNotOverlap(acidr, bcidr *net.IPNet) error {\n\tif acidr.Contains(bcidr.IP) || bcidr.Contains(acidr.IP) {\n\t\treturn errors.Errorf(\"CIDRS %s and %s overlap\", acidr.String(), bcidr.String())\n\t}\n\treturn nil\n}", "func TestEqIntSlice(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\ta []int\n\t\tb []int\n\t\texpected bool\n\t}{\n\t\t{[]int{1, 2}, []int{1, 2}, true},\n\t\t{[]int{1, 2}, []int{2, 1}, false},\n\t\t{[]int{1, 2}, []int{1}, false},\n\t\t{[]int{1, 2}, []int{1, 2, 1}, false},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.EqSlices(&test.a, &test.b)\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | actual : '%v'\", test.expected, actual)\n\t}\n}", "func CountIntArrayAssert(lenght int,array []int) bool{\n\tif lenght == len(array) {\n\t\treturn true\n\t} else {\n\t\tif show == true {\n\t\t\tfmt.Printf(\"Failed! %v and %v are not equal : \\n\",lenght,array)\n\t\t}\n\t\treturn false\n\t}\n}", "func TestWithContractAuthErrors(t *testing.T) {\n\tvar expSTType errors.StackTrace\n\n\targs := []string{mock.Anything}\n\n\ttests := []struct {\n\t\tcRef string\n\t\tc rbac.ContractFunc\n\t\texpSC int32\n\t\texpC int32\n\t\tmsg string\n\t\tcidRoles string\n\t\tcidFound bool\n\t\tcidErr error\n\t}{\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tc: mockContract,\n\t\t\texpSC: http.StatusUnauthorized,\n\t\t\texpC: rbac.CodeErrAuthentication,\n\t\t\tmsg: \"when an error is returned from the CID\",\n\t\t\tcidRoles: mock.Anything,\n\t\t\tcidFound: false,\n\t\t\tcidErr: errors.New(\"some err from cid\"),\n\t\t},\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tc: mockContract,\n\t\t\texpSC: http.StatusForbidden,\n\t\t\texpC: rbac.CodeErrRoles,\n\t\t\tmsg: \"when the roleAttr is not found in the identity\",\n\t\t\tcidRoles: mock.Anything,\n\t\t\tcidFound: false,\n\t\t\tcidErr: nil,\n\t\t},\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tc: mockContract,\n\t\t\texpSC: http.StatusForbidden,\n\t\t\texpC: rbac.CodeErrContract,\n\t\t\tmsg: \"when the role is not found in the permissions map\",\n\t\t\tcidRoles: \"anUnknownRole\",\n\t\t\tcidFound: true,\n\t\t\tcidErr: nil,\n\t\t},\n\t\t{\n\t\t\tcRef: contractCreateTransfer,\n\t\t\tc: mockContract,\n\t\t\texpSC: http.StatusForbidden,\n\t\t\texpC: rbac.CodeErrContract,\n\t\t\tmsg: \"when contract invocation is not allowed\",\n\t\t\tcidRoles: \"user\",\n\t\t\tcidFound: true,\n\t\t\tcidErr: nil,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tstub := initEmptyStub()\n\t\tcid := new(mockCID)\n\t\tcid.On(\"GetAttributeValue\", mock.Anything).Return(tt.cidRoles, tt.cidFound, tt.cidErr)\n\t\tcid.On(\"GetID\", mock.Anything).Return(mock.Anything)\n\n\t\tappAuth, err := rbac.New(stub, cid, getRolePerms(), \"roles\")\n\t\t// If the New constructor didn't fail\n\t\tif err == nil {\n\t\t\t_, err = appAuth.WithContractAuth(tt.cRef, args, tt.c)\n\t\t}\n\n\t\tassert.Implements(t, (*error)(nil), err)\n\t\tassert.Implements(t, (*rbac.AuthErrorInterface)(nil), err)\n\t\tassert.IsType(t, (string)(\"\"), err.Error())\n\n\t\tif assert.Error(t, err) {\n\t\t\tt.Logf(\"Should return an error with code %v and HTTP status code %v %v\\nmsg: %v\", tt.expC, tt.expSC, tt.msg, err)\n\n\t\t\tif e, ok := err.(rbac.AuthErrorInterface); ok {\n\t\t\t\tassert.Equal(t, tt.expC, e.Code())\n\t\t\t\tassert.Equal(t, tt.expSC, e.StatusCode())\n\t\t\t\tassert.IsType(t, expSTType, e.StackTrace())\n\t\t\t}\n\t\t}\n\t}\n}", "func TestInIntSlice(t *testing.T) {\n\tt.Parallel()\n\tvar 
tests = []struct {\n\t\thaystack []int\n\t\tneedle int\n\t\texpected bool\n\t}{\n\t\t{[]int{0, 1, 2}, 2, true},\n\t\t{[]int{0, 1, 2}, 3, false},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.InSlice(test.needle, &test.haystack)\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | actual : '%v'\", test.expected, actual)\n\t}\n}", "func verifyMembership(n *nwo.Network, expectedPeers []*nwo.Peer, channelName string, chaincodes ...string) {\n\texpectedDiscoveredPeers := make([]nwo.DiscoveredPeer, 0, len(expectedPeers))\n\tfor _, peer := range expectedPeers {\n\t\texpectedDiscoveredPeers = append(expectedDiscoveredPeers, n.DiscoveredPeer(peer, chaincodes...))\n\t}\n\tfor _, peer := range expectedPeers {\n\t\tEventually(nwo.DiscoverPeers(n, peer, \"User1\", channelName), n.EventuallyTimeout).Should(ConsistOf(expectedDiscoveredPeers))\n\t}\n}", "func TestNeigbors(t *testing.T) {\n\tb := MakeBoard(10, 10, 10)\n\tvar tests = []struct {\n\t\tinputX, inputY int\n\t\toutputNeigbors [][2]int\n\t}{\n\t\t// center board\n\t\t{4, 3,\n\t\t\t[][2]int{\n\t\t\t\t{3, 2}, {3, 3}, {3, 4}, {4, 2},\n\t\t\t\t{4, 4}, {5, 2}, {5, 3}, {5, 4}},\n\t\t},\n\t\t// top left corner\n\t\t{0, 0,\n\t\t\t[][2]int{\n\t\t\t\t{0, 1}, {1, 0}, {1, 1}},\n\t\t},\n\t\t// bottom right corner\n\t\t{9, 9,\n\t\t\t[][2]int{\n\t\t\t\t{8, 9}, {9, 8}, {8, 8}},\n\t\t},\n\t}\n\n\tfor _, test := range tests {\n\t\tn := b.neigborCoords(test.inputX, test.inputY)\n\t\texpectedMap := make(map[[2]int]bool)\n\n\t\tfor _, coord := range test.outputNeigbors {\n\t\t\texpectedMap[coord] = true\n\t\t}\n\n\t\tfor _, coord := range n {\n\t\t\tif !expectedMap[coord] {\n\t\t\t\tt.Errorf(\"(%d, %d) Unexpected Coordinate: %v\",\n\t\t\t\t\ttest.inputX, test.inputY, coord)\n\t\t\t}\n\t\t\texpectedMap[coord] = false\n\t\t}\n\n\t\tfor coord, b := range expectedMap {\n\t\t\tif b {\n\t\t\t\tt.Errorf(\"(%d, %d) Coordinate not created: %v\",\n\t\t\t\t\ttest.inputX, test.inputY, coord)\n\t\t\t}\n\t\t}\n\t}\n}", "func testIntArray(ia []int) bool {\n\taTester := [9]int{0, 0, 0, 0, 0, 0, 0, 0, 0}\n\n\tfor i := 0; i < len(ia); i++ {\n\t\taTester[ia[i]-1]++\n\t}\n\n\tfor i := 0; i < len(aTester); i++ {\n\n\t\tif aTester[i] != 1 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func checkContract(t *testing.T, tree *avl.Tree, id TransactionID, codeFilePath string, expectedGasBalance uint64, expectedEmptyMemPages []int, expectedNotEmptyMemPages []int) {\n\tcode, exist := ReadAccountContractCode(tree, id)\n\tassert.True(t, exist, \"contract ID: %x\", id)\n\tassert.NotEmpty(t, code, \"contract ID: %x\", id)\n\n\tgasBalance, exist := ReadAccountContractGasBalance(tree, id)\n\tassert.True(t, exist, \"contract ID: %x\", id)\n\tassert.Equal(t, expectedGasBalance, gasBalance, \"contract ID: %x\", id)\n\n\texpectedCode, err := ioutil.ReadFile(codeFilePath)\n\tassert.NoError(t, err)\n\tassert.EqualValues(t, expectedCode, code, \"contract ID: %x, filepath: %s\", id, codeFilePath)\n\n\tnumPages, exist := ReadAccountContractNumPages(tree, id)\n\tassert.True(t, exist, \"contract ID: %x\", id)\n\tassert.EqualValues(t, expectedPageNum, numPages, \"contract ID: %x\", id)\n\n\tfor _, v := range expectedEmptyMemPages {\n\t\tpage, exist := ReadAccountContractPage(tree, id, uint64(v))\n\t\tassert.False(t, exist)\n\t\tassert.Empty(t, page)\n\t}\n\n\tfor _, v := range expectedNotEmptyMemPages {\n\t\tpage, exist := ReadAccountContractPage(tree, id, uint64(v))\n\t\tassert.True(t, exist)\n\t\tassert.NotEmpty(t, page)\n\t\tassert.Len(t, page, PageSize)\n\t}\n}", "func 
TestACIFaultSeverityID_RawPositive(t *testing.T) {\n\tfor _, tr := range t1 {\n\t\tt.Run(string(tr.id), func(t *testing.T) {\n\t\t\t// Prepare to assert multiple times.\n\t\t\tassert := assert.New(t)\n\t\t\t// Pass in ID from table to make new instance.\n\t\t\tid, err := acigo.NewACIFaultSeverityID(tr.id)\n\t\t\tassert.Nil(err)\n\t\t\tassert.Equal(tr.raw, string(id.Raw()))\n\t\t})\n\t}\n}", "func verifyTip(t *testing.T, store syncStoreReader, tip types.TipSet, stateRoot cid.Cid) {\n\tfoundTip, err := store.GetTipSet(tip.Key())\n\trequire.NoError(t, err)\n\tassert.Equal(t, tip, foundTip)\n\n\tfoundState, err := store.GetTipSetStateRoot(tip.Key())\n\trequire.NoError(t, err)\n\tassert.Equal(t, stateRoot, foundState)\n\n\tparent, err := tip.Parents()\n\tassert.NoError(t, err)\n\th, err := tip.Height()\n\tassert.NoError(t, err)\n\tchildTsasSlice, err := store.GetTipSetAndStatesByParentsAndHeight(parent, h)\n\tassert.NoError(t, err)\n\tassert.True(t, containsTipSet(childTsasSlice, tip))\n}", "func (g *testGenerator) assertTipBlockNumTxns(expected int) {\n\tnumTxns := len(g.tip.Transactions)\n\tif numTxns != expected {\n\t\tpanic(fmt.Sprintf(\"number of txns in block %q (height %d) is \"+\n\t\t\t\"%d instead of expected %d\", g.tipName,\n\t\t\tg.tip.Header.Height, numTxns, expected))\n\t}\n}", "func assertInvariants(t *testing.T, msg string,\n\tpOrig Pool, cOrig Candidates, pMod Pool, cMods Candidates, tokens int64) {\n\n\t// total tokens conserved\n\trequire.Equal(t,\n\t\tpOrig.UnbondedPool+pOrig.BondedPool,\n\t\tpMod.UnbondedPool+pMod.BondedPool+tokens,\n\t\t\"Tokens not conserved - msg: %v\\n, pOrig.BondedShares: %v, pOrig.UnbondedShares: %v, pMod.BondedShares: %v, pMod.UnbondedShares: %v, pOrig.UnbondedPool: %v, pOrig.BondedPool: %v, pMod.UnbondedPool: %v, pMod.BondedPool: %v, tokens: %v\\n\",\n\t\tmsg,\n\t\tpOrig.BondedShares, pOrig.UnbondedShares,\n\t\tpMod.BondedShares, pMod.UnbondedShares,\n\t\tpOrig.UnbondedPool, pOrig.BondedPool,\n\t\tpMod.UnbondedPool, pMod.BondedPool, tokens)\n\n\t// nonnegative bonded shares\n\trequire.False(t, pMod.BondedShares.LT(sdk.ZeroRat()),\n\t\t\"Negative bonded shares - msg: %v\\npOrig: %#v\\npMod: %#v\\ntokens: %v\\n\",\n\t\tmsg, pOrig, pMod, tokens)\n\n\t// nonnegative unbonded shares\n\trequire.False(t, pMod.UnbondedShares.LT(sdk.ZeroRat()),\n\t\t\"Negative unbonded shares - msg: %v\\npOrig: %#v\\npMod: %#v\\ntokens: %v\\n\",\n\t\tmsg, pOrig, pMod, tokens)\n\n\t// nonnegative bonded ex rate\n\trequire.False(t, pMod.bondedShareExRate().LT(sdk.ZeroRat()),\n\t\t\"Applying operation \\\"%s\\\" resulted in negative bondedShareExRate: %d\",\n\t\tmsg, pMod.bondedShareExRate().Evaluate())\n\n\t// nonnegative unbonded ex rate\n\trequire.False(t, pMod.unbondedShareExRate().LT(sdk.ZeroRat()),\n\t\t\"Applying operation \\\"%s\\\" resulted in negative unbondedShareExRate: %d\",\n\t\tmsg, pMod.unbondedShareExRate().Evaluate())\n\n\tfor _, cMod := range cMods {\n\n\t\t// nonnegative ex rate\n\t\trequire.False(t, cMod.delegatorShareExRate().LT(sdk.ZeroRat()),\n\t\t\t\"Applying operation \\\"%s\\\" resulted in negative candidate.delegatorShareExRate(): %v (candidate.Address: %s)\",\n\t\t\tmsg,\n\t\t\tcMod.delegatorShareExRate(),\n\t\t\tcMod.Address,\n\t\t)\n\n\t\t// nonnegative assets\n\t\trequire.False(t, cMod.Assets.LT(sdk.ZeroRat()),\n\t\t\t\"Applying operation \\\"%s\\\" resulted in negative candidate.Assets: %v (candidate.Liabilities: %v, candidate.delegatorShareExRate: %v, candidate.Address: 
%s)\",\n\t\t\tmsg,\n\t\t\tcMod.Assets,\n\t\t\tcMod.Liabilities,\n\t\t\tcMod.delegatorShareExRate(),\n\t\t\tcMod.Address,\n\t\t)\n\n\t\t// nonnegative liabilities\n\t\trequire.False(t, cMod.Liabilities.LT(sdk.ZeroRat()),\n\t\t\t\"Applying operation \\\"%s\\\" resulted in negative candidate.Liabilities: %v (candidate.Assets: %v, candidate.delegatorShareExRate: %v, candidate.Address: %s)\",\n\t\t\tmsg,\n\t\t\tcMod.Liabilities,\n\t\t\tcMod.Assets,\n\t\t\tcMod.delegatorShareExRate(),\n\t\t\tcMod.Address,\n\t\t)\n\n\t}\n\n}", "func (c *Client) TorrentVerifyIDs(ids []int64) (err error) {\n\tif err = c.rpcCall(\"torrent-verify\", &torrentActionIDsParam{IDs: ids}, nil); err != nil {\n\t\terr = fmt.Errorf(\"'torrent-verify' rpc method failed: %v\", err)\n\t}\n\treturn\n}", "func testPasterCID(t *testing.T, s *Service) {\n\tres, err := s.PasterCID(context.TODO())\n\tif err != nil {\n\t\tt.Logf(\"testPasterCID error(%v) \\n\", err)\n\t\treturn\n\t}\n\tt.Logf(\"testPasterCID res: %+v \\n\", res)\n}", "func TestSelectVidsByCid(t *testing.T) {\n\tdb, err := NewDBCase()\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\tdefer db.Close()\n\tif vs, err := SelectVidsByCid(db, \"UCCtTgzGzQSWVzCG0xR7U-MQ\"); err != nil {\n\t\tt.Errorf(\"err: %+v\", err)\n\t} else {\n\t\tfor _, v := range vs {\n\t\t\tfmt.Println(v)\n\t\t}\n\t}\n}", "func (m *PurposeMutation) CarcheckinoutIDs() (ids []int) {\n\tfor id := range m.carcheckinout {\n\t\tids = append(ids, id)\n\t}\n\treturn\n}", "func AssertMembersMatch(t T, actual []toolchainv1alpha1.Member, expected ...toolchainv1alpha1.Member) {\n\trequire.Equal(t, len(expected), len(actual))\n\tfor _, c := range expected {\n\t\tAssertContainsMember(t, actual, c)\n\t}\n}", "func (g *testGenerator) assertTipBlockSigOpsCount(expected int) {\n\tnumSigOps := countBlockSigOps(g.tip)\n\tif numSigOps != expected {\n\t\tpanic(fmt.Sprintf(\"generated number of sigops for block %q \"+\n\t\t\t\"(height %d) is %d instead of expected %d\", g.tipName,\n\t\t\tg.tip.Header.Height, numSigOps, expected))\n\t}\n}", "func OkEqualIntSlices(t *testing.T, found, expected []int) {\n\tif len(expected) != len(found) {\n\t\tt.Logf(\"not ok - slice Found has %d elements, while slice Expected has %d\\n\", len(found), len(expected))\n\t\tt.Logf(\"Found: %v\", found)\n\t\tt.Logf(\"Expected: %v\", expected)\n\t\tt.Fail()\n\t\treturn\n\t}\n\tfor N := 0; N < len(found); N++ {\n\t\tif found[N] == expected[N] {\n\t\t\tt.Logf(\"ok - element %d of Found and the same in Expected are equal [%v]\\n\", N, found[N])\n\t\t} else {\n\t\t\tt.Logf(\"not ok - element %d of Found differs from the corresponding one in Expected. 
\"+\n\t\t\t\t\"Expected '%d' - found: '%d'\\n\", N, expected[N], found[N])\n\t\t\tt.Fail()\n\t\t}\n\t}\n}", "func ExpectCertificateIPsToMatch(csr *certificatesv1.CertificateSigningRequest, _ crypto.Signer) error {\n\tcert, err := pki.DecodeX509CertificateBytes(csr.Status.Certificate)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq, err := pki.DecodeX509CertificateRequestBytes(csr.Spec.Request)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tactualIPs := pki.IPAddressesToString(cert.IPAddresses)\n\texpectedIPs := pki.IPAddressesToString(req.IPAddresses)\n\tif !util.EqualUnsorted(actualIPs, expectedIPs) {\n\t\treturn fmt.Errorf(\"Expected certificate valid for IPs %v, but got a certificate valid for IPs %v\", expectedIPs, actualIPs)\n\t}\n\n\treturn nil\n}", "func verifyPartialLedgers(t *testing.T, provider *Provider, targetStatus []msgs.Status) {\n\t// Also double-check that deleted ledgers do not appear in the provider listing.\n\tactiveLedgers, err := provider.List()\n\trequire.NoError(t, err)\n\n\tfor i := 0; i < len(targetStatus); i++ {\n\t\tledgerID := constructTestLedgerID(i)\n\t\tif targetStatus[i] == msgs.Status_UNDER_CONSTRUCTION || targetStatus[i] == msgs.Status_UNDER_DELETION {\n\t\t\tverifyLedgerDoesNotExist(t, provider, ledgerID)\n\t\t\trequire.NotContains(t, ledgerID, activeLedgers)\n\t\t} else {\n\t\t\tverifyLedgerIDExists(t, provider, ledgerID, targetStatus[i])\n\t\t\trequire.Contains(t, activeLedgers, ledgerID)\n\t\t}\n\t}\n}", "func assertTaskInSlice(t *testing.T, task *types.Task, slice []*types.Task) {\n\tfor _, other := range slice {\n\t\tif task.Id == other.Id {\n\t\t\tassertdeep.Equal(t, task, other)\n\t\t\treturn\n\t\t}\n\t}\n\tt.Fatalf(\"Did not find task %v in %v.\", task, slice)\n}", "func TestPointingPairs(t *testing.T) {\n\tinputBoard := []byte(pointingPairsParam)\n\tb := NewBoard(inputBoard)\n\n\tassert.True(t, b.candidates[18].Contains('1'))\n\tassert.True(t, b.candidates[20].Contains('1'))\n\n\tassert.True(t, b.candidates[32].Contains('2'))\n\tassert.True(t, b.candidates[41].Contains('2'))\n\tassert.True(t, b.candidates[50].Contains('2'))\n\tassert.True(t, b.candidates[68].Contains('2'))\n\n\tassert.True(t, b.candidates[32].Contains('3'))\n\tassert.True(t, b.candidates[41].Contains('3'))\n\tassert.True(t, b.candidates[50].Contains('3'))\n\tassert.True(t, b.candidates[68].Contains('3'))\n\n\tassert.True(t, b.candidates[36].Contains('4'))\n\tassert.True(t, b.candidates[37].Contains('4'))\n\n\tassert.True(t, b.candidates[39].Contains('6'))\n\n\tassert.True(t, b.candidates[71].Contains('7'))\n\n\tassert.True(t, b.candidates[69].Contains('8'))\n\tassert.True(t, b.candidates[70].Contains('8'))\n\tassert.True(t, b.candidates[71].Contains('8'))\n\n\tassert.True(t, b.candidates[57].Contains('9'))\n\n\tb.PointingPairs()\n\n\tassert.True(t, !b.candidates[18].Contains('1'))\n\tassert.True(t, !b.candidates[20].Contains('1'))\n\n\tassert.True(t, !b.candidates[32].Contains('2'))\n\tassert.True(t, !b.candidates[41].Contains('2'))\n\tassert.True(t, !b.candidates[50].Contains('2'))\n\tassert.True(t, !b.candidates[68].Contains('2'))\n\n\tassert.True(t, !b.candidates[32].Contains('3'))\n\tassert.True(t, !b.candidates[41].Contains('3'))\n\tassert.True(t, !b.candidates[50].Contains('3'))\n\tassert.True(t, !b.candidates[68].Contains('3'))\n\n\tassert.True(t, !b.candidates[36].Contains('4'))\n\tassert.True(t, !b.candidates[37].Contains('4'))\n\n\tassert.True(t, !b.candidates[39].Contains('6'))\n\n\tassert.True(t, !b.candidates[71].Contains('7'))\n\n\tassert.True(t, 
!b.candidates[69].Contains('8'))\n\tassert.True(t, !b.candidates[70].Contains('8'))\n\tassert.True(t, !b.candidates[71].Contains('8'))\n\n\tassert.True(t, !b.candidates[57].Contains('9'))\n}", "func (mr *MockFullNodeMockRecorder) StateActorCodeCIDs(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"StateActorCodeCIDs\", reflect.TypeOf((*MockFullNode)(nil).StateActorCodeCIDs), arg0, arg1)\n}", "func (m *AmbulanceMutation) CarcheckinoutIDs() (ids []int) {\n\tfor id := range m.carcheckinout {\n\t\tids = append(ids, id)\n\t}\n\treturn\n}", "func (m *MockFactory) Candidates() (uint64, []*state.Candidate) {\n\tret := m.ctrl.Call(m, \"Candidates\")\n\tret0, _ := ret[0].(uint64)\n\tret1, _ := ret[1].([]*state.Candidate)\n\treturn ret0, ret1\n}", "func TestCarToonCouponCount(t *testing.T) {\n\tConvey(\"TestCarToonCouponCount \", t, func() {\n\t\tvar (\n\t\t\terr error\n\t\t\tmid int64 = 1\n\t\t\tct int8 = 2\n\t\t\tcount int\n\t\t\t_novalmid int64 = 9999999\n\t\t)\n\t\tcount, err = s.CarToonCouponCount(c, mid, ct)\n\t\tt.Logf(\"count(%d)\", count)\n\t\tSo(err, ShouldBeNil)\n\t\tcount, err = s.CarToonCouponCount(c, _novalmid, ct)\n\t\tt.Logf(\"count(%d)\", count)\n\t\tSo(err, ShouldBeNil)\n\t})\n}", "func (f Factory) TestGetMapIDs(t *testing.T) {\n\ta := f.initAdapter(t)\n\tdefer f.free(a)\n\n\tfor i := 0; i < store.DefaultLimit; i++ {\n\t\tfor j := 0; j < store.DefaultLimit; j++ {\n\t\t\ts := cstesting.RandomSegment()\n\t\t\ts.Link.Meta[\"mapId\"] = fmt.Sprintf(\"map%d\", i)\n\t\t\ta.SaveSegment(s)\n\t\t}\n\t}\n\n\tslice, err := a.GetMapIDs(&store.MapFilter{Pagination: store.Pagination{Limit: store.DefaultLimit * store.DefaultLimit}})\n\tif err != nil {\n\t\tt.Fatalf(\"a.GetMapIDs(): err: %s\", err)\n\t}\n\n\tif got, want := len(slice), store.DefaultLimit; got != want {\n\t\tt.Errorf(\"len(slice) = %d want %d\", got, want)\n\t}\n\n\tfor i := 0; i < store.DefaultLimit; i++ {\n\t\tmapID := fmt.Sprintf(\"map%d\", i)\n\t\tif !testutil.ContainsString(slice, mapID) {\n\t\t\tt.Errorf(\"slice does not contain %q\", mapID)\n\t\t}\n\t}\n}", "func assertBalanceAmounts(amounts []*rosetta.Amount) error {\n\tcurrencies := make([]*rosetta.Currency, 0)\n\tfor _, amount := range amounts {\n\t\t// Ensure a currency is used at most once in balance.Amounts\n\t\tif containsCurrency(currencies, amount.Currency) {\n\t\t\treturn fmt.Errorf(\"currency %+v used in balance multiple times\", amount.Currency)\n\t\t}\n\t\tcurrencies = append(currencies, amount.Currency)\n\n\t\tif err := Amount(amount); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *config) checkReservationIDs() hcl.Diagnostics {\n\tvar diagnostics hcl.Diagnostics\n\n\td := checkEachReservation(c.ReservationIDs, c.ReservationIDsDefault, c.ClusterName, controller, c.ControllerCount)\n\tdiagnostics = append(diagnostics, d...)\n\n\tfor _, w := range c.WorkerPools {\n\t\td := checkEachReservation(w.ReservationIDs, w.ReservationIDsDefault, w.Name, worker, w.Count)\n\t\tdiagnostics = append(diagnostics, d...)\n\t}\n\n\treturn diagnostics\n}", "func AssertContainsMember(t T, members []toolchainv1alpha1.Member, contains toolchainv1alpha1.Member) {\n\tfor _, c := range members {\n\t\tif c.ClusterName == contains.ClusterName {\n\t\t\tt.Logf(\"checking '%s'\", c.ClusterName)\n\t\t\tAssertConditionsMatch(t, c.MemberStatus.Conditions, contains.MemberStatus.Conditions...)\n\t\t\tassert.Equal(t, contains.APIEndpoint, c.APIEndpoint)\n\t\t\tassert.Equal(t, 
contains.MemberStatus.ResourceUsage, c.MemberStatus.ResourceUsage)\n\t\t\treturn\n\t\t}\n\t}\n\tassert.FailNow(t, fmt.Sprintf(\"the list of members %+v doesn't contain the expected member %+v\", members, contains))\n}", "func (m *MockUnsignedTx) InputIDs() set.Set[ids.ID] {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"InputIDs\")\n\tret0, _ := ret[0].(set.Set[ids.ID])\n\treturn ret0\n}", "func TestAnyInt(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\ts []int\n\t\texpected bool\n\t}{\n\t\t{[]int{0, 2, 4}, true},\n\t\t{[]int{-2, 4}, true},\n\t\t{[]int{1}, false},\n\t\t{[]int{}, false},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.AnyInt(test.s, func(i int) bool {\n\t\t\treturn i%2 == 0\n\t\t})\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | actual : '%v'\", test.expected, actual)\n\t}\n}", "func testCheckDDCloudAddressListMatches(name string, expected compute.IPAddressList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address list not found with Id '%s'\", addressListID)\n\t\t}\n\n\t\tif addressList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has name '%s' (expected '%s')\", addressListID, addressList.Name, expected.Name)\n\t\t}\n\n\t\tif addressList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has description '%s' (expected '%s')\", addressListID, addressList.Description, expected.Description)\n\t\t}\n\n\t\tif len(addressList.Addresses) != len(expected.Addresses) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d addresses or address-ranges (expected '%d')\", addressListID, len(addressList.Addresses), len(expected.Addresses))\n\t\t}\n\n\t\terr = compareAddressListEntries(expected, *addressList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(addressList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d child lists (expected '%d')\", addressListID, len(addressList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range addressList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := addressList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: address list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\taddressListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func (m *UserMutation) CarcheckinoutIDs() (ids []int) {\n\tfor id := range m.carcheckinout {\n\t\tids = append(ids, id)\n\t}\n\treturn\n}", "func TestGetCNCIs(t *testing.T) {\n\tctx, cancelFunc := context.WithTimeout(context.Background(), standardTimeout)\n\tdefer cancelFunc()\n\n\tinstances, err := bat.StartRandomInstances(ctx, \"\", 1)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to launch instance: %v\", err)\n\t}\n\n\tdefer func() {\n\t\tscheduled, err := bat.WaitForInstancesLaunch(ctx, \"\", instances, 
false)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Instance %s did not launch: %v\", instances[0], err)\n\t\t}\n\n\t\t_, err = bat.DeleteInstances(ctx, \"\", scheduled)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to delete instances: %v\", err)\n\t\t}\n\t}()\n\n\tCNCIs, err := bat.GetCNCIs(ctx)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to retrieve CNCIs: %v\", err)\n\t}\n\n\tif len(CNCIs) == 0 {\n\t\tt.Fatalf(\"No CNCIs found\")\n\t}\n\n\tinstanceDetails, err := bat.GetInstance(ctx, \"\", instances[0])\n\tif err != nil {\n\t\tt.Fatalf(\"Unable to retrieve instance[%s] details: %v\",\n\t\t\tinstances[0], err)\n\t}\n\n\tfoundTenant := false\n\tfor _, v := range CNCIs {\n\t\tif v.TenantID == instanceDetails.TenantID {\n\t\t\tfoundTenant = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif !foundTenant {\n\t\tt.Fatalf(\"Unable to locate a CNCI for instance[%s]\", instances[0])\n\t}\n}", "func checkCorners(m *image.Alpha) error {\n\tsize := m.Bounds().Size()\n\tcorners := [4]uint8{\n\t\tm.Pix[(0*size.Y+0)*m.Stride+(0*size.X+0)],\n\t\tm.Pix[(0*size.Y+0)*m.Stride+(1*size.X-1)],\n\t\tm.Pix[(1*size.Y-1)*m.Stride+(0*size.X+0)],\n\t\tm.Pix[(1*size.Y-1)*m.Stride+(1*size.X-1)],\n\t}\n\tif corners != [4]uint8{} {\n\t\treturn fmt.Errorf(\"corners were not all zero: %v\", corners)\n\t}\n\treturn nil\n}", "func (m *MockFullNode) StateActorManifestCID(arg0 context.Context, arg1 network.Version) (cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"StateActorManifestCID\", arg0, arg1)\n\tret0, _ := ret[0].(cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func AssertIntEqual(t *testing.T, expect, actual, errMsg string) {\n\tif expect != actual {\n\t\tt.Errorf(\"%s, expect:%s, actual:%s\", errMsg, expect, actual)\n\t}\n}", "func assertZonesMatch(t *testing.T, expected, actual time.Time) {\n\tt.Helper()\n\texpectedName, expectedOffset := expected.Zone()\n\tactualName, actualOffset := actual.Zone()\n\tif expectedOffset != actualOffset {\n\t\tt.Errorf(\"Expected Zone '%s' with offset %d. 
Got Zone '%s' with offset %d\", expectedName, expectedOffset, actualName, actualOffset)\n\t}\n}", "func TestCreateIdentity(t *testing.T) {\r\n\r\n\tt.Parallel()\r\n\r\n\tvar (\r\n\t\t// Testing private methods\r\n\t\ttests = []struct {\r\n\t\t\tinputPrivateKey string\r\n\t\t\tinputIDKey string\r\n\t\t\tinputCounter uint32\r\n\t\t\texpectedTxID string\r\n\t\t\texpectedNil bool\r\n\t\t\texpectedError bool\r\n\t\t}{\r\n\t\t\t{\r\n\t\t\t\tprivateKey,\r\n\t\t\t\tidKey,\r\n\t\t\t\t0,\r\n\t\t\t\t\"d2384b0946b8c3137bc0bf12d122efb8b77be998118b65c21448864234188f20\",\r\n\t\t\t\tfalse,\r\n\t\t\t\tfalse,\r\n\t\t\t},\r\n\t\t\t{\r\n\t\t\t\t\"\",\r\n\t\t\t\tidKey,\r\n\t\t\t\t0,\r\n\t\t\t\t\"49957864306b123c3cca8711635ba88890bb334eb3e9f21553b118eb4d66cc62\",\r\n\t\t\t\ttrue,\r\n\t\t\t\ttrue,\r\n\t\t\t},\r\n\t\t\t{\r\n\t\t\t\t\"invalid-key\",\r\n\t\t\t\tidKey,\r\n\t\t\t\t0,\r\n\t\t\t\t\"49957864306b123c3cca8711635ba88890bb334eb3e9f21553b118eb4d66cc62\",\r\n\t\t\t\ttrue,\r\n\t\t\t\ttrue,\r\n\t\t\t},\r\n\t\t\t{\r\n\t\t\t\tprivateKey,\r\n\t\t\t\t\"\",\r\n\t\t\t\t0,\r\n\t\t\t\t\"49957864306b123c3cca8711635ba88890bb334eb3e9f21553b118eb4d66cc62\",\r\n\t\t\t\ttrue,\r\n\t\t\t\ttrue,\r\n\t\t\t},\r\n\t\t\t{\r\n\t\t\t\tprivateKey,\r\n\t\t\t\tidKey,\r\n\t\t\t\t1,\r\n\t\t\t\t\"4f00a4c6bca4a538ecce849b19188222aeb0d28e7b0c9acdb0c20fe9de628f9e\",\r\n\t\t\t\tfalse,\r\n\t\t\t\tfalse,\r\n\t\t\t},\r\n\t\t\t{\r\n\t\t\t\tprivateKey,\r\n\t\t\t\tidKey,\r\n\t\t\t\t100,\r\n\t\t\t\t\"0b61af0cfd6331731b7f897b051a56a903928c6bcff8ba59cdd4b8d0093b12ae\",\r\n\t\t\t\tfalse,\r\n\t\t\t\tfalse,\r\n\t\t\t},\r\n\t\t}\r\n\t)\r\n\r\n\t// Run tests\r\n\tfor _, test := range tests {\r\n\t\tif tx, err := CreateIdentity(test.inputPrivateKey, test.inputIDKey, test.inputCounter); err != nil && !test.expectedError {\r\n\t\t\tt.Errorf(\"%s Failed: [%s] [%s] [%d] inputted and error not expected but got: %s\", t.Name(), test.inputPrivateKey, test.inputIDKey, test.inputCounter, err.Error())\r\n\t\t} else if err == nil && test.expectedError {\r\n\t\t\tt.Errorf(\"%s Failed: [%s] [%s] [%d] inputted and error was expected\", t.Name(), test.inputPrivateKey, test.inputIDKey, test.inputCounter)\r\n\t\t} else if tx == nil && !test.expectedNil {\r\n\t\t\tt.Errorf(\"%s Failed: [%s] [%s] [%d] inputted and nil was not expected\", t.Name(), test.inputPrivateKey, test.inputIDKey, test.inputCounter)\r\n\t\t} else if tx != nil && test.expectedNil {\r\n\t\t\tt.Errorf(\"%s Failed: [%s] [%s] [%d] inputted and nil was expected\", t.Name(), test.inputPrivateKey, test.inputIDKey, test.inputCounter)\r\n\t\t} else if tx != nil && tx.GetTxID() != test.expectedTxID {\r\n\t\t\tt.Errorf(\"%s Failed: [%s] [%s] [%d] inputted and expected [%s] but got [%s]\", t.Name(), test.inputPrivateKey, test.inputIDKey, test.inputCounter, test.expectedTxID, tx.GetTxID())\r\n\t\t}\r\n\t}\r\n}", "func (runner *suiteRunner) checkFixtureArgs() bool {\n succeeded := true\n argType := reflect.Typeof(&C{})\n for _, fv := range []*reflect.FuncValue{runner.setUpSuite,\n runner.tearDownSuite,\n runner.setUpTest,\n runner.tearDownTest} {\n if fv != nil {\n fvType := fv.Type().(*reflect.FuncType)\n if fvType.In(1) != argType || fvType.NumIn() != 2 {\n succeeded = false\n runner.runFunc(fv, fixtureKd, func(c *C) {\n c.logArgPanic(fv, \"*gocheck.C\")\n c.status = panickedSt\n })\n }\n }\n }\n return succeeded\n}", "func TestMultipleIntersections(t *testing.T) {\n\tfirstPath := \"U5,R50\"\n\tsecondPath := \"R10,U10,R10,D10,R10,U10,R20\"\n\n\tfirstWire, err := processWirePath(firstPath)\n\tif err != nil 
{\n\t\tt.Error(err)\n\t}\n\n\tsecondWire, err := processWirePath(secondPath)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\n\tallIntersections := findIntersections(firstWire, secondWire)\n\n\tif len(allIntersections) != 3 {\n\t\tt.Errorf(\"expected len(allIntersections) == 3, got %d\", len(allIntersections))\n\t}\n}", "func assertSubset(t *testing.T, expected, actual *Object) {\nOuter:\n\tfor _, pair := range expected.Pairs {\n\t\tfor _, value := range actual.GetAll(pair.Key) {\n\t\t\tif pair.Value == value {\n\t\t\t\tcontinue Outer\n\t\t\t}\n\t\t}\n\t\tt.Fatalf(\n\t\t\t\"Did not find expected pair %q = %q in\\n%+v\",\n\t\t\tpair.Key,\n\t\t\tpair.Value,\n\t\t\tactual)\n\t}\n}", "func ShouldBeIn(actual interface{}, expected ...interface{}) error {\n\tif err := atLeast(1, expected); err != nil {\n\t\treturn err\n\t}\n\n\texpectedSlice, err := cast.ToSliceE(expected)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor i := range expectedSlice {\n\t\tif ShouldEqual(expectedSlice[i], actual) == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn fmt.Errorf(\"expected '%v' in %v but it wasnt\", actual, expectedSlice)\n}", "func (s *GeomSuite) TestCrossSegmentsIn(c *C) {\n\tx, y := CrossSegments(-1., 1., 4., 1., 0., 0., 2., 2.)\n\tc.Check(x, Near, 1., math.SmallestNonzeroFloat64)\n\tc.Check(y, Near, 1., math.SmallestNonzeroFloat64)\n}", "func testCDAddendumCString(t testing.TB) {\n\tvar line = \"2801121042882201809051 Y10A 0 \"\n\tr := NewReader(strings.NewReader(line))\n\tr.line = line\n\tclh := mockCashLetterHeader()\n\tr.addCurrentCashLetter(NewCashLetter(clh))\n\tbh := mockBundleHeader()\n\tb := NewBundle(bh)\n\tr.currentCashLetter.AddBundle(b)\n\tr.addCurrentBundle(b)\n\tcd := mockCheckDetail()\n\tr.currentCashLetter.currentBundle.AddCheckDetail(cd)\n\n\tif err := r.parseCheckDetailAddendumC(); err != nil {\n\t\tt.Errorf(\"%T: %s\", err, err)\n\t\tlog.Fatal(err)\n\t}\n\trecord := r.currentCashLetter.currentBundle.GetChecks()[0].CheckDetailAddendumC[0]\n\n\tif record.String() != line {\n\t\tt.Errorf(\"Strings do not match\")\n\t}\n}", "func (lkp *lookupInternal) Verify(vcursor VCursor, ids, values []sqltypes.Value) ([]bool, error) {\n\tout := make([]bool, len(ids))\n\tfor i, id := range ids {\n\t\tbindVars := map[string]*querypb.BindVariable{\n\t\t\tlkp.From: sqltypes.ValueBindVariable(id),\n\t\t\tlkp.To: sqltypes.ValueBindVariable(values[i]),\n\t\t}\n\t\tresult, err := vcursor.Execute(lkp.ver, bindVars, true /* isDML */)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"lookup.Verify: %v\", err)\n\t\t}\n\t\tout[i] = (len(result.Rows) != 0)\n\t}\n\treturn out, nil\n}", "func assertFnIn(fnv reflect.Value, inTypes []reflect.Type) {\n\tif fnv.Type().NumIn() != len(inTypes) {\n\t\tpanic(fmt.Sprintf(\"The number of arguments of fn should be %v, but %v\", len(inTypes), fnv.Type().NumIn()))\n\t}\n\n\tfor i := 0; i < fnv.Type().NumIn(); i++ {\n\t\tif fnv.Type().In(i) != inTypes[i] {\n\t\t\tpanic(fmt.Sprintf(\"The argument type at %v of fn should be %v, but %v\", i, inTypes[i], fnv.Type().In(i)))\n\t\t}\n\t}\n}", "func (m *MocksubnetIDsGetter) SubnetIDs(filters ...ec2.Filter) ([]string, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range filters {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"SubnetIDs\", varargs...)\n\tret0, _ := ret[0].([]string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestBuildAcctMap(t *testing.T) {\n\tacctMapExpect := map[string]string{\n\t\t\"000000000001\": \"Test Team 1\",\n\t\t\"000000000011\": \"Test Team 
1\",\n\t\t\"000000000002\": \"Test Team 2\",\n\t\t\"000000000022\": \"Test Team 2\",\n\t}\n\n\tgeneratedMap := BuildAcctMap(exampleTeamMap)\n\n\tfor _, acct := range generatedMap {\n\t\tif acctMapExpect[acct] != generatedMap[acct] {\n\t\t\tt.Errorf(\"ERROR: Incorrect map created for %v\", acct)\n\t\t}\n\t}\n}", "func assertFlagsEqual(t *testing.T, expected, actual []Flag) {\n\tassert.Equal(t, len(expected), len(actual))\n\n\tfor k, v := range expected {\n\t\tassert.Equal(t, v, actual[k])\n\t}\n}", "func areSet(a, b, c *Card) bool {\n\ta1, a2, a3, a4 := a.Attributes()\n\tb1, b2, b3, b4 := b.Attributes()\n\tc1, c2, c3, c4 := c.Attributes()\n\treturn (true &&\n\t\tareSameOrDifferent(a1, b1, c1) &&\n\t\tareSameOrDifferent(a2, b2, c2) &&\n\t\tareSameOrDifferent(a3, b3, c3) &&\n\t\tareSameOrDifferent(a4, b4, c4))\n}", "func (s *Set) checkBounds(ids ...id) error {\n\tfor _, id := range ids {\n\t\tif id < 0 || int(id) >= len(s.symid) {\n\t\t\treturn fmt.Errorf(\"entryset: symid %d out of bounds\", id)\n\t\t}\n\t}\n\treturn nil\n}", "func MsgCidsEqual(m1, m2 *Message) bool {\n\tm1Cid, err := m1.Cid()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tm2Cid, err := m2.Cid()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn m1Cid.Equals(m2Cid)\n}", "func verifyTenor(customer *models.Customer) bool {\n\tlistTenor := [5]int{3, 6, 9, 12, 24}\n\tfor _, tenor := range listTenor {\n\t\tif tenor == customer.Tenor {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func AssertExactMatch(t *testing.T, evs []cap.Event, preds []EventP) {\n\tt.Helper()\n\terr := verifyExactMatch(preds, evs)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n}", "func TestDirectArc(t *testing.T) {\n\tassert, _ := makeAR(t)\n\tfixture := NewFixture(t, pcct.Config{\n\t\tCsMemoryCapacity: 100,\n\t\tCsIndirectCapacity: 100,\n\t})\n\n\t// insert 1-100 (NEW), p=0, T1=[1..100]\n\tfixture.InsertBulk(1, 100, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(0.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(100, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// use 1-60 (T1), p=0, T1=[61..100], T2=[1..60]\n\tfixture.FindBulk(1, 60, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(0.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(40, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(60, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// insert 101-130 (NEW), p=0, T1=[91..130], B1=[61..90], T2=[1..60]\n\tfixture.InsertBulk(101, 130, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(0.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(40, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(30, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(60, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// insert 61-80 (B1), p=20, T1=[111..130], B1=[81..110], T2=[1..80]\n\tfixture.InsertBulk(61, 80, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(20.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(20, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(30, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(80, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// insert 111-120 (T1), p=20, T1=[121..130], B1=[81..110], 
T2=[1..80,111..120]\n\tfixture.InsertBulk(111, 120, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(20.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(10, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(30, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(90, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(0, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// insert 131-140 (NEW), p=20, T1=[121..140], B1=[81..110], T2=[11..80,111..120], B2=[1..10]\n\tfixture.InsertBulk(131, 140, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(20.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(20, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(30, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(80, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(10, fixture.Cs.CountEntries(cs.ListDirectB2))\n\n\t// insert 1 (B2), p=20-30/10=17, T1=[122..140], B1=[81..110,121], T2=[11..80,111..120,1], B2=[2..10]\n\tfixture.InsertBulk(1, 1, \"/N/%d\", \"/N/%d\")\n\tassert.InDelta(17.0, fixture.Cs.ReadDirectArcP(), 0.1)\n\tassert.Equal(19, fixture.Cs.CountEntries(cs.ListDirectT1))\n\tassert.Equal(31, fixture.Cs.CountEntries(cs.ListDirectB1))\n\tassert.Equal(81, fixture.Cs.CountEntries(cs.ListDirectT2))\n\tassert.Equal(9, fixture.Cs.CountEntries(cs.ListDirectB2))\n}", "func TestInArray(t *testing.T) {\n\tarrays := []InArrayTest{\n\t\t{[]string{\"1\", \"2\", \"3\"}, \"2\", true},\n\t\t{[]string{\"1\", \"2\", \"3\"}, \"4\", false},\n\t}\n\n\tfor i, array := range arrays {\n\t\tif !reflect.DeepEqual(array.Result, InArray(array.Arrays, array.Value)) {\n\t\t\tt.Errorf(\"InArray(%v) failed: expected %v got %v\", i, array.Result, !array.Result)\n\t\t}\n\t}\n}", "func (m *MockDiff) Ids() []string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Ids\")\n\tret0, _ := ret[0].([]string)\n\treturn ret0\n}", "func isEqual(cnpj []uint) bool {\n\tfor i := 1; i < len(cnpj); i++ {\n\t\tif cnpj[0] != cnpj[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func (s *mergeBaseSuite) TestIndependentWithPairOfAncestors(c *C) {\n\trevs := []string{\"C\", \"D\", \"M\", \"N\"}\n\texpectedRevs := []string{\"C\", \"D\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func (r *role) Idxs(peers [2]wire.Address) (our, their channel.Index) {\n\tif r.setup.Identity.Address().Equal(peers[0]) {\n\t\treturn 0, 1\n\t} else if r.setup.Identity.Address().Equal(peers[1]) {\n\t\treturn 1, 0\n\t}\n\tpanic(\"identity not in peers\")\n}", "func verify(srvChan chan string, channel, nick, hostname string, args []string) {\n\tmessage := \"NOTICE \" + channel + \" :\"\n\tif len(args) != 2 {\n\t\tmessage = \"NOTICE \" + channel + \" :ERROR: Invalid number of arguments\"\n\t} else {\n\t\tuname := args[0]\n\t\tpin := args[1]\n\t\treply := cmdDb.Cmd(\"get\", uname+\"Pin\")\n\t\tpinDb, err := (reply.Bytes())\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t\treturn\n\t\t}\n\t\tif string(pinDb) == pin {\n\t\t\tmessage += \"You are now verified as \" + uname\n\t\t\tcmdDb.Cmd(\"set\", uname+\"Host\", hostname)\n\t\t\tcmdDb.Cmd(\"set\", uname+\"Pin\", fmt.Sprintf(\"%06d\", rand.Intn(1000000)))\n\t\t} else {\n\t\t\tmessage += \"PIN does not match that of \" + uname\n\t\t}\n\t}\n\tlog.Println(message)\n\tsrvChan <- message\n}", "func (s *testPluginCodeSuite) TestGetRegionIDs(c *C) {\n\tregionIDs := schedule.GetRegionIDs(s.tc,\"757365727461626C653A7573657231773937383833313437333137333731323135\",\n\t\t\"757365727461626C653A7573657234443637353232383738383832303830353737\")\n\tc.Assert(len(regionIDs), Equals, 
3)\n\tc.Assert(regionIDs[0], Equals, uint64(2))\n\tc.Assert(regionIDs[1], Equals, uint64(3))\n\tc.Assert(regionIDs[2], Equals, uint64(4))\n}", "func TestCVRVirksomhedByCVRNumre(t *testing.T) {\n\tif testing.Short() {\n\t\tt.Skip(\"Running short tests\")\n\t}\n\n\ttests := map[string]struct {\n\t\tcvrNumre []int\n\t\texpected error\n\t}{\n\t\t\"one virksomhed\": {\n\t\t\tcvrNumre: []int{10103940},\n\t\t\texpected: nil,\n\t\t},\n\t\t\"multiple virksomheder\": {\n\t\t\tcvrNumre: []int{10103940, 10150817, 10213231},\n\t\t\texpected: nil,\n\t\t},\n\t\t\"not found\": {\n\t\t\tcvrNumre: []int{1337},\n\t\t\texpected: cvr.ErrNotFound,\n\t\t},\n\t}\n\n\tapiKey := getAPIKey(t)\n\tfor name, test := range tests {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\tc := cvr.NewClient(apiKey)\n\t\t\tvs, err := c.CVRVirksomhederByCVRNumre(test.cvrNumre...)\n\t\t\trequire.Equal(t, test.expected, err)\n\n\t\t\tif test.expected == nil {\n\t\t\t\trequire.Equal(t, len(test.cvrNumre), len(vs))\n\n\t\t\t\tfor i, v := range vs {\n\t\t\t\t\trequire.Equal(t, test.cvrNumre[i], v.CVRNummer)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func TestMockCheckDetailAddendumC(t *testing.T) {\n\tcdAddendumC := mockCheckDetailAddendumC()\n\tif err := cdAddendumC.Validate(); err != nil {\n\t\tt.Error(\"mockCheckDetailAddendumC does not validate and will break other tests: \", err)\n\t}\n\tif cdAddendumC.recordType != \"28\" {\n\t\tt.Error(\"recordType does not validate\")\n\t}\n\tif cdAddendumC.RecordNumber != 1 {\n\t\tt.Error(\"RecordNumber does not validate\")\n\t}\n\tif cdAddendumC.EndorsingBankRoutingNumber != \"121042882\" {\n\t\tt.Error(\"EndorsingBankRoutingNumber does not validate\")\n\t}\n\tif cdAddendumC.EndorsingBankItemSequenceNumber != \"1 \" {\n\t\tt.Error(\"EndorsingBankItemSequenceNumber does not validate\")\n\t}\n\tif cdAddendumC.TruncationIndicator != \"Y\" {\n\t\tt.Error(\"TruncationIndicator does not validate\")\n\t}\n\tif cdAddendumC.ReturnReason != \"A\" {\n\t\tt.Error(\"ReturnReason does not validate\")\n\t}\n\tif cdAddendumC.EndorsingBankConversionIndicator != \"1\" {\n\t\tt.Error(\"EndorsingBankConversionIndicator does not validate\")\n\t}\n\tif cdAddendumC.EndorsingBankCorrectionIndicator != 0 {\n\t\tt.Error(\"EndorsingBankCorrectionIndicator does not validate\")\n\t}\n\tif cdAddendumC.UserField != \"\" {\n\t\tt.Error(\"UserField does not validate\")\n\t}\n\tif cdAddendumC.EndorsingBankIdentifier != 0 {\n\t\tt.Error(\"EndorsingBankIdentifier does not validate\")\n\t}\n}", "func TestCountingBits(t *testing.T) {\n\tvar cases = []struct {\n\t\tinput int\n\t\toutput []int\n\t}{\n\t\t{\n\t\t\tinput: 2,\n\t\t\toutput: []int{0, 1, 1},\n\t\t},\n\t\t{\n\t\t\tinput: 5,\n\t\t\toutput: []int{0, 1, 1, 2, 1, 2},\n\t\t},\n\t}\n\tfor _, c := range cases {\n\t\tx := countBits(c.input)\n\t\tif !reflect.DeepEqual(x, c.output) {\n\t\t\tt.Fail()\n\t\t}\n\t}\n}", "func (th *testHelper) assertPresentInCollectionMPD(chaincodeName, marbleName string, peerList ...*nwo.Peer) {\n\tcommand := commands.ChaincodeQuery{\n\t\tChannelID: th.channelID,\n\t\tName: chaincodeName,\n\t\tCtor: fmt.Sprintf(`{\"Args\":[\"readMarblePrivateDetails\",\"%s\"]}`, marbleName),\n\t}\n\texpectedMsg := fmt.Sprintf(`{\"docType\":\"marblePrivateDetails\",\"name\":\"%s\"`, marbleName)\n\tfor _, peer := range peerList {\n\t\tth.queryChaincode(peer, command, expectedMsg, true)\n\t}\n}", "func (s *mergeBaseSuite) TestIndependentAcrossCrossMerges(c *C) {\n\trevs := []string{\"C\", \"G\", \"dev\", \"M\", \"N\"}\n\texpectedRevs := []string{\"C\", \"G\", 
\"dev\"}\n\ts.AssertIndependents(c, revs, expectedRevs)\n}", "func (me TartIdTypeInt) IsPmcid() bool { return me.String() == \"pmcid\" }", "func (m *PurposeMutation) AddCarcheckinoutIDs(ids ...int) {\n\tif m.carcheckinout == nil {\n\t\tm.carcheckinout = make(map[int]struct{})\n\t}\n\tfor i := range ids {\n\t\tm.carcheckinout[ids[i]] = struct{}{}\n\t}\n}", "func (me TArtIdTypeUnion2) IsPmcpid() bool { return me.String() == \"pmcpid\" }", "func TestMapInt(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\ts []int\n\t\texpected []int\n\t}{\n\t\t{[]int{0, 1, 2}, []int{0, 2, 4}},\n\t\t{[]int{-1}, []int{-2}},\n\t\t{[]int{}, []int{}},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.MapInt(test.s, func(i int) int {\n\t\t\treturn i * 2\n\t\t})\n\t\tassert.True(t, primitives.EqSlices(&actual, &test.expected), \"Expected MapInt(%q, fn) to be %q, got %v\", test.s, test.expected, actual)\n\t}\n}", "func (idMap *IdentityMap) CheckIdentity(id string, login map[string]string) error {\n\tfor name, pattern := range *idMap {\n\t\tif !pattern.MatchString(id) {\n\t\t\tcontinue\n\t\t}\n\n\t\tlogin[name] = id\n\t\tlogin[\"type\"] = \"account\"\n\t\tlogin[\"verifyCode\"] = \"\"\n\n\t\treturn nil\n\t}\n\n\treturn ErrIdentity\n}", "func ExpectCertificateURIsToMatch(csr *certificatesv1.CertificateSigningRequest, _ crypto.Signer) error {\n\tcert, err := pki.DecodeX509CertificateBytes(csr.Status.Certificate)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq, err := pki.DecodeX509CertificateRequestBytes(csr.Spec.Request)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tactualURIs := pki.URLsToString(cert.URIs)\n\texpectedURIs := pki.URLsToString(req.URIs)\n\tif !util.EqualUnsorted(actualURIs, expectedURIs) {\n\t\treturn fmt.Errorf(\"Expected certificate valid for URIs %v, but got a certificate valid for URIs %v\", expectedURIs, actualURIs)\n\t}\n\n\treturn nil\n}", "func isValidCitizenNo(citizenNo *[]byte) bool {\n\tnLen := len(*citizenNo)\n\tif nLen != 15 && nLen != 18 {\n\t\treturn false\n\t}\n\n\tfor i, v := range *citizenNo {\n\t\tn, _ := strconv.Atoi(string(v))\n\t\tif n >= 0 && n <= 9 {\n\t\t\tcontinue\n\t\t}\n\n\t\tif v == 'X' && i == 16 {\n\t\t\tcontinue\n\t\t}\n\n\t\treturn false\n\t}\n\n\tif !checkProvinceValid(*citizenNo) {\n\t\treturn false\n\t}\n\n\tif nLen == 15 {\n\t\t*citizenNo = citizen15To18(*citizenNo)\n\t\tif citizenNo == nil {\n\t\t\treturn false\n\t\t}\n\t} else if !isValidCitizenNo18(citizenNo) {\n\t\treturn false\n\t}\n\n\tnYear, _ := strconv.Atoi(string((*citizenNo)[6:10]))\n\tnMonth, _ := strconv.Atoi(string((*citizenNo)[10:12]))\n\tnDay, _ := strconv.Atoi(string((*citizenNo)[12:14]))\n\tif !checkBirthdayValid(nYear, nMonth, nDay) {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func checkPoints(fabric *elfFabric, takenFabric map[point]int) int {\n\tfor addX := 1; addX <= fabric.size[0]; addX++ {\n\t\tfor addY := 1; addY <= fabric.size[1]; addY++ {\n\t\t\tp := point{x: fabric.position[0] + addX, y: fabric.position[1] + addY}\n\t\t\tnumOfOverlaps := takenFabric[p]\n\t\t\tif numOfOverlaps != 1 {\n\t\t\t\treturn -1\n\t\t\t}\n\t\t}\n\t}\n\treturn fabric.ID\n}", "func testCheckDDCloudPortListMatches(name string, expected compute.PortList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_port_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"not found: %s\", name)\n\t\t}\n\n\t\tportListID := res.Primary.ID\n\n\t\tclient := 
testAccProvider.Meta().(*providerState).Client()\n\t\tportList, err := client.GetPortList(portListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: get port list: %s\", err)\n\t\t}\n\t\tif portList == nil {\n\t\t\treturn fmt.Errorf(\"bad: port list not found with Id '%s'\", portListID)\n\t\t}\n\n\t\tif portList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has name '%s' (expected '%s')\", portListID, portList.Name, expected.Name)\n\t\t}\n\n\t\tif portList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has description '%s' (expected '%s')\", portListID, portList.Description, expected.Description)\n\t\t}\n\n\t\tif len(portList.Ports) != len(expected.Ports) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d ports or port ranges (expected '%d')\", portListID, len(portList.Ports), len(expected.Ports))\n\t\t}\n\n\t\terr = comparePortListEntries(expected, *portList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(portList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d child lists (expected '%d')\", portListID, len(portList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range portList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := portList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: port list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\tportListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func verifyExactMatch(preds []EventP, given []cap.Event) error {\n\tif len(preds) != len(given) {\n\t\treturn fmt.Errorf(\n\t\t\t\"Expecting exact match, but length is not the same:\\nwant: %d\\ngiven: %d\\nevents:\\n%s\",\n\t\t\tlen(preds),\n\t\t\tlen(given),\n\t\t\trenderEvents(given),\n\t\t)\n\t}\n\tfor i, pred := range preds {\n\t\tif !pred.Call(given[i]) {\n\t\t\treturn fmt.Errorf(\n\t\t\t\t\"Expecting exact match, but entry %d did not match:\\ncriteria: %s\\nevent: %s\\nevents:\\n%s\",\n\t\t\t\ti,\n\t\t\t\tpred.String(),\n\t\t\t\tgiven[i].String(),\n\t\t\t\trenderEvents(given),\n\t\t\t)\n\t\t}\n\t}\n\treturn nil\n}", "func (th *testHelper) assertPresentInCollectionM(chaincodeName, marbleName string, peerList ...*nwo.Peer) {\n\tcommand := commands.ChaincodeQuery{\n\t\tChannelID: th.channelID,\n\t\tName: chaincodeName,\n\t\tCtor: fmt.Sprintf(`{\"Args\":[\"readMarble\",\"%s\"]}`, marbleName),\n\t}\n\texpectedMsg := fmt.Sprintf(`{\"docType\":\"marble\",\"name\":\"%s\"`, marbleName)\n\tfor _, peer := range peerList {\n\t\tth.queryChaincode(peer, command, expectedMsg, true)\n\t}\n}", "func TestCanParseContainerConfig(t *testing.T) {\n\tfor _, mapping := range getContainerMapping() {\n\t\tservice, err := compose.NewService(map[string]interface{}{mapping.name: mapping.source})\n\t\tif err != nil {\n\t\t\tt.Errorf(err.Error())\n\t\t\treturn\n\t\t}\n\t\tif err := verifyContainerConfig(mapping.name, mapping.expected, service.GetContainerConfig()); err != nil {\n\t\t\tt.Errorf(err.Error())\n\t\t}\n\t}\n}", "func (sm SectorMap) Verify() error {\n\tfor sector := byte(0); sector <= 0xB; sector++ {\n\t\tif file := sm.FileForSector(0, sector); file != FileReserved {\n\t\t\treturn fmt.Errorf(\"expected track 0, sectors 0-C to be reserved (0xFE), but got 0x%02X in sector %X\", file, sector)\n\t\t}\n\t}\n\n\tfor track := byte(0); track < 35; track++ {\n\t\tfor sector 
:= byte(0); sector < 16; sector++ {\n\t\t\tfile := sm.FileForSector(track, sector)\n\t\t\tif file == FileIllegal {\n\t\t\t\treturn fmt.Errorf(\"found illegal sector map value (%02X), in track %X sector %X\", FileIllegal, track, sector)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (tc *testContext) testNodeAnnotations(t *testing.T) {\n\tfor _, node := range gc.allNodes() {\n\t\tt.Run(node.GetName(), func(t *testing.T) {\n\t\t\tannotations := []string{nc.HybridOverlaySubnet, nc.HybridOverlayMac, metadata.VersionAnnotation,\n\t\t\t\tnc.PubKeyHashAnnotation}\n\t\t\tfor _, annotation := range annotations {\n\t\t\t\tassert.Contains(t, node.Annotations, annotation, \"node missing expected annotation: %s\", annotation)\n\t\t\t}\n\n\t\t\tusernameCorrect, err := tc.checkUsernameAnnotation(&node)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.True(t, usernameCorrect)\n\n\t\t\tpubKey, err := tc.checkPubKeyAnnotation(&node)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.True(t, pubKey)\n\t\t})\n\t}\n}", "func TestMgmt_GetAllIDs(t *testing.T) {\n\tfor i, objType := range vpc.ObjTypes() {\n\t\tobjType := objType\n\t\tt.Run(objType.String(), func(t *testing.T) {\n\t\t\tmgr, err := mgmt.New(nil)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to create new VPC Management handle: %v\", i, err)\n\t\t\t}\n\t\t\tdefer mgr.Close()\n\n\t\t\tids, err := mgr.GetAllIDs(objType)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to get IDs for %s VPC objects: %v\", i, objType, err)\n\t\t\t}\n\n\t\t\t_ = ids // Not sure what to do with these UUIDs\n\t\t})\n\t}\n}", "func TestProbeVerify(t *testing.T) {\n\n\t// Create and register consumer.\n\tac, in := newUpdateConsumer(t)\n\tdefer ac.Close()\n\n\t// Create UDP client.\n\tmc := udpecho.Dial(udpServ)\n\n\t// Filter BPF Events based on client port.\n\tout := filterSourcePort(in, mc.ClientPort())\n\n\t// Generate a single dummy event.\n\tmc.Nop(1)\n\tev, err := readTimeout(out, 20)\n\trequire.NoError(t, err)\n\n\t// Network Namespace\n\tns, err := getNSID()\n\trequire.NoError(t, err)\n\tassert.EqualValues(t, ns, ev.NetNS, ev.String())\n\n\t// Connmark (default 0)\n\tassert.EqualValues(t, 0, ev.Connmark, ev.String())\n\n\t// Accounting\n\tassert.EqualValues(t, 1, ev.PacketsOrig, ev.String())\n\tassert.EqualValues(t, 31, ev.BytesOrig, ev.String())\n\tassert.EqualValues(t, 0, ev.PacketsRet, ev.String())\n\tassert.EqualValues(t, 0, ev.BytesRet, ev.String())\n\n\t// Connection tuple\n\tassert.EqualValues(t, udpServ, ev.DstPort, ev.String())\n\tassert.EqualValues(t, mc.ClientPort(), ev.SrcPort, ev.String())\n\tassert.EqualValues(t, net.IPv4(127, 0, 0, 1), ev.SrcAddr, ev.String())\n\tassert.EqualValues(t, net.IPv4(127, 0, 0, 1), ev.DstAddr, ev.String())\n\tassert.EqualValues(t, 17, ev.Proto, ev.String())\n\n\trequire.NoError(t, acctProbe.RemoveConsumer(ac))\n}", "func TestBIP0032Vectors(t *testing.T) {\n\t// The master seeds for each of the two test vectors in [BIP32].\n\ttestVec1MasterHex := \"000102030405060708090a0b0c0d0e0f\"\n\ttestVec2MasterHex := \"fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542\"\n\ttestVec3MasterHex := \"4b381541583be4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be\"\n\thkStart := uint32(0x80000000)\n\n\ttests := []struct {\n\t\tname string\n\t\tmaster string\n\t\tpath []uint32\n\t\twantPub string\n\t\twantPriv string\n\t}{\n\t\t// Test vector 1\n\t\t{\n\t\t\tname: \"test vector 1 chain 
m\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{},\n\t\t\twantPub: \"xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8\",\n\t\t\twantPriv: \"xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart},\n\t\t\twantPub: \"xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw\",\n\t\t\twantPriv: \"xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1},\n\t\t\twantPub: \"xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ\",\n\t\t\twantPriv: \"xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2},\n\t\t\twantPub: \"xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5\",\n\t\t\twantPriv: \"xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H/2\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2, 2},\n\t\t\twantPub: \"xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV\",\n\t\t\twantPriv: \"xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H/2/1000000000\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2, 2, 1000000000},\n\t\t\twantPub: \"xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy\",\n\t\t\twantPriv: \"xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76\",\n\t\t},\n\n\t\t// Test vector 2\n\t\t{\n\t\t\tname: \"test vector 2 chain m\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{},\n\t\t\twantPub: \"xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB\",\n\t\t\twantPriv: \"xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 2 chain m/0\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{0},\n\t\t\twantPub: \"xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH\",\n\t\t\twantPriv: \"xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 2 chain m/0/2147483647H\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{0, hkStart + 2147483647},\n\t\t\twantPub: \"xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDBrQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a\",\n\t\t\twantPriv: 
\"xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 2 chain m/0/2147483647H/1\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{0, hkStart + 2147483647, 1},\n\t\t\twantPub: \"xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoNJxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon\",\n\t\t\twantPriv: \"xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 2 chain m/0/2147483647H/1/2147483646H\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{0, hkStart + 2147483647, 1, hkStart + 2147483646},\n\t\t\twantPub: \"xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL\",\n\t\t\twantPriv: \"xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 2 chain m/0/2147483647H/1/2147483646H/2\",\n\t\t\tmaster: testVec2MasterHex,\n\t\t\tpath: []uint32{0, hkStart + 2147483647, 1, hkStart + 2147483646, 2},\n\t\t\twantPub: \"xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGsApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt\",\n\t\t\twantPriv: \"xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq38EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j\",\n\t\t},\n\n\t\t// Test vector 3\n\t\t{\n\t\t\tname: \"test vector 3 chain m\",\n\t\t\tmaster: testVec3MasterHex,\n\t\t\tpath: []uint32{},\n\t\t\twantPub: \"xpub661MyMwAqRbcEZVB4dScxMAdx6d4nFc9nvyvH3v4gJL378CSRZiYmhRoP7mBy6gSPSCYk6SzXPTf3ND1cZAceL7SfJ1Z3GC8vBgp2epUt13\",\n\t\t\twantPriv: \"xprv9s21ZrQH143K25QhxbucbDDuQ4naNntJRi4KUfWT7xo4EKsHt2QJDu7KXp1A3u7Bi1j8ph3EGsZ9Xvz9dGuVrtHHs7pXeTzjuxBrCmmhgC6\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 3 chain m/0H\",\n\t\t\tmaster: testVec3MasterHex,\n\t\t\tpath: []uint32{hkStart},\n\t\t\twantPub: \"xpub68NZiKmJWnxxS6aaHmn81bvJeTESw724CRDs6HbuccFQN9Ku14VQrADWgqbhhTHBaohPX4CjNLf9fq9MYo6oDaPPLPxSb7gwQN3ih19Zm4Y\",\n\t\t\twantPriv: \"xprv9uPDJpEQgRQfDcW7BkF7eTya6RPxXeJCqCJGHuCJ4GiRVLzkTXBAJMu2qaMWPrS7AANYqdq6vcBcBUdJCVVFceUvJFjaPdGZ2y9WACViL4L\",\n\t\t},\n\n\t\t// Test vector 1 - Testnet\n\t\t{\n\t\t\tname: \"test vector 1 chain m - testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{},\n\t\t\twantPub: \"tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp\",\n\t\t\twantPriv: \"tprv8ZgxMBicQKsPeDgjzdC36fs6bMjGApWDNLR9erAXMs5skhMv36j9MV5ecvfavji5khqjWaWSFhN3YcCUUdiKH6isR4Pwy3U5y5egddBr16m\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H - testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart},\n\t\t\twantPub: \"tpubD8eQVK4Kdxg3gHrF62jGP7dKVCoYiEB8dFSpuTawkL5YxTus5j5pf83vaKnii4bc6v2NVEy81P2gYrJczYne3QNNwMTS53p5uzDyHvnw2jm\",\n\t\t\twantPriv: \"tprv8bxNLu25VazNnppTCP4fyhyCvBHcYtzE3wr3cwYeL4HA7yf6TLGEUdS4QC1vLT63TkjRssqJe4CvGNEC8DzW5AoPUw56D1Ayg6HY4oy8QZ9\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1 - testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1},\n\t\t\twantPub: \"tpubDApXh6cD2fZ7WjtgpHd8yrWyYaneiFuRZa7fVjMkgxsmC1QzoXW8cgx9zQFJ81Jx4deRGfRE7yXA9A3STsxXj4CKEZJHYgpMYikkas9DBTP\",\n\t\t\twantPriv: \"tprv8e8VYgZxtHsSdGrtvdxYaSrryZGiYviWzGWtDDKTGh5NMXAEB8gYSCLHpFCywNs5uqV7ghRjimALQJkRFZnUrLHpzi2pGkwqLtbubgWuQ8q\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H - 
testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2},\n\t\t\twantPub: \"tpubDDRojdS4jYQXNugn4t2WLrZ7mjfAyoVQu7MLk4eurqFCbrc7cHLZX8W5YRS8ZskGR9k9t3PqVv68bVBjAyW4nWM9pTGRddt3GQftg6MVQsm\",\n\t\t\twantPriv: \"tprv8gjmbDPpbAirVSezBEMuwSu1Ci9EpUJWKokZTYccSZSomNMLytWyLdtDNHRbucNaRJWWHANf9AzEdWVAqahfyRjVMKbNRhBmxAM8EJr7R15\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H/2 - testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2, 2},\n\t\t\twantPub: \"tpubDFfCa4Z1v25WTPAVm9EbEMiRrYwucPocLbEe12BPBGooxxEUg42vihy1DkRWyftztTsL23snYezF9uXjGGwGW6pQjEpcTpmsH6ajpf4CVPn\",\n\t\t\twantPriv: \"tprv8iyAReWmmePqZv8hsVZzpx4KHXRyT4chmHdriW95m11R8Tyi3fDLYDM93bq4NGn1V6eCu5cE3zSQ6hPd31F2ApKXkZgTyn1V78pHjkq1V2v\",\n\t\t},\n\t\t{\n\t\t\tname: \"test vector 1 chain m/0H/1/2H/2/1000000000 - testnet\",\n\t\t\tmaster: testVec1MasterHex,\n\t\t\tpath: []uint32{hkStart, 1, hkStart + 2, 2, 1000000000},\n\t\t\twantPub: \"tpubDHNy3kAG39ThyiwwsgoKY4iRenXDRtce8qdCFJZXPMCJg5dsCUHayp84raLTpvyiNA9sXPob5rgqkKvkN8S7MMyXbnEhGJMW64Cf4vFAoaF\",\n\t\t\twantPriv: \"tprv8kgvuL81tmn36Fv9z38j8f4K5m1HGZRjZY2QxnXDy5PuqbP6a5TzoKWCgTcGHBu66W3TgSbAu2yX6sPza5FkHmy564Sh6gmCPUNeUt4yj2x\",\n\t\t},\n\t}\n\ntests:\n\tfor i, test := range tests {\n\t\tmasterSeed, err := hex.DecodeString(test.master)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"DecodeString #%d (%s): unexpected error: %v\",\n\t\t\t\ti, test.name, err)\n\t\t\tcontinue\n\t\t}\n\n\t\textKey, err := NewMaster(masterSeed)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"NewMaster #%d (%s): unexpected error when \"+\n\t\t\t\t\"creating new master key: %v\", i, test.name,\n\t\t\t\terr)\n\t\t\tcontinue\n\t\t}\n\n\t\tfor _, childNum := range test.path {\n\t\t\tvar err error\n\t\t\textKey, err = extKey.Child(childNum)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"err: %v\", err)\n\t\t\t\tcontinue tests\n\t\t\t}\n\t\t}\n\n\t\tif extKey.Depth() != uint8(len(test.path)) {\n\t\t\tt.Errorf(\"Depth of key %d should match fixture path: %v\",\n\t\t\t\textKey.Depth(), len(test.path))\n\t\t\tcontinue\n\t\t}\n\n\t\tprivStr := extKey.String()\n\t\tif privStr != test.wantPriv {\n\t\t\tt.Errorf(\"Serialize #%d (%s): mismatched serialized \"+\n\t\t\t\t\"private extended key -- got: %s, want: %s\", i,\n\t\t\t\ttest.name, privStr, test.wantPriv)\n\t\t\tcontinue\n\t\t}\n\n\t\tpubKey, err := extKey.Neuter()\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Neuter #%d (%s): unexpected error: %v \", i,\n\t\t\t\ttest.name, err)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Neutering a second time should have no effect.\n\t\tpubKey, err = pubKey.Neuter()\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Neuter #%d (%s): unexpected error: %v\", i,\n\t\t\t\ttest.name, err)\n\t\t\treturn\n\t\t}\n\n\t\tpubStr := pubKey.String()\n\t\tif pubStr != test.wantPub {\n\t\t\tt.Errorf(\"Neuter #%d (%s): mismatched serialized \"+\n\t\t\t\t\"public extended key -- got: %s, want: %s\", i,\n\t\t\t\ttest.name, pubStr, test.wantPub)\n\t\t\tcontinue\n\t\t}\n\t}\n}" ]
[ "0.6378622", "0.57739544", "0.574653", "0.52474564", "0.5186409", "0.51749194", "0.51449287", "0.50033545", "0.49093032", "0.49070936", "0.49000555", "0.48950276", "0.48849282", "0.48680317", "0.48279548", "0.47963125", "0.47809932", "0.47659677", "0.4739921", "0.4732453", "0.47016183", "0.47001836", "0.46924594", "0.46912625", "0.46890634", "0.4688217", "0.46853548", "0.4685337", "0.4654722", "0.46523434", "0.46511602", "0.46358877", "0.46112406", "0.461113", "0.4594124", "0.45906803", "0.45874226", "0.45869276", "0.45693898", "0.4564923", "0.45611784", "0.45598215", "0.45420527", "0.45344016", "0.4533468", "0.45266938", "0.4526108", "0.45248047", "0.45176297", "0.4508458", "0.44976684", "0.44963753", "0.4487303", "0.44853973", "0.44763136", "0.44709882", "0.4467535", "0.44646683", "0.44612324", "0.44520307", "0.44481993", "0.44404763", "0.4434202", "0.44310758", "0.44266388", "0.44187653", "0.44081393", "0.44022948", "0.4396333", "0.43903363", "0.43855745", "0.4380805", "0.43713638", "0.43633235", "0.43626422", "0.43573567", "0.43482634", "0.43453595", "0.43347445", "0.43338057", "0.43329096", "0.43309414", "0.43276927", "0.43233305", "0.4322905", "0.43223223", "0.43134075", "0.43087125", "0.42993924", "0.42989284", "0.42964974", "0.42847642", "0.42799532", "0.4279679", "0.42748612", "0.4270684", "0.42704248", "0.42681634", "0.42668778", "0.4241585" ]
0.81416184
0
assertPinLsAllConsistency verifies that listing all pins gives the same result as listing the pin types individually
func assertPinLsAllConsistency(t *testing.T, ctx context.Context, api iface.CoreAPI) {
	t.Helper()
	allPins, err := accPins(api.Pin().Ls(ctx))
	if err != nil {
		t.Fatal(err)
	}

	type pinTypeProps struct {
		*cid.Set
		opt.PinLsOption
	}

	all, recursive, direct, indirect := cid.NewSet(), cid.NewSet(), cid.NewSet(), cid.NewSet()
	typeMap := map[string]*pinTypeProps{
		"recursive": {recursive, opt.Pin.Ls.Recursive()},
		"direct": {direct, opt.Pin.Ls.Direct()},
		"indirect": {indirect, opt.Pin.Ls.Indirect()},
	}

	for _, p := range allPins {
		if !all.Visit(p.Path().Cid()) {
			t.Fatalf("pin ls returned the same cid multiple times")
		}

		typeStr := p.Type()
		if typeSet, ok := typeMap[p.Type()]; ok {
			typeSet.Add(p.Path().Cid())
		} else {
			t.Fatalf("unknown pin type: %s", typeStr)
		}
	}

	for typeStr, pinProps := range typeMap {
		pins, err := accPins(api.Pin().Ls(ctx, pinProps.PinLsOption))
		if err != nil {
			t.Fatal(err)
		}

		if expected, actual := len(pins), pinProps.Set.Len(); expected != actual {
			t.Fatalf("pin ls all has %d pins of type %s, but pin ls for the type has %d", expected, typeStr, actual)
		}

		for _, p := range pins {
			if pinType := p.Type(); pinType != typeStr {
				t.Fatalf("returned wrong pin type: expected %s, got %s", typeStr, pinType)
			}

			if c := p.Path().Cid(); !pinProps.Has(c) {
				t.Fatalf("%s expected to be in pin ls all as type %s", c.String(), typeStr)
			}
		}
	}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func testCheckDDCloudAddressListMatches(name string, expected compute.IPAddressList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address list not found with Id '%s'\", addressListID)\n\t\t}\n\n\t\tif addressList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has name '%s' (expected '%s')\", addressListID, addressList.Name, expected.Name)\n\t\t}\n\n\t\tif addressList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has description '%s' (expected '%s')\", addressListID, addressList.Description, expected.Description)\n\t\t}\n\n\t\tif len(addressList.Addresses) != len(expected.Addresses) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d addresses or address-ranges (expected '%d')\", addressListID, len(addressList.Addresses), len(expected.Addresses))\n\t\t}\n\n\t\terr = compareAddressListEntries(expected, *addressList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(addressList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: address list '%s' has %d child lists (expected '%d')\", addressListID, len(addressList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range addressList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := addressList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: address list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\taddressListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func TestAll(t *testing.T) {\n\tfor i := 0; i <= 0xff; i += 7 {\n\t\tassertU16(t, uint16(i))\n\t}\n\tfor i := 0; i <= 0xffff; i += 211 {\n\t\tassertU32(t, uint32(i))\n\t}\n\tfor i := 0; i <= 0xffffffff; i += 1000000007 {\n\t\tassertU64(t, uint64(i))\n\t}\n}", "func assertPinCids(t *testing.T, pins []iface.Pin, cids ...cidContainer) {\n\tt.Helper()\n\n\tif expected, actual := len(cids), len(pins); expected != actual {\n\t\tt.Fatalf(\"expected pin list to have len %d, was %d\", expected, actual)\n\t}\n\n\tcSet := cid.NewSet()\n\tfor _, c := range cids {\n\t\tcSet.Add(c.Cid())\n\t}\n\n\tvalid := true\n\tfor _, p := range pins {\n\t\tc := p.Path().Cid()\n\t\tif cSet.Has(c) {\n\t\t\tcSet.Remove(c)\n\t\t} else {\n\t\t\tvalid = false\n\t\t\tbreak\n\t\t}\n\t}\n\n\tvalid = valid && cSet.Len() == 0\n\n\tif !valid {\n\t\tpinStrs := make([]string, len(pins))\n\t\tfor i, p := range pins {\n\t\t\tpinStrs[i] = p.Path().Cid().String()\n\t\t}\n\t\tpathStrs := make([]string, len(cids))\n\t\tfor i, c := range cids {\n\t\t\tpathStrs[i] = c.Cid().String()\n\t\t}\n\t\tt.Fatalf(\"expected: %s \\nactual: %s\", strings.Join(pathStrs, \", \"), strings.Join(pinStrs, \", \"))\n\t}\n}", "func verifyPartialLedgers(t *testing.T, provider *Provider, targetStatus []msgs.Status) {\n\t// Also double-check that deleted ledgers do not appear in the provider 
listing.\n\tactiveLedgers, err := provider.List()\n\trequire.NoError(t, err)\n\n\tfor i := 0; i < len(targetStatus); i++ {\n\t\tledgerID := constructTestLedgerID(i)\n\t\tif targetStatus[i] == msgs.Status_UNDER_CONSTRUCTION || targetStatus[i] == msgs.Status_UNDER_DELETION {\n\t\t\tverifyLedgerDoesNotExist(t, provider, ledgerID)\n\t\t\trequire.NotContains(t, ledgerID, activeLedgers)\n\t\t} else {\n\t\t\tverifyLedgerIDExists(t, provider, ledgerID, targetStatus[i])\n\t\t\trequire.Contains(t, activeLedgers, ledgerID)\n\t\t}\n\t}\n}", "func (t *Convert) verifyIndividualTypeCheck() {\n\tfor _, inf := range t.Interface {\n\t\tif inf.Callback {\n\t\t\tt.verifyCallbackInterface(inf)\n\t\t}\n\t}\n}", "func TestAllInt(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\ts []int\n\t\texpected bool\n\t}{\n\t\t{[]int{0, 2, 4}, true},\n\t\t{[]int{}, true},\n\t\t{[]int{2, 4, 1}, false},\n\t\t{[]int{1}, false},\n\t\t{[]int{-2, 4}, true},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.AllInt(test.s, func(i int) bool {\n\t\t\treturn i%2 == 0\n\t\t})\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | actual : '%v'\", test.expected, actual)\n\t}\n}", "func testCheckDDCloudAddressListDestroy(state *terraform.State) error {\n\tfor _, res := range state.RootModule().Resources {\n\t\tif res.Type != \"ddcloud_address_list\" {\n\t\t\tcontinue\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\tif addressList != nil {\n\t\t\treturn fmt.Errorf(\"address list '%s' still exists\", addressListID)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (api *PinAPI) pinLsAll(ctx context.Context, typeStr string) <-chan coreiface.Pin {\n\tout := make(chan coreiface.Pin, 1)\n\n\temittedSet := cid.NewSet()\n\n\tAddToResultKeys := func(c cid.Cid, typeStr string) error {\n\t\tif emittedSet.Visit(c) {\n\t\t\tselect {\n\t\t\tcase out <- &pinInfo{\n\t\t\t\tpinType: typeStr,\n\t\t\t\tpath: path.IpldPath(c),\n\t\t\t}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn ctx.Err()\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tgo func() {\n\t\tdefer close(out)\n\n\t\tvar rkeys []cid.Cid\n\t\tvar err error\n\t\tif typeStr == \"recursive\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"recursive\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"direct\" || typeStr == \"all\" {\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err = AddToResultKeys(streamedCid.C, \"direct\"); err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" {\n\t\t\t// We need to first visit the direct pins that have priority\n\t\t\t// without emitting them\n\n\t\t\tfor streamedCid := range api.pinning.DirectKeys(ctx) {\n\t\t\t\tif streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t}\n\n\t\t\tfor streamedCid := range api.pinning.RecursiveKeys(ctx) {\n\t\t\t\tif 
streamedCid.Err != nil {\n\t\t\t\t\tout <- &pinInfo{err: streamedCid.Err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\temittedSet.Add(streamedCid.C)\n\t\t\t\trkeys = append(rkeys, streamedCid.C)\n\t\t\t}\n\t\t}\n\t\tif typeStr == \"indirect\" || typeStr == \"all\" {\n\t\t\twalkingSet := cid.NewSet()\n\t\t\tfor _, k := range rkeys {\n\t\t\t\terr = merkledag.Walk(\n\t\t\t\t\tctx, merkledag.GetLinksWithDAG(api.dag), k,\n\t\t\t\t\tfunc(c cid.Cid) bool {\n\t\t\t\t\t\tif !walkingSet.Visit(c) {\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif emittedSet.Has(c) {\n\t\t\t\t\t\t\treturn true // skipped\n\t\t\t\t\t\t}\n\t\t\t\t\t\terr := AddToResultKeys(c, \"indirect\")\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\t\t\treturn false\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn true\n\t\t\t\t\t},\n\t\t\t\t\tmerkledag.SkipRoot(), merkledag.Concurrent(),\n\t\t\t\t)\n\t\t\t\tif err != nil {\n\t\t\t\t\tout <- &pinInfo{err: err}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn out\n}", "func TestOfflineList(t *testing.T) {\n\tl, err := listSample()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Check number of entries\n\twantN := 235\n\tgotN := len(l.Entries)\n\tif wantN != gotN {\n\t\tt.Fatalf(\"want %d but got %d\\n\", wantN, gotN)\n\t}\n\n\t// Check first entry\n\twant := firstEntry()\n\tgot := l.Entries[0]\n\tif want != got {\n\t\tt.Fatalf(\"want %q but got %q\\n\", want, got)\n\t}\n}", "func TestMgmt_GetAllIDs(t *testing.T) {\n\tfor i, objType := range vpc.ObjTypes() {\n\t\tobjType := objType\n\t\tt.Run(objType.String(), func(t *testing.T) {\n\t\t\tmgr, err := mgmt.New(nil)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to create new VPC Management handle: %v\", i, err)\n\t\t\t}\n\t\t\tdefer mgr.Close()\n\n\t\t\tids, err := mgr.GetAllIDs(objType)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to get IDs for %s VPC objects: %v\", i, objType, err)\n\t\t\t}\n\n\t\t\t_ = ids // Not sure what to do with these UUIDs\n\t\t})\n\t}\n}", "func TestCheckPermissionsSyncing(t *testing.T) {\n\tfor _, tt := range []struct {\n\t\tname string\n\t\tinstanceHealth Indicators\n\n\t\twantEmojis []string\n\t\twantErr string\n\t}{{\n\t\tname: \"no jobs\",\n\t\tinstanceHealth: Indicators{\n\t\t\tPermissionsSyncJobs: struct{ Nodes []permissionSyncJob }{\n\t\t\t\tNodes: nil,\n\t\t\t},\n\t\t},\n\t\twantEmojis: []string{output.EmojiWarning},\n\t\twantErr: \"\",\n\t}, {\n\t\tname: \"healthy\",\n\t\tinstanceHealth: Indicators{\n\t\t\tPermissionsSyncJobs: struct{ Nodes []permissionSyncJob }{\n\t\t\t\tNodes: []permissionSyncJob{{\n\t\t\t\t\tFinishedAt: time.Now(),\n\t\t\t\t\tState: \"SUCCESS\",\n\t\t\t\t\tCodeHostStates: []permissionsProviderStatus{{\n\t\t\t\t\t\tProviderType: \"github\",\n\t\t\t\t\t\tProviderID: \"https://github.com/\",\n\t\t\t\t\t\tStatus: \"SUCCESS\",\n\t\t\t\t\t}},\n\t\t\t\t}},\n\t\t\t},\n\t\t},\n\t\twantEmojis: []string{output.EmojiSuccess},\n\t\twantErr: \"\",\n\t}, {\n\t\tname: \"unhealthy\",\n\t\tinstanceHealth: Indicators{\n\t\t\tPermissionsSyncJobs: struct{ Nodes []permissionSyncJob }{\n\t\t\t\tNodes: []permissionSyncJob{{\n\t\t\t\t\tFinishedAt: time.Now(),\n\t\t\t\t\tState: \"ERROR\",\n\t\t\t\t\tFailureMessage: \"oh no!\",\n\t\t\t\t\tCodeHostStates: []permissionsProviderStatus{{\n\t\t\t\t\t\tProviderType: \"github\",\n\t\t\t\t\t\tProviderID: \"https://github.com/\",\n\t\t\t\t\t\tStatus: \"ERROR\",\n\t\t\t\t\t}},\n\t\t\t\t}},\n\t\t\t},\n\t\t},\n\t\twantEmojis: []string{output.EmojiFailure},\n\t\twantErr: \"permissions sync errors\",\n\t}} 
{\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tvar out bytes.Buffer\n\t\t\terr := checkPermissionsSyncing(output.NewOutput(io.MultiWriter(os.Stderr, &out), output.OutputOpts{}), time.Hour, tt.instanceHealth)\n\t\t\tif tt.wantErr == \"\" {\n\t\t\t\tassert.NoError(t, err)\n\t\t\t} else {\n\t\t\t\trequire.Error(t, err)\n\t\t\t\tassert.Contains(t, err.Error(), tt.wantErr)\n\t\t\t}\n\t\t\tif len(tt.wantEmojis) > 0 {\n\t\t\t\tdata := out.String()\n\t\t\t\tfor _, emoji := range tt.wantEmojis {\n\t\t\t\t\tassert.Contains(t, data, emoji)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func testCheckDDCloudPortListMatches(name string, expected compute.PortList) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_port_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"not found: %s\", name)\n\t\t}\n\n\t\tportListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\tportList, err := client.GetPortList(portListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: get port list: %s\", err)\n\t\t}\n\t\tif portList == nil {\n\t\t\treturn fmt.Errorf(\"bad: port list not found with Id '%s'\", portListID)\n\t\t}\n\n\t\tif portList.Name != expected.Name {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has name '%s' (expected '%s')\", portListID, portList.Name, expected.Name)\n\t\t}\n\n\t\tif portList.Description != expected.Description {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has description '%s' (expected '%s')\", portListID, portList.Description, expected.Description)\n\t\t}\n\n\t\tif len(portList.Ports) != len(expected.Ports) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d ports or port ranges (expected '%d')\", portListID, len(portList.Ports), len(expected.Ports))\n\t\t}\n\n\t\terr = comparePortListEntries(expected, *portList)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(portList.ChildLists) != len(expected.ChildLists) {\n\t\t\treturn fmt.Errorf(\"bad: port list '%s' has %d child lists (expected '%d')\", portListID, len(portList.ChildLists), len(expected.ChildLists))\n\t\t}\n\n\t\tfor index := range portList.ChildLists {\n\t\t\texpectedChildListID := expected.ChildLists[index].ID\n\t\t\tactualChildListID := portList.ChildLists[index].ID\n\n\t\t\tif actualChildListID != expectedChildListID {\n\t\t\t\treturn fmt.Errorf(\"bad: port list '%s' has child list at index %d with Id %s (expected '%s')\",\n\t\t\t\t\tportListID, index, actualChildListID, expectedChildListID,\n\t\t\t\t)\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func AssertContainsAll(t Failable, haystack string, needles []string) {\n\tt.Helper()\n\n\tvar missing []string\n\tfor _, needle := range needles {\n\t\tif !strings.Contains(strings.ToLower(haystack), strings.ToLower(needle)) {\n\t\t\tmissing = append(missing, needle)\n\t\t}\n\t}\n\n\tif len(missing) > 0 {\n\t\tt.Fatalf(\"%s: expected the values %v to be in %q but %v were missing\", t.Name(), needles, haystack, missing)\n\t}\n}", "func (m *StorageMock) MinimockAllInspect() {\n\tfor _, e := range m.AllMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.All with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.AllMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterAllCounter) < 1 {\n\t\tif m.AllMock.defaultExpectation.params == nil 
{\n\t\t\tm.t.Error(\"Expected call to StorageMock.All\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.All with params: %#v\", *m.AllMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcAll != nil && mm_atomic.LoadUint64(&m.afterAllCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StorageMock.All\")\n\t}\n}", "func (c CryptoServiceTester) TestListFromMultipleKeystores(t *testing.T) {\n\tcryptoService := c.cryptoServiceFactory()\n\tcryptoService.keyStores = append(cryptoService.keyStores,\n\t\ttrustmanager.NewKeyMemoryStore(passphraseRetriever))\n\n\texpectedKeysIDs := make(map[string]bool) // just want to be able to index by key\n\n\tfor i := 0; i < 3; i++ {\n\t\tprivKey, err := utils.GenerateECDSAKey(rand.Reader)\n\t\trequire.NoError(t, err, c.errorMsg(\"error creating key\"))\n\t\texpectedKeysIDs[privKey.ID()] = true\n\n\t\t// adds one different key to each keystore, and then one key to\n\t\t// both keystores\n\t\tfor j, store := range cryptoService.keyStores {\n\t\t\tif i == j || i == 2 {\n\t\t\t\tstore.AddKey(trustmanager.KeyInfo{Role: data.CanonicalRootRole, Gun: \"\"}, privKey)\n\t\t\t}\n\t\t}\n\t}\n\t// sanity check - each should have 2\n\tfor _, store := range cryptoService.keyStores {\n\t\trequire.Len(t, store.ListKeys(), 2, c.errorMsg(\"added keys wrong\"))\n\t}\n\n\tkeyList := cryptoService.ListKeys(\"root\")\n\trequire.Len(t, keyList, 4,\n\t\tc.errorMsg(\n\t\t\t\"ListKeys should have 4 keys (not necessarily unique) but does not: %v\", keyList))\n\tfor _, k := range keyList {\n\t\t_, ok := expectedKeysIDs[k]\n\t\trequire.True(t, ok, c.errorMsg(\"Unexpected key %s\", k))\n\t}\n\n\tkeyMap := cryptoService.ListAllKeys()\n\trequire.Len(t, keyMap, 3,\n\t\tc.errorMsg(\"ListAllKeys should have 3 unique keys but does not: %v\", keyMap))\n\n\tfor k, role := range keyMap {\n\t\t_, ok := expectedKeysIDs[k]\n\t\trequire.True(t, ok)\n\t\trequire.Equal(t, data.RoleName(\"root\"), role)\n\t}\n}", "func (rst *watcherSyncerTester) expectAllEventsHandled() {\n\tlog.Infof(\"Expecting all events to have been handled\")\n\tfor _, l := range rst.lws {\n\t\tExpect(l.listCallResults).To(HaveLen(0), \"pending list results to be processed\")\n\t\tExpect(l.stopEvents).To(HaveLen(0), \"pending stop events to be processed\")\n\t\tExpect(l.results).To(HaveLen(0), \"pending watch results to be processed\")\n\t}\n}", "func checkSmaps(t *testing.T, proxyURL string) {\n\tvar (\n\t\tsmap1 = tutils.GetClusterMap(t, proxyURL)\n\t\tprimary = smap1.Primary // primary according to the `proxyURL`(*)\n\t)\n\tfor _, psi := range smap1.Pmap {\n\t\tsmap2 := tutils.GetClusterMap(t, psi.URL(cmn.NetworkPublic))\n\t\tuuid, sameOrigin, sameVersion, eq := smap1.Compare(smap2)\n\t\tif eq {\n\t\t\tcontinue\n\t\t}\n\t\terr := fmt.Errorf(\"(%s %s, primary=%s) != (%s %s, primary=%s): (uuid=%s, same-orig=%t, same-ver=%t)\",\n\t\t\tproxyURL, smap1, primary, psi.URL(cmn.NetworkPublic), smap2, smap2.Primary, uuid, sameOrigin, sameVersion)\n\t\tt.Error(err)\n\t}\n}", "func TestAll(t *testing.T) {\n\n\t// remove anything that might have been leftover from failed tests\n\tDeleteApi(TEST_API_AXLE_SERVER, TEST_API_NAME)\n\tDeleteKey(TEST_API_AXLE_SERVER, TEST_KEY_NAME)\n\tDeleteKeyRing(TEST_API_AXLE_SERVER, TEST_KEYRING_NAME)\n\n\ttestPing(t)\n\ttestInfo(t)\n\ttestGetNonExistentApi(t)\n\ttestCreateApi(t)\n\tapi := testGetApi(t)\n\ttestUpdateApi(t, api)\n\ttestApiStats(t, api)\n\tk := testNewKey(t)\n\ttestGetKey(t)\n\ttestUpdateKey(t, 
k)\n\ttestLinkKey(t, api)\n\ttestApiKeyCharts(t, api)\n\ttestApiKeys(t)\n\ttestApis(t)\n\ttestApisCharts(t)\n\ttestKeyApiCharts(t)\n\ttestKeyApis(t, k)\n\ttestKeyStats(t, k)\n\ttestKeysCharts(t)\n\ttestKeys(t)\n\tkr := testNewKeyRing(t)\n\ttestGetKeyRing(t)\n\ttestUpdateKeyRing(t, kr)\n\ttestKeyRingLinkKey(t)\n\ttestKeyRingKeys(t, kr)\n\ttestKeyRingStats(t, kr)\n\ttestKeyRings(t)\n\ttestKeyRingUnlinkKey(t)\n\ttestKeyRingsEmpty(t)\n\ttestUnlinkKey(t)\n\ttestDeleteKey(t)\n\ttestDeleteKeyRing(t)\n\ttestDeleteApi(t, api)\n}", "func TestConnectednessCorrect(t *testing.T) {\n\tnets := make([]network.Network, 4)\n\tfor i := 0; i < 4; i++ {\n\t\tnets[i] = GenSwarm(t)\n\t}\n\n\t// connect 0-1, 0-2, 0-3, 1-2, 2-3\n\n\tdial := func(a, b network.Network) {\n\t\tDivulgeAddresses(b, a)\n\t\tif _, err := a.DialPeer(context.Background(), b.LocalPeer()); err != nil {\n\t\t\tt.Fatalf(\"Failed to dial: %s\", err)\n\t\t}\n\t}\n\n\tdial(nets[0], nets[1])\n\tdial(nets[0], nets[3])\n\tdial(nets[1], nets[2])\n\tdial(nets[3], nets[2])\n\n\t// The notifications for new connections get sent out asynchronously.\n\t// There is the potential for a race condition here, so we sleep to ensure\n\t// that they have been received.\n\ttime.Sleep(time.Millisecond * 100)\n\n\t// test those connected show up correctly\n\n\t// test connected\n\texpectConnectedness(t, nets[0], nets[1], network.Connected)\n\texpectConnectedness(t, nets[0], nets[3], network.Connected)\n\texpectConnectedness(t, nets[1], nets[2], network.Connected)\n\texpectConnectedness(t, nets[3], nets[2], network.Connected)\n\n\t// test not connected\n\texpectConnectedness(t, nets[0], nets[2], network.NotConnected)\n\texpectConnectedness(t, nets[1], nets[3], network.NotConnected)\n\n\trequire.Len(t, nets[0].Peers(), 2, \"expected net 0 to have two peers\")\n\trequire.Len(t, nets[2].Peers(), 2, \"expected net 2 to have two peers\")\n\trequire.NotZerof(t, nets[1].ConnsToPeer(nets[3].LocalPeer()), \"net 1 should have no connections to net 3\")\n\trequire.NoError(t, nets[2].ClosePeer(nets[1].LocalPeer()))\n\n\ttime.Sleep(time.Millisecond * 50)\n\texpectConnectedness(t, nets[2], nets[1], network.NotConnected)\n\n\tfor _, n := range nets {\n\t\tn.Close()\n\t}\n}", "func validateEntries(t *testing.T, entries, expected []endpoint.Endpoint) {\n\tif len(entries) != len(expected) {\n\t\tt.Fatalf(\"expected %q to match %q\", entries, expected)\n\t}\n\n\tfor i := range entries {\n\t\tif entries[i] != expected[i] {\n\t\t\tt.Fatalf(\"expected %q to match %q\", entries, expected)\n\t\t}\n\t}\n}", "func testCheckDDCloudAddressListExists(name string, exists bool) resource.TestCheckFunc {\n\tname = ensureResourceTypePrefix(name, \"ddcloud_address_list\")\n\n\treturn func(state *terraform.State) error {\n\t\tres, ok := state.RootModule().Resources[name]\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Not found: %s\", name)\n\t\t}\n\n\t\taddressListID := res.Primary.ID\n\n\t\tclient := testAccProvider.Meta().(*providerState).Client()\n\t\taddressList, err := client.GetIPAddressList(addressListID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"bad: Get address list: %s\", err)\n\t\t}\n\t\tif exists && addressList == nil {\n\t\t\treturn fmt.Errorf(\"bad: address list not found with Id '%s'\", addressListID)\n\t\t} else if !exists && addressList != nil {\n\t\t\treturn fmt.Errorf(\"bad: address list still exists with Id '%s'\", addressListID)\n\t\t}\n\n\t\treturn nil\n\t}\n}", "func TestGetAllInstances(t *testing.T) {\n\tctx, cancelFunc := context.WithTimeout(context.Background(), 
standardTimeout)\n\tdefer cancelFunc()\n\n\tinstances, err := bat.StartRandomInstances(ctx, \"\", 3)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to launch instance: %v\", err)\n\t}\n\n\tscheduled, err := bat.WaitForInstancesLaunch(ctx, \"\", instances, false)\n\tdefer func() {\n\t\t_, err := bat.DeleteInstances(ctx, \"\", scheduled)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to delete instances: %v\", err)\n\t\t}\n\t}()\n\tif err != nil {\n\t\tt.Fatalf(\"Instance %s did not launch: %v\", instances[0], err)\n\t}\n\n\tinstanceDetails, err := bat.GetAllInstances(ctx, \"\")\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to retrieve instances: %v\", err)\n\t}\n\n\tfor _, instance := range instances {\n\t\tinstanceDetail, ok := instanceDetails[instance]\n\t\tif !ok {\n\t\t\tt.Fatalf(\"Failed to retrieve instance %s\", instance)\n\t\t}\n\n\t\t// Check some basic information\n\n\t\tif instanceDetail.FlavorID == \"\" || instanceDetail.HostID == \"\" ||\n\t\t\tinstanceDetail.TenantID == \"\" || instanceDetail.MacAddress == \"\" ||\n\t\t\tinstanceDetail.PrivateIP == \"\" {\n\t\t\tt.Fatalf(\"Instance missing information: %+v\", instanceDetail)\n\t\t}\n\t}\n}", "func (l list) Verify() error {\n\tfor _, p := range l.Fields {\n\t\tfor _, s := range p.Values {\n\t\t\tif err := p.Verify(s); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func CheckStateInvariants(st *State, store adt.Store) (*StateSummary, *builtin.MessageAccumulator, error) {\r\n\tacc := &builtin.MessageAccumulator{}\r\n\tacc.Require(st.RootKey.Protocol() == addr.ID, \"root key %v should have ID protocol\", st.RootKey)\r\n\r\n\t// Check verifiers\r\n\tverifiers, err := adt.AsMap(store, st.Verifiers)\r\n\tif err != nil {\r\n\t\treturn nil, nil, err\r\n\t}\r\n\r\n\tallVerifiers := map[addr.Address]DataCap{}\r\n\tvar vcap abi.StoragePower\r\n\tif err = verifiers.ForEach(&vcap, func(key string) error {\r\n\t\tverifier, err := addr.NewFromBytes([]byte(key))\r\n\t\tif err != nil {\r\n\t\t\treturn err\r\n\t\t}\r\n\t\tacc.Require(verifier.Protocol() == addr.ID, \"verifier %v should have ID protocol\", verifier)\r\n\t\tacc.Require(vcap.GreaterThanEqual(big.Zero()), \"verifier %v cap %v is negative\", verifier, vcap)\r\n\t\tallVerifiers[verifier] = vcap.Copy()\r\n\t\treturn nil\r\n\t}); err != nil {\r\n\t\treturn nil, nil, err\r\n\t}\r\n\r\n\t// Check clients\r\n\tclients, err := adt.AsMap(store, st.VerifiedClients)\r\n\tif err != nil {\r\n\t\treturn nil, nil, err\r\n\t}\r\n\r\n\tallClients := map[addr.Address]DataCap{}\r\n\tif err = clients.ForEach(&vcap, func(key string) error {\r\n\t\tclient, err := addr.NewFromBytes([]byte(key))\r\n\t\tif err != nil {\r\n\t\t\treturn err\r\n\t\t}\r\n\t\tacc.Require(client.Protocol() == addr.ID, \"client %v should have ID protocol\", client)\r\n\t\tacc.Require(vcap.GreaterThanEqual(big.Zero()), \"client %v cap %v is negative\", client, vcap)\r\n\t\tallClients[client] = vcap.Copy()\r\n\t\treturn nil\r\n\t}); err != nil {\r\n\t\treturn nil, nil, err\r\n\t}\r\n\r\n\t// Check verifiers and clients are disjoint.\r\n\tfor v := range allVerifiers { //nolint:nomaprange\r\n\t\t_, found := allClients[v]\r\n\t\tacc.Require(!found, \"verifier %v is also a client\", v)\r\n\t}\r\n\t// No need to iterate all clients; any overlap must have been one of all verifiers.\r\n\r\n\treturn &StateSummary{\r\n\t\tVerifiers: allVerifiers,\r\n\t\tClients: allClients,\r\n\t}, acc, nil\r\n}", "func TestUnreachableMarks(t *testing.T) {\n\tseeds := []string {\"127.0.0.1:6000\",}\n\tmanager1 := CreatePeerManager(6000, 
6001, nil, FullMode)\n\tmanager2 := CreatePeerManager(7000, 7001, seeds, FullMode)\n\tmanager3 := CreatePeerManager(8000, 8001, seeds, FullMode)\n\n\t// Change update period to lengthen the time between marking a peer unreachable \n\t// and the next status update\n\tmanager1.StatusUpdatePeriod=500*time.Millisecond\n\tmanager2.StatusUpdatePeriod=500*time.Millisecond\n\tmanager3.StatusUpdatePeriod=500*time.Millisecond\n\n\tmarkPeer := func(t *testing.T) {\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tmanager1.MarkPeerUnreachable(\"127.0.0.1:8001\")\n\t\tavailable := GetPeerManagerAvailablePeers(manager1)\n\t\texpected := []string {\"127.0.0.1:6001\", \"127.0.0.1:7001\"}\n\t\tif !MapOnlyContains(available, expected) {\n\t\t\tt.Errorf(\"Peer 127.0.0.1:8001 wasn't marked unreachable %v\\n\", available)\n\t\t}\n\t}\n\n\t// After some time has passed all the peers should be available again\n\tallPeers := []string {\"127.0.0.1:6001\", \"127.0.0.1:7001\", \"127.0.0.1:8001\"}\n\tPeerManagerPropagationHelper(t, manager1, manager2, manager3,\n\t\tallPeers, allPeers, allPeers, markPeer, 3200*time.Millisecond, 8*time.Second)\n}", "func assertEqualEndpointLists(t *testing.T, expected, actual []*Endpoint) {\n\texpectedSet := map[string]*Endpoint{}\n\tfor _, ep := range expected {\n\t\tuid, found := ep.getSingleValuedAttrs()[DestinationUID.AttrName()]\n\t\tif !found {\n\t\t\tt.Fatalf(\"expected ep found with no UID is an indication of bad test data: '%v'\", ep)\n\t\t}\n\t\texpectedSet[uid] = ep\n\t}\n\tactualSet := map[string]*Endpoint{}\n\tfor _, ep := range actual {\n\t\tuid, found := ep.getSingleValuedAttrs()[DestinationUID.AttrName()]\n\t\tif !found {\n\t\t\tt.Errorf(\"actual ep found with no UID '%s'\", epDebugInfo(ep))\n\t\t\tcontinue\n\t\t}\n\t\tactualSet[uid] = ep\n\t}\n\tfor uid, expectedEp := range expectedSet {\n\t\tactualEp, found := actualSet[uid]\n\t\tif !found {\n\t\t\tt.Errorf(\"expecting endpoint\\nShortForm: %s\\nLongForm : %s\\nfound none\", epDebugInfo(expectedEp), *expectedEp)\n\t\t\tcontinue\n\t\t}\n\t\tassertEqualEndpoints(t, expectedEp, actualEp)\n\t\tdelete(actualSet, uid)\n\t}\n\tfor _, ep := range actualSet {\n\t\tt.Errorf(\"unexpected endpoint found: %s\", epDebugInfo(ep))\n\t}\n\tif len(expected) != len(actual) {\n\t\tt.Errorf(\"expected endpoint count: %d do not tally with actual count: %d\", len(expected), len(actual))\n\t}\n}", "func TestPasswordSanity(t *testing.T) {\n\tpw := make([]byte, 12)\n\tpw2 := make([]byte, 12)\n\tvar hash []byte\n\tvar hash2 []byte\n\n\tfor i := 0; i < 32; i++ {\n\t\tgrand.Read(pw)\n\t\tgrand.Read(pw2)\n\t\thash = PasswordHash(pw)\n\t\thash2 = PasswordHash(pw2)\n\n\t\tif !PasswordValid(hash, pw) {\n\t\t\tt.Errorf(\"PasswordValid should return true for the pair: %s and %s\", hash, pw)\n\t\t\tt.FailNow()\n\t\t}\n\t\tif PasswordValid(hash2, pw) {\n\t\t\tt.Error(\"PasswordValid is giving false positive\")\n\t\t\tt.FailNow()\n\t\t}\n\t}\n}", "func (m *MockRegionUserInfoDao) GetALLTokenInValidityPeriod() ([]*model.RegionUserInfo, error) {\n\tret := m.ctrl.Call(m, \"GetALLTokenInValidityPeriod\")\n\tret0, _ := ret[0].([]*model.RegionUserInfo)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func Check(collection *collection.Collection, dbg, suggest bool) Results {\n\n\tresults := Results{}\n\n\t// Start tests\n\n\t/* Check to make sure that the public DNS server NS records match\n\t Check to make sure the one of the public and the private NS record servers match\n\t Check to make 
sure there are at least 1 NS server\n\t*/\n\n\tcollection.PublicMatchNS = reflect.DeepEqual(collection.DNS1NS, collection.DNS2NS)\n\tcollection.LocalMatchNS = reflect.DeepEqual(collection.DNS1NS, collection.LocalNS)\n\tif collection.PublicMatchNS && collection.LocalMatchNS && len(collection.LocalNS) > 0 {\n\t\tresults.ResultNS = true\n\t} else {\n\t\tresults.ResultNS = false\n\t}\n\n\t/* Check to make sure the public DNS server Glue records match\n\t Check to make sure the one of the public and the private Glue record servers match\n\t Check to make sure there the Glue record length matches the ns record length\n\t*/\n\n\tcollection.PublicMatchGlue = reflect.DeepEqual(collection.DNS1Glue, collection.DNS2Glue)\n\tcollection.LocalMatchGlue = reflect.DeepEqual(collection.DNS1Glue, collection.LocalGlue)\n\n\tif collection.PublicMatchGlue && collection.LocalMatchGlue && (len(collection.LocalNS) == len(collection.LocalGlue)) && len(collection.LocalNS) > 0 {\n\t\tresults.ResultGlue = true\n\t} else {\n\t\tresults.ResultGlue = false\n\t}\n\n\t/* Check to make sure that we can access all of the name servers and the numbers match */\n\n\tresults.ResultAccess = true\n\tfor _, a := range collection.EndpointStatus {\n\t\tif a && results.ResultAccess {\n\t\t} else {\n\t\t\tresults.ResultAccess = false\n\t\t}\n\t}\n\tif len(collection.EndpointStatus) != len(collection.LocalNS) || len(collection.EndpointStatus) < 1 {\n\t\tresults.ResultAccess = false\n\t}\n\n\t/* Check to make sure both public DNS server results match\n\t Check that the LocalDNS and one of the remotes match\n\t Check that there is more than 1 A record\n\t*/\n\n\tcollection.PublicMatchA = reflect.DeepEqual(collection.DNS1A, collection.DNS2A)\n\tcollection.LocalMatchA = reflect.DeepEqual(collection.DNS1A, collection.LocalA)\n\n\tif collection.PublicMatchA && collection.LocalMatchA && len(collection.LocalA) > 0 && (len(collection.LocalA) == len(collection.DNS1A)) {\n\t\tresults.ResultA = true\n\t} else {\n\t\tresults.ResultA = false\n\t}\n\n\t// check to make sure the SOA records match the domain name we expect\n\tresults.ResultSOAMatch = collection.SOAMatch\n\n\t// Show test results if suggest or debug\n\tif dbg || suggest {\n\t\tfmt.Printf(\"--------------------------------\\n\")\n\t\tdebugPrint(\"NS Record Test\", results.ResultNS)\n\t\tdebugPrint(\"Glue Record Test\", results.ResultGlue)\n\t\tdebugPrint(\"NS Access Test\", results.ResultAccess)\n\t\tdebugPrint(\"SOA Match Test\", results.ResultSOAMatch)\n\t\tdebugPrint(\"A Record Test\", results.ResultA)\n\t\tfmt.Printf(\"--------------------------------\\n\")\n\t}\n\n\t// only print datastructure if debug is on\n\tif dbg {\n\t\tcolor.Cyan.Printf(\"Results Debug:\\n%+v\\n\", results)\n\t}\n\n\treturn (results)\n}", "func (m *MockFullNode) StateAllMinerFaults(arg0 context.Context, arg1 abi.ChainEpoch, arg2 types0.TipSetKey) ([]*types0.Fault, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"StateAllMinerFaults\", arg0, arg1, arg2)\n\tret0, _ := ret[0].([]*types0.Fault)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestNDPOptionsIterCheck(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\tbuf []byte\n\t\texpectedErr error\n\t}{\n\t\t{\n\t\t\tname: \"ZeroLengthField\",\n\t\t\tbuf: []byte{0, 0, 0, 0, 0, 0, 0, 0},\n\t\t\texpectedErr: ErrNDPOptMalformedHeader,\n\t\t},\n\t\t{\n\t\t\tname: \"ValidSourceLinkLayerAddressOption\",\n\t\t\tbuf: []byte{1, 1, 1, 2, 3, 4, 5, 6},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: 
\"TooSmallSourceLinkLayerAddressOption\",\n\t\t\tbuf: []byte{1, 1, 1, 2, 3, 4, 5},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t\t{\n\t\t\tname: \"ValidTargetLinkLayerAddressOption\",\n\t\t\tbuf: []byte{2, 1, 1, 2, 3, 4, 5, 6},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"TooSmallTargetLinkLayerAddressOption\",\n\t\t\tbuf: []byte{2, 1, 1, 2, 3, 4, 5},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t\t{\n\t\t\tname: \"ValidPrefixInformation\",\n\t\t\tbuf: []byte{\n\t\t\t\t3, 4, 43, 64,\n\t\t\t\t1, 2, 3, 4,\n\t\t\t\t5, 6, 7, 8,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t9, 10, 11, 12,\n\t\t\t\t13, 14, 15, 16,\n\t\t\t\t17, 18, 19, 20,\n\t\t\t\t21, 22, 23, 24,\n\t\t\t},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"TooSmallPrefixInformation\",\n\t\t\tbuf: []byte{\n\t\t\t\t3, 4, 43, 64,\n\t\t\t\t1, 2, 3, 4,\n\t\t\t\t5, 6, 7, 8,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t9, 10, 11, 12,\n\t\t\t\t13, 14, 15, 16,\n\t\t\t\t17, 18, 19, 20,\n\t\t\t\t21, 22, 23,\n\t\t\t},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t\t{\n\t\t\tname: \"InvalidPrefixInformationLength\",\n\t\t\tbuf: []byte{\n\t\t\t\t3, 3, 43, 64,\n\t\t\t\t1, 2, 3, 4,\n\t\t\t\t5, 6, 7, 8,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t9, 10, 11, 12,\n\t\t\t\t13, 14, 15, 16,\n\t\t\t},\n\t\t\texpectedErr: ErrNDPOptMalformedBody,\n\t\t},\n\t\t{\n\t\t\tname: \"ValidSourceAndTargetLinkLayerAddressWithPrefixInformation\",\n\t\t\tbuf: []byte{\n\t\t\t\t// Source Link-Layer Address.\n\t\t\t\t1, 1, 1, 2, 3, 4, 5, 6,\n\n\t\t\t\t// Target Link-Layer Address.\n\t\t\t\t2, 1, 7, 8, 9, 10, 11, 12,\n\n\t\t\t\t// Prefix information.\n\t\t\t\t3, 4, 43, 64,\n\t\t\t\t1, 2, 3, 4,\n\t\t\t\t5, 6, 7, 8,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t9, 10, 11, 12,\n\t\t\t\t13, 14, 15, 16,\n\t\t\t\t17, 18, 19, 20,\n\t\t\t\t21, 22, 23, 24,\n\t\t\t},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"ValidSourceAndTargetLinkLayerAddressWithPrefixInformationWithUnrecognized\",\n\t\t\tbuf: []byte{\n\t\t\t\t// Source Link-Layer Address.\n\t\t\t\t1, 1, 1, 2, 3, 4, 5, 6,\n\n\t\t\t\t// Target Link-Layer Address.\n\t\t\t\t2, 1, 7, 8, 9, 10, 11, 12,\n\n\t\t\t\t// 255 is an unrecognized type. 
If 255 ends up\n\t\t\t\t// being the type for some recognized type,\n\t\t\t\t// update 255 to some other unrecognized value.\n\t\t\t\t255, 2, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 7, 8,\n\n\t\t\t\t// Prefix information.\n\t\t\t\t3, 4, 43, 64,\n\t\t\t\t1, 2, 3, 4,\n\t\t\t\t5, 6, 7, 8,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t9, 10, 11, 12,\n\t\t\t\t13, 14, 15, 16,\n\t\t\t\t17, 18, 19, 20,\n\t\t\t\t21, 22, 23, 24,\n\t\t\t},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"InvalidRecursiveDNSServerCutsOffAddress\",\n\t\t\tbuf: []byte{\n\t\t\t\t25, 4, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,\n\t\t\t\t0, 1, 2, 3, 4, 5, 6, 7,\n\t\t\t},\n\t\t\texpectedErr: ErrNDPOptMalformedBody,\n\t\t},\n\t\t{\n\t\t\tname: \"InvalidRecursiveDNSServerInvalidLengthField\",\n\t\t\tbuf: []byte{\n\t\t\t\t25, 2, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t0, 1, 2, 3, 4, 5, 6, 7, 8,\n\t\t\t},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t\t{\n\t\t\tname: \"RecursiveDNSServerTooSmall\",\n\t\t\tbuf: []byte{\n\t\t\t\t25, 1, 0, 0,\n\t\t\t\t0, 0, 0,\n\t\t\t},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t\t{\n\t\t\tname: \"RecursiveDNSServerMulticast\",\n\t\t\tbuf: []byte{\n\t\t\t\t25, 3, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,\n\t\t\t},\n\t\t\texpectedErr: ErrNDPOptMalformedBody,\n\t\t},\n\t\t{\n\t\t\tname: \"RecursiveDNSServerUnspecified\",\n\t\t\tbuf: []byte{\n\t\t\t\t25, 3, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\t\t\t},\n\t\t\texpectedErr: ErrNDPOptMalformedBody,\n\t\t},\n\t\t{\n\t\t\tname: \"DNSSearchListLargeCompliantRFC1035\",\n\t\t\tbuf: []byte{\n\t\t\t\t31, 33, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t62, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j',\n\t\t\t\t0,\n\t\t\t},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"DNSSearchListNonCompliantRFC1035\",\n\t\t\tbuf: []byte{\n\t\t\t\t31, 33, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 
'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t63, 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',\n\t\t\t\t'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',\n\t\t\t\t'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n\t\t\t\t'i', 'j', 'k',\n\t\t\t\t0,\n\t\t\t\t0, 0, 0, 0, 0, 0, 0, 0,\n\t\t\t},\n\t\t\texpectedErr: ErrNDPOptMalformedBody,\n\t\t},\n\t\t{\n\t\t\tname: \"DNSSearchListValidSmall\",\n\t\t\tbuf: []byte{\n\t\t\t\t31, 2, 0, 0,\n\t\t\t\t0, 0, 0, 0,\n\t\t\t\t6, 'a', 'b', 'c', 'd', 'e', 'f',\n\t\t\t\t0,\n\t\t\t},\n\t\t\texpectedErr: nil,\n\t\t},\n\t\t{\n\t\t\tname: \"DNSSearchListTooSmall\",\n\t\t\tbuf: []byte{\n\t\t\t\t31, 1, 0, 0,\n\t\t\t\t0, 0, 0,\n\t\t\t},\n\t\t\texpectedErr: io.ErrUnexpectedEOF,\n\t\t},\n\t}\n\n\tfor _, test := range tests {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\topts := NDPOptions(test.buf)\n\n\t\t\tif _, err := opts.Iter(true); !errors.Is(err, test.expectedErr) {\n\t\t\t\tt.Fatalf(\"got Iter(true) = (_, %v), want = (_, %v)\", err, test.expectedErr)\n\t\t\t}\n\n\t\t\t// test.buf may be malformed but we chose not to check\n\t\t\t// the iterator so it must return true.\n\t\t\tif _, err := opts.Iter(false); err != nil {\n\t\t\t\tt.Fatalf(\"got Iter(false) = (_, %s), want = (_, nil)\", err)\n\t\t\t}\n\t\t})\n\t}\n}", "func TestBuildEndpointList(t *testing.T) {\n\t// Build the offsets we will turn into an endpoint list.\n\toffsets := make(map[string]RemoteOffset)\n\toffsets[\"0\"] = RemoteOffset{Offset: 0, Error: 10}\n\toffsets[\"1\"] = RemoteOffset{Offset: 1, Error: 10}\n\toffsets[\"2\"] = RemoteOffset{Offset: 2, Error: 10}\n\toffsets[\"3\"] = RemoteOffset{Offset: 3, Error: 10}\n\tmanual := hlc.ManualClock(0)\n\tclock := hlc.NewClock(manual.UnixNano)\n\tclock.SetMaxOffset(5 * time.Nanosecond)\n\tremoteClocks := &RemoteClockMonitor{\n\t\toffsets: offsets,\n\t\tlClock: clock,\n\t}\n\n\texpectedList := endpointList{\n\t\tendpoint{offset: -15, endType: -1},\n\t\tendpoint{offset: -14, endType: -1},\n\t\tendpoint{offset: -13, endType: -1},\n\t\tendpoint{offset: -12, endType: -1},\n\t\tendpoint{offset: 15, endType: 1},\n\t\tendpoint{offset: 16, endType: 1},\n\t\tendpoint{offset: 17, endType: 1},\n\t\tendpoint{offset: 18, endType: 1},\n\t}\n\n\tlist := remoteClocks.buildEndpointList()\n\tsort.Sort(list)\n\n\tfor i := range expectedList {\n\t\tif list[i] != 
expectedList[i] {\n\t\t\tt.Errorf(\"expected index %d of list to be %v, instead %v\",\n\t\t\t\ti, expectedList[i], list[i])\n\t\t}\n\t}\n\n\tif len(list) != len(expectedList) {\n\t\tt.Errorf(\"exptected endpoint list to be size %d, instead %d\",\n\t\t\tlen(expectedList), len(list))\n\t}\n}", "func (pr *Project) VerifyAllDevices() string {\n\tresult := \"\"\n\t// TODO: Вначале проверяем наличие всех драйверов\n\tfor _, sub := range pr.Subsystems {\n\t\tfor _, dev := range sub.RealDevices {\n\t\t\tdrv, ok := pr.DefDrivers.Drivers[dev.Driver]\n\t\t\tif !ok {\n\t\t\t\tresult += \"Error! subsystem \" + sub.Name + \" device \" + dev.Name + \" not found driver \" + dev.Driver + \"\\n\"\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// TODO: Потом проверяем все назначения на устройства со стороны драйвера\n\t\t\tfor _, d := range dev.Defs {\n\t\t\t\tif !drv.FindPIN(d.DriverName) {\n\t\t\t\t\tresult += \"Error! subsystem \" + sub.Name + \" device \" + dev.Name + \" driver \" + dev.Driver\n\t\t\t\t\tresult += \" not found \" + d.DriverName\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn result\n\n}", "func listChecks(t *testing.T, expectedKeys map[string]bool, blobs BlobStore) {\n\tfor _, testCase := range testData {\n\t\texpectedKey := toKeyOrDie(t, testCase.expectedHash)\n\t\t// 1 write must succeed and key must match\n\t\tkey, err := blobs.Write(strings.NewReader(testCase.input))\n\t\tassert(err == nil, t, \"Error writing blob %s:%s\", testCase.expectedHash, err)\n\t\tassert(key.Equals(expectedKey), t, \"Expected blob key to be %s but got %s\", testCase.expectedHash, key)\n\t}\n\tblobKeys := blobs.List()\n\tassert(blobKeys != nil, t, \"Error calling List: nil blobKeys returned\")\n\ti := 0\n\tfor blobKey := range blobKeys {\n\t\tassert(blobKey.err == nil, t, \"Error in List stream: %s\", blobKey.err)\n\t\tkey := strings.ToLower(blobKey.key.String())\n\t\tassert(expectedKeys[key], t, \"Unexpected key: %s\", key)\n\t\ti++\n\t}\n}", "func TestCalculateResourceMilliRequirementsOnAllValidClusterSizes(t *testing.T) {\n\tfor name, cis := range validSizes {\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\tcpu, memory := cis.CalculateResourceMilliRequirements(true, true)\n\t\t\tassert.True(t, cpu > 0)\n\t\t\tassert.True(t, memory > 0)\n\t\t\tassert.Equal(t, cpu, cis.CalculateCPUMilliRequirement(true, true))\n\t\t\tassert.Equal(t, memory, cis.CalculateMemoryMilliRequirement(true, true))\n\t\t})\n\t}\n}", "func constructPartialLedgers(t *testing.T, provider *Provider, targetStatus []msgs.Status) {\n\tfor i := 0; i < len(targetStatus); i++ {\n\t\tledgerID := constructTestLedger(t, provider, i)\n\t\trequire.NoError(t, provider.idStore.updateLedgerStatus(ledgerID, targetStatus[i]))\n\t\tverifyLedgerIDExists(t, provider, ledgerID, targetStatus[i])\n\t}\n}", "func allStatesEqual(usl []fleet.UnitStatus) bool {\n\tfor _, us1 := range usl {\n\t\tfor _, us2 := range usl {\n\t\t\tif us1.Current != us2.Current {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tif us1.Desired != us2.Desired {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tfor _, m1 := range us1.Machine {\n\t\t\t\tfor _, m2 := range us2.Machine {\n\t\t\t\t\tif m1.SystemdActive != m2.SystemdActive {\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn true\n}", "func TestRandomOpSequences(t *testing.T) {\n\tconst nops = 1000\n\tconst niters = 20\n\n\tvar sl ISkipList\n\tsl.Seed(randSeed1, randSeed2)\n\tfor i := 0; i < niters; i++ {\n\t\tt.Logf(\"----- Generating random sequence of %v operations -----\\n\", nops)\n\t\tops := 
sliceutils.GenOps(nops, 0)\n\t\tsl.Clear()\n\t\ta := make([]ElemType, 0)\n\t\tfor _, o := range ops {\n\t\t\tt.Logf(\"%s\\n\", sliceutils.PrintOp(&o))\n\t\t\tsliceutils.ApplyOpToSlice(&o, &a)\n\t\t\tapplyOpToISkipList(&o, &sl)\n\t\t\tt.Logf(\"%v\\n\", DebugPrintISkipList(&sl, 3))\n\t\t\tt.Logf(\"%+v\\n\", a)\n\n\t\t\tt.Logf(\"Reported lengths: %v %v\\n\", sl.Length(), len(a))\n\n\t\t\tif len(a) != sl.Length() {\n\t\t\t\tt.Errorf(\"ISkipList has wrong length (%v instead of %v)\\n\", sl.Length(), len(a))\n\t\t\t}\n\n\t\t\t// Equality check by looping over indices.\n\t\t\tt.Logf(\"Testing result via index loop...\\n\")\n\t\t\tfor i, v := range a {\n\t\t\t\te := sl.At(i)\n\t\t\t\tt.Logf(\"Checking %v\\n\", i)\n\t\t\t\tif v != e {\n\t\t\t\t\tt.Errorf(\"Expected value %v at index %v, got %v instead (index loop).\\n\", v, i, e)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Equality check using ForAllI\n\t\tt.Logf(\"Testing result via ForAllI()...\")\n\t\tsl.ForAllI(func(i int, v *ElemType) {\n\t\t\tt.Logf(\"Checking %v\\n\", i)\n\t\t\tif *v != a[i] {\n\t\t\t\tt.Errorf(\"Expected value %v at index %v, got %v instead (ForAllI).\\n\", a[i], i, *v)\n\t\t\t}\n\t\t})\n\n\t\t// Copy and then check copy has expected elements using ForAllI.\n\t\tcp := sl.Copy()\n\t\tcp.ForAllI(func(i int, v *ElemType) {\n\t\t\tt.Logf(\"Checking %v\\n\", i)\n\t\t\tif *v != a[i] {\n\t\t\t\tt.Errorf(\"Expected value %v at index %v, got %v instead (ForAllI).\\n\", a[i], i, *v)\n\t\t\t}\n\t\t})\n\t}\n}", "func CheckAllShardBalances(node string, oneAddr string) (balances map[int]float64, err error) {\n\tbalances = make(map[int]float64)\n\n\tparams := []interface{}{oneAddr, \"latest\"}\n\ts, err := sharding.Structure(node)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, shard := range s {\n\t\tbalanceRPCReply, err := rpc.Request(rpc.Method.GetBalance, shard.HTTP, params)\n\t\tif err != nil {\n\t\t\tif common.DebugRPC {\n\t\t\t\tfmt.Printf(\"NOTE: Route %s failed.\", shard.HTTP)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tbalance, _ := balanceRPCReply[\"result\"].(string)\n\t\tbln, _ := big.NewInt(0).SetString(balance[2:], 16)\n\n\t\tshardID := shard.ShardID\n\t\tformattedAmount := common.ConvertBalanceIntoReadableFormat(bln)\n\t\tfloatBalance, err := strconv.ParseFloat(formattedAmount, 32)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tbalances[shardID] = floatBalance\n\t}\n\n\treturn balances, nil\n}", "func TestIsOffline(t *testing.T) {\n\tfor _, ac := range appClassCases {\n\t\tt.Logf(\"start case: %s\", ac.describe)\n\t\tresult := IsOffline(ac.pod)\n\t\tif result != ac.isOffline {\n\t\t\tt.Fatalf(\"offline check err, expect %v, but get %v\", ac.isOffline, result)\n\t\t}\n\t}\n}", "func checkStatusInSchema(crdItemList []apiextensionsv1.CustomResourceDefinition) []string {\n\n\t// These CRDs, at the time this test was written, do not have a \"status\" in the CRD schema\n\t// and subresource.status.\n\t// These can be skipped for now but we don't want the number to increase.\n\t// These CRDs should be tidied up over time.\n\t//\n\texceptionsList := 
sets.NewString(\n\t\t\"builds.config.openshift.io\",\n\t\t\"clusternetworks.network.openshift.io\",\n\t\t\"consoleclidownloads.console.openshift.io\",\n\t\t\"consoleexternalloglinks.console.openshift.io\",\n\t\t\"consolelinks.console.openshift.io\",\n\t\t\"consolenotifications.console.openshift.io\",\n\t\t\"consoleplugins.console.openshift.io\",\n\t\t\"consolequickstarts.console.openshift.io\",\n\t\t\"consolesamples.console.openshift.io\",\n\t\t\"consoleyamlsamples.console.openshift.io\",\n\t\t\"egressnetworkpolicies.network.openshift.io\",\n\t\t\"hostsubnets.network.openshift.io\",\n\t\t\"imagecontentpolicies.config.openshift.io\",\n\t\t\"imagecontentsourcepolicies.operator.openshift.io\",\n\t\t\"machineconfigs.machineconfiguration.openshift.io\",\n\t\t\"netnamespaces.network.openshift.io\",\n\t\t\"rangeallocations.security.internal.openshift.io\",\n\t\t\"rolebindingrestrictions.authorization.openshift.io\",\n\t\t\"securitycontextconstraints.security.openshift.io\",\n\t)\n\n\tfailures := []string{}\n\tfor _, crdItem := range crdItemList {\n\n\t\t// This test is interested only in CRDs that end with \"openshift.io\".\n\t\tif !strings.HasSuffix(crdItem.ObjectMeta.Name, \"openshift.io\") {\n\t\t\tcontinue\n\t\t}\n\n\t\tcrdName := crdItem.ObjectMeta.Name\n\n\t\t// Skip CRDs in the exceptions list for now.\n\t\tif exceptionsList.Has(crdName) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Iterate through all versions of the CustomResourceDefinition Spec looking for one with\n\t\t// a schema status element,\n\t\tfoundStatusInSchema := false\n\t\tvar i int\n\t\tfor i = 0; i < len(crdItem.Spec.Versions); i++ {\n\t\t\tif _, ok := crdItem.Spec.Versions[i].Schema.OpenAPIV3Schema.Properties[\"status\"]; ok {\n\t\t\t\tfoundStatusInSchema = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif !foundStatusInSchema {\n\t\t\tfailures = append(failures, fmt.Sprintf(\"CRD %s has no 'status' element in its schema\", crdName))\n\t\t}\n\t}\n\n\treturn failures\n}", "func (m *MockWorkflowManager) StatusWorkflowAll(numInstances int) ([]workflow.InstanceSummary, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"StatusWorkflowAll\", numInstances)\n\tret0, _ := ret[0].([]workflow.InstanceSummary)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func verifyTip(t *testing.T, store syncStoreReader, tip types.TipSet, stateRoot cid.Cid) {\n\tfoundTip, err := store.GetTipSet(tip.Key())\n\trequire.NoError(t, err)\n\tassert.Equal(t, tip, foundTip)\n\n\tfoundState, err := store.GetTipSetStateRoot(tip.Key())\n\trequire.NoError(t, err)\n\tassert.Equal(t, stateRoot, foundState)\n\n\tparent, err := tip.Parents()\n\tassert.NoError(t, err)\n\th, err := tip.Height()\n\tassert.NoError(t, err)\n\tchildTsasSlice, err := store.GetTipSetAndStatesByParentsAndHeight(parent, h)\n\tassert.NoError(t, err)\n\tassert.True(t, containsTipSet(childTsasSlice, tip))\n}", "func TestSuccessfulMultiQuery(t *testing.T) {\n\tlocations, err := metaweather.QueryLocations(\"san\")\n\tif err != nil {\n\t\tt.Fatalf(\"query returned error: %v\", err)\n\t}\n\tif !(len(locations) > 1) {\n\t\tt.Fatalf(\"number of query results is %d\", len(locations))\n\t}\n\tfor i, loc := range locations {\n\t\tif !strings.Contains(strings.ToLower(loc.Title), \"san\") {\n\t\t\tt.Fatalf(\"query result %d contains no 'san': %v\", i, loc)\n\t\t}\n\t}\n}", "func checkAllProviders(e2eFile string) error {\n\tcheckFailed := false\n\tinConformanceCode := false\n\n\tregStartConformance := regexp.MustCompile(patternStartConformance)\n\tregEndConformance := 
regexp.MustCompile(patternEndConformance)\n\tregSkip := regexp.MustCompile(patternSkip)\n\n\tfileInput, err := os.ReadFile(e2eFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to read file %s: %w\", e2eFile, err)\n\t}\n\tscanner := bufio.NewScanner(bytes.NewReader(fileInput))\n\tscanner.Split(bufio.ScanLines)\n\n\tfor scanner.Scan() {\n\t\tline := scanner.Text()\n\t\tif regStartConformance.MatchString(line) {\n\t\t\tif inConformanceCode {\n\t\t\t\treturn errors.New(\"Missed the end of previous conformance test. There might be a bug in this script.\")\n\t\t\t}\n\t\t\tinConformanceCode = true\n\t\t}\n\t\tif inConformanceCode {\n\t\t\tif regSkip.MatchString(line) {\n\t\t\t\t// To list all invalid places in a single operation of this tool, here doesn't return error and continues checking.\n\t\t\t\tfmt.Fprintf(os.Stderr, \"%v: Conformance test should not call any e2eskipper.Skip*()\\n\", e2eFile)\n\t\t\t\tcheckFailed = true\n\t\t\t}\n\t\t\tif regEndConformance.MatchString(line) {\n\t\t\t\tinConformanceCode = false\n\t\t\t}\n\t\t}\n\t}\n\tif inConformanceCode {\n\t\treturn errors.New(\"Missed the end of previous conformance test. There might be a bug in this script.\")\n\t}\n\tif checkFailed {\n\t\treturn errors.New(\"We need to fix the above errors.\")\n\t}\n\treturn nil\n}", "func verifyRemainingObjects(t *testing.T, clientSet clientset.Interface, namespace string, rsNum, podNum int) (bool, error) {\n\trsClient := clientSet.Extensions().ReplicaSets(namespace)\n\tpodClient := clientSet.Core().Pods(namespace)\n\tpods, err := podClient.List(metav1.ListOptions{})\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"Failed to list pods: %v\", err)\n\t}\n\tvar ret = true\n\tif len(pods.Items) != podNum {\n\t\tret = false\n\t\tt.Logf(\"expect %d pods, got %d pods\", podNum, len(pods.Items))\n\t}\n\trss, err := rsClient.List(metav1.ListOptions{})\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"Failed to list replica sets: %v\", err)\n\t}\n\tif len(rss.Items) != rsNum {\n\t\tret = false\n\t\tt.Logf(\"expect %d RSs, got %d RSs\", rsNum, len(rss.Items))\n\t}\n\treturn ret, nil\n}", "func TestPasswordSanity(t *testing.T) {\n\tif testing.Short() {\n\t\tt.Skip(\"skipping test in short mode.\")\n\t\treturn\n\t}\n\tpw := make([]byte, 12)\n\tpw2 := make([]byte, 12)\n\tvar hash, hash2 []byte\n\n\tfor i := 0; i < 8; i++ {\n\t\tgrand.Read(pw)\n\t\tgrand.Read(pw2)\n\t\thash = grypto.PasswordHash(pw)\n\t\thash2 = grypto.PasswordHash(pw2)\n\n\t\tif !grypto.PasswordValid(hash, pw) {\n\t\t\tt.Errorf(\"PasswordValid should return true for the pair: %s and %s\", hash, pw)\n\t\t\tt.FailNow()\n\t\t}\n\t\tif grypto.PasswordValid(hash2, pw) {\n\t\t\tt.Error(\"PasswordValid is giving false positive\")\n\t\t\tt.FailNow()\n\t\t}\n\t}\n}", "func TestListCheckpoints(t *testing.T) {\n\tsrv := testingServer()\n\n\t// Create a model and hyperparameters under which to test checkpoint functionality\n\tmodelID := \"test-model\"\n\tmodel := api.CreateModelRequest{\n\t\tModel: &api.Model{\n\t\t\tModelId: modelID,\n\t\t\tDescription: \"This is a test\",\n\t\t},\n\t}\n\tctx := context.Background()\n\t_, err := srv.CreateModel(ctx, &model)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\n\thyperparametersID := \"test-hyperparameters\"\n\thyperparameters := make(map[string]string)\n\thyperparameters[\"parameter\"] = \"parameter-value\"\n\n\thpCreationRequest := api.CreateHyperParametersRequest{\n\t\tModelId: modelID,\n\t\tHyperParametersId: hyperparametersID,\n\t\tHyperParameters: hyperparameters,\n\t}\n\t_, err = 
srv.CreateHyperParameters(ctx, &hpCreationRequest)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\n\tckptCreationRequests := make([]api.CreateCheckpointRequest, 21)\n\tfor i := range ckptCreationRequests {\n\t\tcheckpointID := fmt.Sprintf(\"checkpoint-%d\", i)\n\t\tlink := fmt.Sprintf(\"http://example.com/checkpoints-for-test/%d.zip\", i)\n\t\tinfo := make(map[string]string)\n\t\tinfo[\"parameter\"] = fmt.Sprintf(\"value-for-%d\", i)\n\t\tckptCreationRequests[i] = api.CreateCheckpointRequest{\n\t\t\tModelId: modelID,\n\t\t\tHyperParametersId: hyperparametersID,\n\t\t\tCheckpointId: checkpointID,\n\t\t\tLink: link,\n\t\t\tInfo: info,\n\t\t}\n\t}\n\n\tcheckpointIDs := make([]string, len(ckptCreationRequests))\n\tfor i, req := range ckptCreationRequests {\n\t\tcheckpointIDs[i] = req.CheckpointId\n\t\t_, err := srv.CreateCheckpoint(ctx, &req)\n\t\tif err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\t}\n\t// NOTE: CheckpointIds are sorted lexicographically, not chronologically!\n\tsort.Strings(checkpointIDs)\n\n\t// ListCheckpoints does not return checkpoint IDs, but rather tags of the form\n\t// <modelID>:<hyperparmetersID>:<checkpointId>\n\t// We account for this with hyperparametersTags\n\tcheckpointTags := make([]string, len(checkpointIDs))\n\tfor i, checkpointID := range checkpointIDs {\n\t\tcheckpointTags[i] = fmt.Sprintf(\"%s:%s:%s\", modelID, hyperparametersID, checkpointID)\n\t}\n\n\ttype ListCheckpointsTest struct {\n\t\tServer *api.RepositoryServer\n\t\tModelId string\n\t\tHyperparametersId string\n\t\tMarker string\n\t\tMaxItems int32\n\t\tExpectedCheckpointIds []string\n\t}\n\n\ttests := []ListCheckpointsTest{\n\t\t{\n\t\t\tServer: &srv,\n\t\t\tModelId: modelID,\n\t\t\tHyperparametersId: hyperparametersID,\n\t\t\tMaxItems: int32(5),\n\t\t\tExpectedCheckpointIds: checkpointTags[0:5],\n\t\t},\n\t\t{\n\t\t\tServer: &srv,\n\t\t\tModelId: modelID,\n\t\t\tHyperparametersId: hyperparametersID,\n\t\t\tMarker: checkpointIDs[2],\n\t\t\tMaxItems: int32(5),\n\t\t\tExpectedCheckpointIds: checkpointTags[2:7],\n\t\t},\n\t\t{\n\t\t\tServer: &srv,\n\t\t\tModelId: modelID,\n\t\t\tHyperparametersId: hyperparametersID,\n\t\t\tMarker: checkpointIDs[16],\n\t\t\tMaxItems: int32(5),\n\t\t\tExpectedCheckpointIds: checkpointTags[16:21],\n\t\t},\n\t\t{\n\t\t\tServer: &srv,\n\t\t\tModelId: modelID,\n\t\t\tHyperparametersId: hyperparametersID,\n\t\t\tMarker: checkpointIDs[16],\n\t\t\tMaxItems: int32(6),\n\t\t\tExpectedCheckpointIds: checkpointTags[16:21],\n\t\t},\n\t\t// TODO(frederick): Specification says that list endpoints should return items AFTER marker,\n\t\t// not after and including marker. 
No need to change behaviour, just make the two consistent.\n\t\t{\n\t\t\tServer: &srv,\n\t\t\tModelId: modelID,\n\t\t\tHyperparametersId: hyperparametersID,\n\t\t\tMarker: checkpointIDs[0],\n\t\t\tMaxItems: int32(20),\n\t\t\tExpectedCheckpointIds: checkpointTags[0:20],\n\t\t},\n\t}\n\n\tfor i, test := range tests {\n\t\tlistCkptRequest := api.ListCheckpointsRequest{\n\t\t\tModelId: test.ModelId,\n\t\t\tHyperParametersId: test.HyperparametersId,\n\t\t\tMarker: test.Marker,\n\t\t\tMaxItems: test.MaxItems,\n\t\t}\n\t\ttsrv := *test.Server\n\t\tlistCkptResponse, err := tsrv.ListCheckpoints(ctx, &listCkptRequest)\n\t\tif err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\t\terrorMessage := fmt.Sprintf(\"Test %d: ListCheckpoints response does not contain the expected CheckpointIds\", i)\n\t\tassert.Equalf(t, test.ExpectedCheckpointIds, listCkptResponse.CheckpointIds, errorMessage)\n\t}\n}", "func expectPeers(t *testing.T, s *libp2p.Service, addrs ...boson.Address) {\n\tt.Helper()\n\n\tpeers := s.Peers()\n\n\tif len(peers) != len(addrs) {\n\t\tt.Fatalf(\"got peers %v, want %v\", len(peers), len(addrs))\n\t}\n\n\tsort.Slice(addrs, func(i, j int) bool {\n\t\treturn bytes.Compare(addrs[i].Bytes(), addrs[j].Bytes()) == -1\n\t})\n\tsort.Slice(peers, func(i, j int) bool {\n\t\treturn bytes.Compare(peers[i].Address.Bytes(), peers[j].Address.Bytes()) == -1\n\t})\n\n\tfor i, got := range peers {\n\t\twant := addrs[i]\n\t\tif !got.Address.Equal(want) {\n\t\t\tt.Errorf(\"got %v peer %s, want %s\", i, got.Address, want)\n\t\t}\n\t}\n}", "func All(config, sysprobeYamlCfg ddconfig.ConfigReaderWriter, syscfg *sysconfig.Config) []Check {\n\treturn []Check{\n\t\tNewProcessCheck(config),\n\t\tNewContainerCheck(config),\n\t\tNewRTContainerCheck(config),\n\t\tNewConnectionsCheck(config, sysprobeYamlCfg, syscfg),\n\t\tNewPodCheck(),\n\t\tNewProcessDiscoveryCheck(config),\n\t\tNewProcessEventsCheck(config),\n\t}\n}", "func TestAllShares(t *testing.T) {\n\tshares := []m.Share{\n\t\t{\n\t\t\tID: uuid.MustParse(\"f43b0e48-13cc-4c6c-8a23-3a18a670effd\"),\n\t\t\tIsPublic: true,\n\t\t},\n\t\t{\n\t\t\tID: uuid.MustParse(\"a558aca3-fb40-400b-8dc6-ae49c705c791\"),\n\t\t\tIsPublic: false,\n\t\t},\n\t}\n\tdb.Create(&shares[0])\n\tdb.Create(&shares[1])\n\tdefer db.Delete(&shares[0])\n\tdefer db.Delete(&shares[1])\n\n\tt.Run(\"happy path\", func(t *testing.T) {\n\t\t// request\n\t\tres, _ := http.Get(url + \"/shares\")\n\t\t// parse\n\t\tbody, _ := ioutil.ReadAll(res.Body)\n\t\tvar actual []m.Share\n\t\tvar expected = []m.Share{parseShare(shares[0])}\n\t\t_ = json.Unmarshal(body, &actual)\n\t\t// assertions\n\t\tassert.Equal(t, http.StatusOK, res.StatusCode)\n\t\tassert.Len(t, actual, len(expected))\n\t\tassert.Equal(t, expected, actual)\n\t})\n\n\tt.Run(\"with admin key\", func(t *testing.T) {\n\t\t// do request\n\t\treq, _ := http.NewRequest(\"GET\", fmt.Sprint(url, \"/shares\"), nil)\n\t\treq.Header.Set(\"Authorization\", \"Bearer \"+base64.StdEncoding.EncodeToString([]byte(os.Getenv(\"ADMIN_KEY\"))))\n\t\tres, _ := http.DefaultClient.Do(req)\n\t\t// parse\n\t\tbody, _ := ioutil.ReadAll(res.Body)\n\t\tvar actual []m.Share\n\t\tvar expected = []m.Share{parseShare(shares[0]), parseShare(shares[1])}\n\t\t_ = json.Unmarshal(body, &actual)\n\t\t// assertions\n\t\tassert.Equal(t, http.StatusOK, res.StatusCode)\n\t\tassert.Len(t, actual, len(expected))\n\t\tassert.Equal(t, expected, actual)\n\t})\n}", "func expectPeersEventually(t *testing.T, s *libp2p.Service, addrs ...boson.Address) {\n\tt.Helper()\n\n\tvar peers []p2p.Peer\n\tfor i := 0; i 
< 100; i++ {\n\t\tpeers = s.Peers()\n\t\tif len(peers) == len(addrs) {\n\t\t\tbreak\n\t\t}\n\t\ttime.Sleep(10 * time.Millisecond)\n\t}\n\n\tif len(peers) != len(addrs) {\n\t\tt.Fatalf(\"got peers %v, want %v\", len(peers), len(addrs))\n\t}\n\n\tsort.Slice(addrs, func(i, j int) bool {\n\t\treturn bytes.Compare(addrs[i].Bytes(), addrs[j].Bytes()) == -1\n\t})\n\tsort.Slice(peers, func(i, j int) bool {\n\t\treturn bytes.Compare(peers[i].Address.Bytes(), peers[j].Address.Bytes()) == -1\n\t})\n\n\tfor i, got := range peers {\n\t\twant := addrs[i]\n\t\tif !got.Address.Equal(want) {\n\t\t\tt.Errorf(\"got %v peer %s, want %s\", i, got.Address, want)\n\t\t}\n\t}\n}", "func (th *testHelper) assertPresentInCollectionM(chaincodeName, marbleName string, peerList ...*nwo.Peer) {\n\tcommand := commands.ChaincodeQuery{\n\t\tChannelID: th.channelID,\n\t\tName: chaincodeName,\n\t\tCtor: fmt.Sprintf(`{\"Args\":[\"readMarble\",\"%s\"]}`, marbleName),\n\t}\n\texpectedMsg := fmt.Sprintf(`{\"docType\":\"marble\",\"name\":\"%s\"`, marbleName)\n\tfor _, peer := range peerList {\n\t\tth.queryChaincode(peer, command, expectedMsg, true)\n\t}\n}", "func (ctl *Ctl) CheckSpecFlags() error {\n\tfor _, registryJSON := range ctl.ScannerPodImageFacadeInternalRegistriesJSONSlice {\n\t\tregistry := &opssightv1.RegistryAuth{}\n\t\terr := json.Unmarshal([]byte(registryJSON), registry)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Invalid Registry Format\")\n\t\t}\n\t}\n\treturn nil\n}", "func TestMemList(t *testing.T) {\n\t// setup\n\tmemBlobs := NewMemBlobAdmin(crypto.SHA1)\n\texpectedKeys := buildExpectedKeys()\n\t// exercise\n\tlistChecks(t, expectedKeys, memBlobs)\n}", "func TestVtctldListAllTablets(t *testing.T) {\n\tdefer cluster.PanicHandler(t)\n\turl := fmt.Sprintf(\"http://%s:%d/api/keyspaces/\", clusterInstance.Hostname, clusterInstance.VtctldHTTPPort)\n\ttestURL(t, url, \"keyspace url\")\n\n\thealthCheckURL := fmt.Sprintf(\"http://%s:%d/debug/health\", clusterInstance.Hostname, clusterInstance.VtctldHTTPPort)\n\ttestURL(t, healthCheckURL, \"vtctld health check url\")\n\n\ttestListAllTablets(t)\n\tdeleteCell(t)\n\taddCellback(t)\n}", "func assertBalanceAmounts(amounts []*rosetta.Amount) error {\n\tcurrencies := make([]*rosetta.Currency, 0)\n\tfor _, amount := range amounts {\n\t\t// Ensure a currency is used at most once in balance.Amounts\n\t\tif containsCurrency(currencies, amount.Currency) {\n\t\t\treturn fmt.Errorf(\"currency %+v used in balance multiple times\", amount.Currency)\n\t\t}\n\t\tcurrencies = append(currencies, amount.Currency)\n\n\t\tif err := Amount(amount); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func TestPointingPairs(t *testing.T) {\n\tinputBoard := []byte(pointingPairsParam)\n\tb := NewBoard(inputBoard)\n\n\tassert.True(t, b.candidates[18].Contains('1'))\n\tassert.True(t, b.candidates[20].Contains('1'))\n\n\tassert.True(t, b.candidates[32].Contains('2'))\n\tassert.True(t, b.candidates[41].Contains('2'))\n\tassert.True(t, b.candidates[50].Contains('2'))\n\tassert.True(t, b.candidates[68].Contains('2'))\n\n\tassert.True(t, b.candidates[32].Contains('3'))\n\tassert.True(t, b.candidates[41].Contains('3'))\n\tassert.True(t, b.candidates[50].Contains('3'))\n\tassert.True(t, b.candidates[68].Contains('3'))\n\n\tassert.True(t, b.candidates[36].Contains('4'))\n\tassert.True(t, b.candidates[37].Contains('4'))\n\n\tassert.True(t, b.candidates[39].Contains('6'))\n\n\tassert.True(t, b.candidates[71].Contains('7'))\n\n\tassert.True(t, b.candidates[69].Contains('8'))\n\tassert.True(t, 
b.candidates[70].Contains('8'))\n\tassert.True(t, b.candidates[71].Contains('8'))\n\n\tassert.True(t, b.candidates[57].Contains('9'))\n\n\tb.PointingPairs()\n\n\tassert.True(t, !b.candidates[18].Contains('1'))\n\tassert.True(t, !b.candidates[20].Contains('1'))\n\n\tassert.True(t, !b.candidates[32].Contains('2'))\n\tassert.True(t, !b.candidates[41].Contains('2'))\n\tassert.True(t, !b.candidates[50].Contains('2'))\n\tassert.True(t, !b.candidates[68].Contains('2'))\n\n\tassert.True(t, !b.candidates[32].Contains('3'))\n\tassert.True(t, !b.candidates[41].Contains('3'))\n\tassert.True(t, !b.candidates[50].Contains('3'))\n\tassert.True(t, !b.candidates[68].Contains('3'))\n\n\tassert.True(t, !b.candidates[36].Contains('4'))\n\tassert.True(t, !b.candidates[37].Contains('4'))\n\n\tassert.True(t, !b.candidates[39].Contains('6'))\n\n\tassert.True(t, !b.candidates[71].Contains('7'))\n\n\tassert.True(t, !b.candidates[69].Contains('8'))\n\tassert.True(t, !b.candidates[70].Contains('8'))\n\tassert.True(t, !b.candidates[71].Contains('8'))\n\n\tassert.True(t, !b.candidates[57].Contains('9'))\n}", "func TestMemIdmAll(t *testing.T) {\n\tidm := memidm.New()\n\tsidm := test.NewSuiteIdm(t, idm)\n\tsidm.TestAll(t)\n}", "func TestDropSectorsVerify(t *testing.T) {\n\ttests := []struct {\n\t\tnumDropped, oldNum uint64\n\t\terr error\n\t}{\n\t\t{0, 0, nil},\n\t\t{0, 1, nil},\n\t\t{1, 1, nil},\n\t\t{2, 1, fmt.Errorf(\"bad input: numSectors (%v) is greater than the number of sectors in the contract (%v)\", 2, 1)},\n\t}\n\tfor _, test := range tests {\n\t\terr := dropSectorsVerify(test.numDropped, test.oldNum)\n\t\tif err != test.err && err.Error() != test.err.Error() {\n\t\t\tt.Errorf(\"dropSectorsVerify(%v, %v): expected '%v', got '%v'\", test.numDropped, test.oldNum, test.err, err)\n\t\t}\n\t}\n}", "func TestMgmt_CountTypes(t *testing.T) {\n\ttests := []struct {\n\t\tobjType vpc.ObjType\n\t\tcount int64\n\t}{\n\t\t{\n\t\t\tobjType: vpc.ObjTypeSwitch,\n\t\t\tcount: -1,\n\t\t},\n\t}\n\n\tfor i, test := range tests {\n\t\ttest := test\n\t\tt.Run(test.objType.String(), func(t *testing.T) {\n\t\t\tmgr, err := mgmt.New(nil)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to create new VPC Management handle: %v\", i, err)\n\t\t\t}\n\t\t\tdefer mgr.Close()\n\n\t\t\tcount, err := mgr.CountType(test.objType)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"[%d] unable to get a count of %s VPC objects: %v\", i, test.objType, err)\n\t\t\t}\n\n\t\t\tswitch {\n\t\t\tcase test.count == -1 && count >= 0:\n\t\t\tcase test.count >= 0 && int64(count) != test.count:\n\t\t\t\tt.Errorf(\"[%d] wrong number of %s VPC objects\", i, test.objType)\n\t\t\t}\n\t\t})\n\t}\n}", "func TestGetPeers(t *testing.T) {\n\tfor _, handler := range testHandlers {\n\t\tif len(handler.GetPeers(serviceTag)) != numberOfNodes-1 {\n\t\t\tt.Fatal(\"Not all nodes are connected to each other.\")\n\t\t}\n\t}\n}", "func assertReplicasNotOnSameNode(t *testing.T) {\n\ttables, _ := fakePegasusCluster.meta.ListAvailableApps()\n\tfor _, tb := range tables {\n\t\tresp, _ := fakePegasusCluster.meta.QueryConfig(tb.AppName)\n\t\tassert.Equal(t, len(resp.Partitions), int(tb.PartitionCount))\n\n\t\tfor _, p := range resp.Partitions {\n\t\t\tfor _, sec := range p.Secondaries {\n\t\t\t\tassert.NotEqual(t, p.Primary.GetAddress(), sec.GetAddress())\n\t\t\t}\n\t\t\tif len(p.Secondaries) >= 2 {\n\t\t\t\tassert.NotEqual(t, p.Secondaries[0].GetAddress(), p.Secondaries[1].GetAddress())\n\t\t\t}\n\t\t}\n\t}\n}", "func (s IntegrationSuite) TestCheckSchemaAllowAllDefiner(t *testing.T) 
{\n\tdir := getDir(t, \"testdata/routines\")\n\topts, err := OptionsForDir(dir)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error from OptionsForDir: %v\", err)\n\t}\n\n\tlogicalSchema := dir.LogicalSchemas[0]\n\twsOpts, err := workspace.OptionsForDir(dir, s.d.Instance)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error from workspace.OptionsForDir: %v\", err)\n\t}\n\twsSchema, err := workspace.ExecLogicalSchema(logicalSchema, wsOpts)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error from workspace.ExecLogicalSchema: %v\", err)\n\t} else if len(wsSchema.Failures) != 0 {\n\t\tt.Fatalf(\"Unexpectedly found %d workspace failures\", len(wsSchema.Failures))\n\t}\n\n\t// There's intentionally no .skeema file here; force the flavor value\n\t// corresponding to the current Dockerized test db here\n\topts.Flavor = s.d.Flavor()\n\n\t// Should have no annotations at all!\n\tresult := CheckSchema(wsSchema, opts)\n\tif len(result.Annotations) > 0 {\n\t\tt.Errorf(\"Expected 0 annotations, instead found %d\", len(result.Annotations))\n\t}\n}", "func All(property interface{}) Truth {\n\tmustBeCleanStart()\n\terr := quick.Check(property, nil)\n\treturn Truth{err == nil, fmt.Sprintf(\"try %v\", err)}\n}", "func (m *MockReader) List() ([]*entity.StreetMarket, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"List\")\n\tret0, _ := ret[0].([]*entity.StreetMarket)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func testResourceAll(t *testing.T, s *Service) {\n\tres, err := s.ResourceAll(context.TODO())\n\tif err != nil {\n\t\tt.Logf(\"testResourceAll error(%v) \\n\", err)\n\t\treturn\n\t}\n\tt.Logf(\"testResourceAll res: %+v \\n\", res)\n}", "func (m *MockWhiteListIPRepository) FindAll() ([]*entities.WhiteListIP, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"FindAll\")\n\tret0, _ := ret[0].([]*entities.WhiteListIP)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockRepository) List() ([]*entity.StreetMarket, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"List\")\n\tret0, _ := ret[0].([]*entity.StreetMarket)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestLevelAll(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\tstart Level\n\t\tall []Level\n\t\twant bool\n\t}{\n\t\t{\n\t\t\tname: \"All Panic and Error in Panic Level\",\n\t\t\tstart: Panic,\n\t\t\tall: []Level{Panic, Error},\n\t\t\twant: false,\n\t\t},\n\t\t{\n\t\t\tname: \"All Panic and Error in Panic and Error Level\",\n\t\t\tstart: Panic | Error,\n\t\t\tall: []Level{Panic, Error},\n\t\t\twant: true,\n\t\t},\n\t}\n\n\tfor _, test := range tests {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tif got := test.start.All(test.all...); got != test.want {\n\t\t\t\tt.Errorf(\"%s got = %v; want %v\", test.name, got, test.want)\n\t\t\t}\n\t\t})\n\t}\n}", "func (_m *BlockchainRetriever) GetAllCommitteeValidatorCandidateFlattenListFromDatabase() ([]string, error) {\n\tret := _m.Called()\n\n\tvar r0 []string\n\tif rf, ok := ret.Get(0).(func() []string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]string)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func TestSummaryMap_OverlappingCorpora(t *testing.T) {\n\tunittest.SmallTest(t)\n\n\tconst corpusOneUntriaged = \"1114c84eaa5dde4a247c93d9b93a136e\"\n\tconst corpusTwoUntriaged = \"222b0d44658ad9c451c39e38c9281d47\"\n\tconst corpusOne = \"corpusOne\"\n\tconst corpusTwo = 
\"corpusTwo\"\n\n\tcommits := bug_revert.MakeTestCommits()[:2]\n\n\ttile := &tiling.Tile{\n\t\tCommits: commits,\n\t\tTraces: map[tiling.TraceID]*tiling.Trace{\n\t\t\t\",device=alpha,name=test_one,source_type=corpusOne,\": tiling.NewTrace(types.DigestSlice{\n\t\t\t\tbug_revert.AlfaPositiveDigest, corpusOneUntriaged,\n\t\t\t}, map[string]string{\n\t\t\t\t\"device\": bug_revert.AlphaDevice,\n\t\t\t\ttypes.PrimaryKeyField: string(bug_revert.TestOne),\n\t\t\t\ttypes.CorpusField: corpusOne,\n\t\t\t}, nil),\n\t\t\t\",device=beta,name=test_one,source_type=corpusTwo,\": tiling.NewTrace(types.DigestSlice{\n\t\t\t\tcorpusTwoUntriaged, corpusTwoUntriaged,\n\t\t\t}, map[string]string{\n\t\t\t\t\"device\": bug_revert.BetaDevice,\n\t\t\t\ttypes.PrimaryKeyField: string(bug_revert.TestOne),\n\t\t\t\ttypes.CorpusField: corpusTwo,\n\t\t\t}, nil),\n\t\t},\n\t}\n\n\tvar e expectations.Expectations\n\te.Set(bug_revert.TestOne, bug_revert.AlfaPositiveDigest, expectations.Positive)\n\n\tdc := digest_counter.New(tile)\n\tblamer, err := blame.New(tile, &e)\n\trequire.NoError(t, err)\n\n\ttr := asSlice(tile.Traces)\n\n\td := Data{\n\t\tTraces: tr,\n\t\tExpectations: &e,\n\t\tByTrace: dc.ByTrace(),\n\t\tBlamer: blamer,\n\t}\n\n\tsum := d.Calculate(nil, nil, true)\n\tassert.Len(t, sum, 2)\n\trequire.Equal(t, []*TriageStatus{\n\t\t{\n\t\t\tName: bug_revert.TestOne,\n\t\t\tUntriaged: 1,\n\t\t\tUntHashes: types.DigestSlice{corpusOneUntriaged},\n\t\t\tNum: 1,\n\t\t\tCorpus: corpusOne,\n\t\t\tBlame: []blame.WeightedBlame{\n\t\t\t\t{\n\t\t\t\t\tAuthor: bug_revert.InnocentAuthor,\n\t\t\t\t\tProb: 0.5,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAuthor: bug_revert.BuggyAuthor,\n\t\t\t\t\tProb: 0.5,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: bug_revert.TestOne,\n\t\t\tUntriaged: 1,\n\t\t\tUntHashes: types.DigestSlice{corpusTwoUntriaged},\n\t\t\tNum: 1,\n\t\t\tCorpus: corpusTwo,\n\t\t\tBlame: []blame.WeightedBlame{\n\t\t\t\t{\n\t\t\t\t\tAuthor: bug_revert.InnocentAuthor,\n\t\t\t\t\tProb: 0.5,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tAuthor: bug_revert.BuggyAuthor,\n\t\t\t\t\tProb: 0.5,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}, sum)\n}", "func TestIntegrationAll(t *testing.T) {\n\ts := New(genericRegions[0], genericRegions)\n\tresult := len(s.All())\n\tif len(s.sessions) != result {\n\t\tt.Fatalf(\"All() length invalid, expected: %d, got: %d\", len(genericRegions), result)\n\t}\n}", "func readTestData(t *testing.T, r FileSetReader, shard uint32, timestamp time.Time, entries []testEntry) {\n\tfor _, underTest := range readTestTypes {\n\t\terr := r.Open(testNs1ID, 0, timestamp)\n\t\trequire.NoError(t, err)\n\n\t\trequire.Equal(t, len(entries), r.Entries())\n\t\trequire.Equal(t, 0, r.EntriesRead())\n\n\t\tbloomFilter, err := r.ReadBloomFilter()\n\t\tassert.NoError(t, err)\n\t\t// Make sure the bloom filter doesn't always return true\n\t\tassert.False(t, bloomFilter.Test([]byte(\"some_random_data\")))\n\t\texpectedM, expectedK := bloom.EstimateFalsePositiveRate(\n\t\t\tuint(len(entries)), defaultIndexBloomFilterFalsePositivePercent)\n\t\tassert.Equal(t, expectedK, bloomFilter.K())\n\t\t// EstimateFalsePositiveRate always returns at least 1, so skip this check\n\t\t// if len entries is 0\n\t\tif len(entries) > 0 {\n\t\t\tassert.Equal(t, expectedM, bloomFilter.M())\n\t\t}\n\n\t\tfor i := 0; i < r.Entries(); i++ {\n\t\t\tswitch underTest {\n\t\t\tcase readTestTypeData:\n\t\t\t\tid, data, checksum, err := r.Read()\n\t\t\t\trequire.NoError(t, err)\n\n\t\t\t\tdata.IncRef()\n\n\t\t\t\tassert.Equal(t, entries[i].id, id.String())\n\t\t\t\tassert.True(t, 
bytes.Equal(entries[i].data, data.Get()))\n\t\t\t\tassert.Equal(t, digest.Checksum(entries[i].data), checksum)\n\n\t\t\t\tassert.Equal(t, i+1, r.EntriesRead())\n\n\t\t\t\t// Verify that the bloomFilter was bootstrapped properly by making sure it\n\t\t\t\t// at least contains every ID\n\t\t\t\tassert.True(t, bloomFilter.Test(id.Data().Get()))\n\n\t\t\t\tid.Finalize()\n\t\t\t\tdata.DecRef()\n\t\t\t\tdata.Finalize()\n\t\t\tcase readTestTypeMetadata:\n\t\t\t\tid, length, checksum, err := r.ReadMetadata()\n\t\t\t\trequire.NoError(t, err)\n\n\t\t\t\tassert.True(t, id.Equal(id))\n\t\t\t\tassert.Equal(t, digest.Checksum(entries[i].data), checksum)\n\t\t\t\tassert.Equal(t, len(entries[i].data), length)\n\n\t\t\t\tassert.Equal(t, i+1, r.MetadataRead())\n\n\t\t\t\t// Verify that the bloomFilter was bootstrapped properly by making sure it\n\t\t\t\t// at least contains every ID\n\t\t\t\tassert.True(t, bloomFilter.Test(id.Data().Get()))\n\n\t\t\t\tid.Finalize()\n\t\t\t}\n\t\t}\n\n\t\trequire.NoError(t, r.Close())\n\t}\n}", "func TestBuildPeerManagerNodeList(t *testing.T) {\n\tg := &Gateway{\n\t\tnodes: map[modules.NetAddress]*node{\n\t\t\t\"foo\": {NetAddress: \"foo\", WasOutboundPeer: true},\n\t\t\t\"bar\": {NetAddress: \"bar\", WasOutboundPeer: false},\n\t\t\t\"baz\": {NetAddress: \"baz\", WasOutboundPeer: true},\n\t\t\t\"quux\": {NetAddress: \"quux\", WasOutboundPeer: false},\n\t\t},\n\t}\n\tnodelist := g.buildPeerManagerNodeList()\n\t// all outbound nodes should be at the front of the list\n\tvar i int\n\tfor i < len(nodelist) && g.nodes[nodelist[i]].WasOutboundPeer {\n\t\ti++\n\t}\n\tfor i < len(nodelist) && !g.nodes[nodelist[i]].WasOutboundPeer {\n\t\ti++\n\t}\n\tif i != len(nodelist) {\n\t\tt.Fatal(\"bad nodelist:\", nodelist)\n\t}\n}", "func (tp *TestSuite) TestPinLsPrecedence(t *testing.T) {\n\t// Testing precedence of recursive, direct and indirect pins\n\t// Results should be recursive > indirect, direct > indirect, and recursive > direct\n\n\tt.Run(\"TestPinLsPredenceRecursiveIndirect\", tp.TestPinLsPredenceRecursiveIndirect)\n\tt.Run(\"TestPinLsPrecedenceDirectIndirect\", tp.TestPinLsPrecedenceDirectIndirect)\n\tt.Run(\"TestPinLsPrecedenceRecursiveDirect\", tp.TestPinLsPrecedenceRecursiveDirect)\n}", "func AfterEachCheck() error {\n\tvar errorMsg = \"\"\n\n\tlogf.Log.Info(\"AfterEachCheck\")\n\n\t// Phase 1 to delete dangling resources\n\tpvcs, _ := gTestEnv.KubeInt.CoreV1().PersistentVolumeClaims(\"default\").List(context.TODO(), metaV1.ListOptions{})\n\tif len(pvcs.Items) != 0 {\n\t\terrorMsg += \" found leftover PersistentVolumeClaims\"\n\t\tlogf.Log.Info(\"AfterEachCheck: found leftover PersistentVolumeClaims, test fails.\")\n\t}\n\n\tpvs, _ := gTestEnv.KubeInt.CoreV1().PersistentVolumes().List(context.TODO(), metaV1.ListOptions{})\n\tif len(pvs.Items) != 0 {\n\t\terrorMsg += \" found leftover PersistentVolumes\"\n\t\tlogf.Log.Info(\"AfterEachCheck: found leftover PersistentVolumes, test fails.\")\n\t}\n\n\t// Mayastor volumes\n\tmsvGVR := schema.GroupVersionResource{\n\t\tGroup: \"openebs.io\",\n\t\tVersion: \"v1alpha1\",\n\t\tResource: \"mayastorvolumes\",\n\t}\n\tmsvs, _ := gTestEnv.DynamicClient.Resource(msvGVR).Namespace(NSMayastor).List(context.TODO(), metaV1.ListOptions{})\n\tif len(msvs.Items) != 0 {\n\t\terrorMsg += \" found leftover MayastorVolumes\"\n\t\tlogf.Log.Info(\"AfterEachCheck: found leftover MayastorVolumes, test fails.\")\n\t}\n\n\t// Check that Mayastor pods are healthy no restarts or fails.\n\terr := CheckPods(NSMayastor)\n\tif err != nil {\n\t\terrorMsg = 
fmt.Sprintf(\"%s %v\", errorMsg, err)\n\t}\n\n\tif len(errorMsg) != 0 {\n\t\treturn errors.New(errorMsg)\n\t}\n\treturn nil\n}", "func (m *MockCommitClient) ListCheckRuns(org, repo, ref string) (*github.CheckRunList, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListCheckRuns\", org, repo, ref)\n\tret0, _ := ret[0].(*github.CheckRunList)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func verifyMatchNodes(hosts []kubeoneapi.HostConfig, nodes corev1.NodeList, logger logrus.FieldLogger, verbose bool) []error {\n\tif len(nodes.Items) != len(hosts) {\n\t\tlogger.Errorf(\"Mismatch between nodes in the cluster (%d) and nodes defined in the manifest (%d).\", len(nodes.Items), len(hosts))\n\n\t\treturn []error{fail.RuntimeError{\n\t\t\tOp: \"checking match between Nodes and Hosts\",\n\t\t\tErr: errors.Errorf(\"expected %d cluster nodes but got %d\", len(nodes.Items), len(hosts)),\n\t\t}}\n\t}\n\n\tnodesFound := map[string]bool{}\n\n\tfor _, node := range nodes.Items {\n\t\tnodesFound[node.Name] = false\n\n\t\tfor _, host := range hosts {\n\t\t\tfor _, addr := range node.Status.Addresses {\n\t\t\t\tswitch addr.Type {\n\t\t\t\tcase corev1.NodeInternalIP, corev1.NodeExternalIP:\n\t\t\t\t\tswitch addr.Address {\n\t\t\t\t\tcase host.PrivateAddress, host.PublicAddress:\n\t\t\t\t\t\tnodesFound[node.Name] = true\n\t\t\t\t\t\tif verbose {\n\t\t\t\t\t\t\tlogger.Infof(\"Found endpoint %q (type %s) for the node %q.\", addr.Address, addr.Type, node.ObjectMeta.Name)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\tcase corev1.NodeExternalDNS, corev1.NodeHostName, corev1.NodeInternalDNS:\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tvar errs []error\n\n\tfor nodeName, found := range nodesFound {\n\t\tif !found {\n\t\t\terrs = append(errs, fail.RuntimeError{\n\t\t\t\tOp: \"matching found Nodes\",\n\t\t\t\tErr: errors.Errorf(\"unable to match node %q to machines defined in the manifest\", nodeName),\n\t\t\t})\n\t\t}\n\t}\n\n\treturn errs\n}", "func (t TopicInfo) AllReplicasInSync() bool {\n\tfor _, partition := range t.Partitions {\n\t\tif !util.SameElements(partition.Replicas, partition.ISR) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func TestAllBootEntryVars(t *testing.T) {\n\tbevs := AllBootEntryVars()\n\tif len(bevs) != 11 {\n\t\tfor i, e := range bevs {\n\t\t\tt.Logf(\"#%d: %s\", i, e)\n\t\t}\n\t\tt.Errorf(\"expected 11 boot vars, got %d\", len(bevs))\n\t}\n}", "func (m *MockDao) AllInfo() ([]*model.Info, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AllInfo\")\n\tret0, _ := ret[0].([]*model.Info)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestTransactionSignaturesLists_MarshallBinary_MaxElementsCount(t *testing.T) {\n\n\t// Reference data initialisation.\n\tTSLs := &TSLs{}\n\tfor j := 0; j < TSLsMaxCount; j++ {\n\t\tTSL := NewTSL()\n\t\t_, err := rand.Read(TSL.TxUUID.Bytes[:])\n\t\tif err != nil {\n\t\t\tt.Fatal()\n\t\t}\n\n\t\tsig := &lamport.Signature{}\n\t\t_, err = rand.Read(sig.Bytes[:])\n\t\tif err != nil {\n\t\t\tt.Fatal()\n\t\t}\n\t\t_ = TSL.Members.Add(sig)\n\n\t\terr = TSLs.Add(TSL)\n\t\tif err != nil {\n\t\t\tt.Fatal()\n\t\t}\n\t}\n\n\t// Marshalling.\n\tbinary, _ := TSLs.MarshalBinary()\n\n\trestoredTSLs := &TSLs{}\n\t_ = restoredTSLs.UnmarshalBinary(binary)\n\n\t// Checks\n\tif restoredTSLs.Count() != TSLsMaxCount {\n\t\tt.Fatal()\n\t}\n\tif restoredTSLs.Count() != TSLs.Count() {\n\t\tt.Fatal()\n\t}\n\n\tfor i, restoredTSL := range restoredTSLs.At {\n\t\ttransactionUUIDsAreEqual := 
bytes.Compare(\n\t\t\trestoredTSL.TxUUID.Bytes[:],\n\t\t\tTSLs.At[i].TxUUID.Bytes[:]) == 0\n\n\t\tif !transactionUUIDsAreEqual {\n\t\t\tt.Fatal()\n\t\t}\n\n\t\tfor j, sig := range restoredTSL.Members.At {\n\t\t\tsigNIsEqual := bytes.Compare(sig.Bytes[:], TSLs.At[i].Members.At[j].Bytes[:]) == 0\n\t\t\tif !sigNIsEqual {\n\t\t\t\tt.Fatal()\n\t\t\t}\n\t\t}\n\t}\n}", "func checkPowLimitsAreConsistent(t *testing.T, params *chaincfg.Params) {\n\tpowLimitBigInt := params.PowLimit\n\tpowLimitCompact := params.PowLimitBits\n\n\ttoBig := standalone.CompactToBig(powLimitCompact)\n\ttoCompact := standalone.BigToCompact(powLimitBigInt)\n\n\t// Check params.PowLimitBits matches params.PowLimit converted\n\t// into the compact form\n\tif toCompact != powLimitCompact {\n\t\tt.Fatalf(\"PowLimit values mismatch:\\n\"+\n\t\t\t\"params.PowLimit :%064x\\n\"+\n\t\t\t\" :%x\\n\"+\n\t\t\t\"params.PowLimitBits:%064x\\n\"+\n\t\t\t\" :%x\\n\"+\n\t\t\t\"params.PowLimit is not consistent with the params.PowLimitBits\",\n\t\t\tpowLimitBigInt, toCompact, toBig, powLimitCompact)\n\t}\n}", "func TestAllAthenaSchemas(t *testing.T) {\n\tfor _, eventType := range eventTypes {\n\t\tschema, err := GetEventSchemaFromType(eventType)\n\t\trequire.NoError(t, err)\n\t\tviewSchema, err := schema.ViewSchema()\n\t\trequire.NoError(t, err)\n\t\trequire.NotEmpty(t, viewSchema)\n\t\ttableSchema, err := schema.TableSchema()\n\t\trequire.NoError(t, err)\n\t\trequire.NotEmpty(t, tableSchema)\n\t}\n}", "func checkPallocBits(t *testing.T, got, want *PallocBits) bool {\n\td := DiffPallocBits(got, want)\n\tif len(d) != 0 {\n\t\tt.Errorf(\"%d range(s) different\", len(d))\n\t\tfor _, bits := range d {\n\t\t\tt.Logf(\"\\t@ bit index %d\", bits.I)\n\t\t\tt.Logf(\"\\t| got: %s\", StringifyPallocBits(got, bits))\n\t\t\tt.Logf(\"\\t| want: %s\", StringifyPallocBits(want, bits))\n\t\t}\n\t\treturn false\n\t}\n\treturn true\n}", "func newFooterTest(t *testing.T) {\n\n\tresult := newFooter(1, 15, 1, 1).List\n\texpected := []string{\"1\", \"2\", \"...\", \"15\"}\n\tfor i := 0; i < len(expected); i++ {\n\t\tif result[i] != expected[i] {\n\t\t\tt.Error()\n\t\t}\n\t}\n\n}", "func TestGetMinterAddresses(t *testing.T) {\n\n\tdbsql, err := sql.Open(\"postgres\", \"user=postgres dbname=gorm password=simsim sslmode=disable\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tdb, err := InitDB(dbsql)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\taddresses, err := db.GetMinterAddresses(344178872)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif addresses != nil {\n\t\tt.Errorf(\"Addresses must be empty %d!\", len(addresses))\n\t}\n\n}", "func (checker PodChecker) runIndividualChecks() *models.IstioTypeValidations {\n\ttypeValidations := models.IstioTypeValidations{}\n\tif len(checker.Pods) == 0 {\n\t\treturn &typeValidations\n\t}\n\n\tnameValidations := models.IstioNameValidations{}\n\ttypeValidations[podsCheckerType] = &nameValidations\n\n\tfor _, pod := range checker.Pods {\n\t\tvalidation := models.IstioValidation{\n\t\t\tName: pod.ObjectMeta.Name,\n\t\t\tObjectType: podsCheckerType,\n\t\t\tValid: true,\n\t\t}\n\t\tnameValidations[pod.ObjectMeta.Name] = &validation\n\n\t\tcheckers := checker.enabledCheckersFor(&pod)\n\n\t\tfor _, podChecker := range checkers {\n\t\t\tchecks, isValid := podChecker.Check()\n\t\t\tvalidation.Checks = append(validation.Checks, checks...)\n\t\t\tvalidation.Valid = validation.Valid && isValid\n\t\t}\n\t}\n\n\treturn &typeValidations\n}", "func TestValidatorDippingInAndOut(t *testing.T) {\n\t// initial setup\n\tapp := 
simapp.Setup(false)\n\tctx := app.BaseApp.NewContext(false, tmproto.Header{})\n\tapp.CustomSlashingKeeper.SetParams(ctx, testslashing.TestParams())\n\n\tpower := int64(100)\n\n\tpks := simapp.CreateTestPubKeys(3)\n\tsimapp.AddTestAddrsFromPubKeys(app, ctx, pks, sdk.TokensFromConsensusPower(200, sdk.DefaultPowerReduction))\n\n\taddr, val := pks[0].Address(), pks[0]\n\tconsAddr := sdk.ConsAddress(addr)\n\ttstaking := teststaking.NewHelper(t, ctx, app.CustomStakingKeeper, app.CustomGovKeeper)\n\tvalAddr := sdk.ValAddress(addr)\n\n\ttstaking.CreateValidator(valAddr, val, true)\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\n\t// 100 first blocks OK\n\theight := int64(0)\n\tfor ; height < int64(100); height++ {\n\t\tctx = ctx.WithBlockHeight(height)\n\t\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, val.Address(), power, true)\n\t}\n\n\t// add one more validator into the set\n\ttstaking.CreateValidator(sdk.ValAddress(pks[1].Address()), pks[1], true)\n\tvalidatorUpdates := staking.EndBlocker(ctx, app.CustomStakingKeeper)\n\trequire.Equal(t, 1, len(validatorUpdates))\n\ttstaking.CheckValidator(valAddr, stakingtypes.Active)\n\ttstaking.CheckValidator(sdk.ValAddress(pks[1].Address()), stakingtypes.Active)\n\n\t// 600 more blocks happened\n\theight = 700\n\tctx = ctx.WithBlockHeight(height)\n\n\tvalidatorUpdates = staking.EndBlocker(ctx, app.CustomStakingKeeper)\n\trequire.Equal(t, 0, len(validatorUpdates))\n\ttstaking.CheckValidator(valAddr, stakingtypes.Active)\n\n\t// shouldn't be inactive/kicked yet\n\ttstaking.CheckValidator(valAddr, stakingtypes.Active)\n\n\t// validator misses 500 more blocks, 501 total\n\tlatest := height\n\tfor ; height < latest+501; height++ {\n\t\tctx = ctx.WithBlockHeight(height)\n\t\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, addr, 1, false)\n\t}\n\n\t// should now be inactive & kicked\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Inactive)\n\n\t// check all the signing information\n\tsignInfo, found := app.CustomSlashingKeeper.GetValidatorSigningInfo(ctx, consAddr)\n\trequire.True(t, found)\n\trequire.Equal(t, int64(10), signInfo.MischanceConfidence)\n\trequire.Equal(t, int64(111), signInfo.Mischance)\n\trequire.Equal(t, int64(99), signInfo.LastPresentBlock)\n\trequire.Equal(t, int64(121), signInfo.MissedBlocksCounter)\n\trequire.Equal(t, int64(100), signInfo.ProducedBlocksCounter)\n\n\t// some blocks pass\n\theight = int64(5000)\n\tctx = ctx.WithBlockHeight(height)\n\n\t// Try pausing on inactive node here, should fail\n\terr := app.CustomSlashingKeeper.Pause(ctx, valAddr)\n\trequire.Error(t, err)\n\n\t// validator rejoins and starts signing again\n\tapp.CustomSlashingKeeper.Activate(ctx, valAddr)\n\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, val.Address(), 1, true)\n\theight++\n\n\t// validator should be active after signing next block after active\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Active)\n\n\t// miss one block after pause\n\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, val.Address(), 1, false)\n\theight++\n\n\t// Try pausing on active node here, should success\n\terr = app.CustomSlashingKeeper.Pause(ctx, valAddr)\n\trequire.NoError(t, err)\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Paused)\n\n\t// validator misses 501 blocks\n\tlatest = height\n\tfor ; height < latest+501; height++ {\n\t\tctx = 
ctx.WithBlockHeight(height)\n\t\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, val.Address(), 1, false)\n\t}\n\n\t// validator should not be in inactive status since node is paused\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Paused)\n\n\t// After reentering after unpause, check if signature info is recovered correctly\n\tsignInfo, found = app.CustomSlashingKeeper.GetValidatorSigningInfo(ctx, consAddr)\n\trequire.True(t, found)\n\trequire.Equal(t, int64(1), signInfo.MischanceConfidence)\n\trequire.Equal(t, int64(0), signInfo.Mischance)\n\trequire.Equal(t, int64(5000), signInfo.LastPresentBlock)\n\trequire.Equal(t, int64(122), signInfo.MissedBlocksCounter)\n\trequire.Equal(t, int64(101), signInfo.ProducedBlocksCounter)\n\n\t// Try activating paused node: should unpause but it's activating - should fail\n\terr = app.CustomSlashingKeeper.Activate(ctx, valAddr)\n\trequire.Error(t, err)\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Paused)\n\n\t// Unpause node and it should be active\n\terr = app.CustomSlashingKeeper.Unpause(ctx, valAddr)\n\trequire.NoError(t, err)\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Active)\n\n\t// After reentering after unpause, check if signature info is recovered correctly\n\tsignInfo, found = app.CustomSlashingKeeper.GetValidatorSigningInfo(ctx, consAddr)\n\trequire.True(t, found)\n\trequire.Equal(t, int64(1), signInfo.MischanceConfidence)\n\trequire.Equal(t, int64(0), signInfo.Mischance)\n\trequire.Equal(t, int64(5000), signInfo.LastPresentBlock)\n\trequire.Equal(t, int64(122), signInfo.MissedBlocksCounter)\n\trequire.Equal(t, int64(101), signInfo.ProducedBlocksCounter)\n\n\t// Miss another 501 blocks\n\tlatest = height\n\tfor ; height < latest+501; height++ {\n\t\tctx = ctx.WithBlockHeight(height)\n\t\tapp.CustomSlashingKeeper.HandleValidatorSignature(ctx, val.Address(), 1, false)\n\t}\n\n\t// validator should be in inactive status\n\tstaking.EndBlocker(ctx, app.CustomStakingKeeper)\n\ttstaking.CheckValidator(valAddr, stakingtypes.Inactive)\n}", "func verifyTables(\n\tt *testing.T,\n\ttc *testcluster.TestCluster,\n\tcompleted chan int,\n\texpectedNumOfTables int,\n\tdescIDStart int64,\n) {\n\tdescIDEnd := descIDStart + int64(expectedNumOfTables)\n\tusedTableIDs := make(map[sqlbase.ID]string)\n\tvar count int\n\tfor id := range completed {\n\t\tcount++\n\t\ttableName := fmt.Sprintf(\"table_%d\", id)\n\t\tkvDB := tc.Servers[count%tc.NumServers()].KVClient().(*client.DB)\n\t\ttableDesc := sqlbase.GetTableDescriptor(kvDB, \"test\", tableName)\n\t\tif int64(tableDesc.ID) < descIDStart || int64(tableDesc.ID) >= descIDEnd {\n\t\t\tt.Fatalf(\n\t\t\t\t\"table %s's ID %d is not within the expected range of %d to %d\",\n\t\t\t\ttableName,\n\t\t\t\ttableDesc.ID,\n\t\t\t\tdescIDStart,\n\t\t\t\tdescIDEnd,\n\t\t\t)\n\t\t}\n\t\tusedTableIDs[tableDesc.ID] = tableName\n\t}\n\n\tif e, a := expectedNumOfTables, len(usedTableIDs); e != a {\n\t\tt.Fatalf(\"expected %d tables created, only got %d\", e, a)\n\t}\n\n\tkvDB := tc.Servers[count%tc.NumServers()].KVClient().(*client.DB)\n\tif descID, err := kvDB.Get(context.Background(), keys.DescIDGenerator); err != nil {\n\t\tt.Fatal(err)\n\t} else {\n\t\tif e, a := descIDEnd, descID.ValueInt(); e != a {\n\t\t\tt.Fatalf(\"expected next descriptor ID to be %d, got %d\", e, a)\n\t\t}\n\t}\n}", "func TestTransactionSignaturesLists_MarshallBinary_OneTSL_1024Signatures(t 
*testing.T) {\n\n\t// Reference data initialisation.\n\ttsl := NewTSL()\n\ttsl.TxUUID.Bytes = [16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6}\n\tfor i := 0; i < 1024; i++ {\n\t\tsig := &lamport.Signature{}\n\t\t_, err := rand.Read(sig.Bytes[:])\n\t\tif err != nil {\n\t\t\tt.Fatal()\n\t\t}\n\n\t\t_ = tsl.Members.Add(sig)\n\t}\n\n\ttsls := &TSLs{}\n\t_ = tsls.Add(tsl)\n\n\t// Marshalling.\n\tbinary, _ := tsls.MarshalBinary()\n\n\trestoredTSLs := &TSLs{}\n\t_ = restoredTSLs.UnmarshalBinary(binary)\n\n\t// Checks\n\tif tsls.Count() != restoredTSLs.Count() {\n\t\tt.Fatal()\n\t}\n\n\t// Transaction UUID\n\ttransactionUUIDsAreEqual := bytes.Compare(\n\t\ttsls.At[0].TxUUID.Bytes[:],\n\t\trestoredTSLs.At[0].TxUUID.Bytes[:]) == 0\n\n\tif !transactionUUIDsAreEqual {\n\t\tt.Fatal()\n\t}\n\n\t// At count\n\tpubKeysCountAreEqual := restoredTSLs.At[0].Members.Count() == restoredTSLs.At[0].Members.Count()\n\tif !pubKeysCountAreEqual {\n\t\tt.Fatal()\n\t}\n\n\t// At data\n\tfor i, sig := range tsls.At[0].Members.At {\n\t\trestoredSig := restoredTSLs.At[0].Members.At[i]\n\t\tsigNIsEqual := bytes.Compare(sig.Bytes[:], restoredSig.Bytes[:]) == 0\n\t\tif !sigNIsEqual {\n\t\t\tt.Fatal()\n\t\t}\n\t}\n}", "func verifyMembership(n *nwo.Network, expectedPeers []*nwo.Peer, channelName string, chaincodes ...string) {\n\texpectedDiscoveredPeers := make([]nwo.DiscoveredPeer, 0, len(expectedPeers))\n\tfor _, peer := range expectedPeers {\n\t\texpectedDiscoveredPeers = append(expectedDiscoveredPeers, n.DiscoveredPeer(peer, chaincodes...))\n\t}\n\tfor _, peer := range expectedPeers {\n\t\tEventually(nwo.DiscoverPeers(n, peer, \"User1\", channelName), n.EventuallyTimeout).Should(ConsistOf(expectedDiscoveredPeers))\n\t}\n}", "func TestValidateSignature(t *testing.T){\n testTables := []struct {\n object *mtr.CTObject\n expected bool\n\n }{\n {&sthCTObject, true},\n {&withoutBlobCTObject, false},\n {&sthInvalidCTObject, false},\n\n }\n\n for _, testTable := range testTables{\n result := ValidateSignature(testTable.object)\n if testTable.expected != result {\n t.Errorf(\"Error validating %s, got %v when expecting %v\", testTable.object.TypeID, result, testTable.expected)\n }\n }\n}", "func (s List) IdentifiesAll(targets []*unstructured.Unstructured) bool {\n\tif len(s) == len(targets) && len(s) == 0 {\n\t\treturn true\n\t}\n\tif len(s) != len(targets) {\n\t\treturn false\n\t}\n\tfor _, t := range targets {\n\t\tif !s.ContainsByIdentity(t) {\n\t\t\t// return false if any item does not match\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func verifyTokenAll(t *testing.T, given string, expected string) {\n\tlex := New(strings.NewReader(given))\n\ttokens, err := lex.TokenAll()\n\tif err != nil {\n\t\tt.Fatalf(\"lexer failed: %s\", err)\n\t}\n\tverifyTokens(t, tokens, strings.Split(expected, \",\"))\n}", "func (th *testHelper) assertPresentInCollectionMPD(chaincodeName, marbleName string, peerList ...*nwo.Peer) {\n\tcommand := commands.ChaincodeQuery{\n\t\tChannelID: th.channelID,\n\t\tName: chaincodeName,\n\t\tCtor: fmt.Sprintf(`{\"Args\":[\"readMarblePrivateDetails\",\"%s\"]}`, marbleName),\n\t}\n\texpectedMsg := fmt.Sprintf(`{\"docType\":\"marblePrivateDetails\",\"name\":\"%s\"`, marbleName)\n\tfor _, peer := range peerList {\n\t\tth.queryChaincode(peer, command, expectedMsg, true)\n\t}\n}", "func (m *MockKeystore) GetAll(prefix string) ([]keystoreregistry.KeyValueVersion, error) {\n\tret := m.ctrl.Call(m, \"GetAll\", prefix)\n\tret0, _ := ret[0].([]keystoreregistry.KeyValueVersion)\n\tret1, _ := 
ret[1].(error)\n\treturn ret0, ret1\n}", "func TestMultiSchemaSupport(t *testing.T) {\n\twithEachTestDB(t, func(t *testing.T, tdb *TestDB) {\n\t\tmusic := NewMigrator(WithDialect(tdb.Dialect), WithTableName(\"music_migrations\"))\n\t\tcontacts := NewMigrator(WithDialect(tdb.Dialect), WithTableName(\"contacts_migrations\"))\n\n\t\t// Use the same connection for both sets of migrations\n\t\tdb := tdb.Connect(t)\n\t\tdefer func() { _ = db.Close() }()\n\n\t\t// Apply the Music migrations\n\t\terr := music.Apply(db, testMigrations(t, \"music\"))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to apply music migrations: %s\", err)\n\t\t}\n\n\t\t// ... then the Contacts Migrations\n\t\terr = contacts.Apply(db, testMigrations(t, \"contacts\"))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to apply contact migrations: %s\", err)\n\t\t}\n\n\t\t// Then run a SELECT COUNT(*) query on each table to ensure that all of the\n\t\t// expected tables are co-existing in the same database and that they all\n\t\t// contain the expected number of rows (this approach is admittedly odd,\n\t\t// but it relies only on ANSI SQL code, so it should run on any SQL database).\n\t\texpectedRowCounts := map[string]int{\n\t\t\t\"music_migrations\": 3,\n\t\t\t\"contacts_migrations\": 3,\n\t\t\t\"contacts\": 1,\n\t\t\t\"phone_numbers\": 3,\n\t\t\t\"addresses\": 2,\n\t\t\t\"artists\": 0,\n\t\t\t\"albums\": 0,\n\t\t\t\"tracks\": 0,\n\t\t}\n\t\tfor table, expectedRowCount := range expectedRowCounts {\n\t\t\tqtn := tdb.Dialect.QuotedTableName(\"\", table)\n\t\t\tactualCount := -1 // Don't initialize to 0 because that's an expected value\n\t\t\tquery := fmt.Sprintf(\"SELECT COUNT(*) FROM %s\", qtn)\n\t\t\trows, err := db.Query(query)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(err)\n\t\t\t}\n\t\t\tif rows != nil && rows.Next() {\n\t\t\t\terr = rows.Scan(&actualCount)\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Error(err)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tt.Errorf(\"Expected rows\")\n\t\t\t}\n\t\t\tif actualCount != expectedRowCount {\n\t\t\t\tt.Errorf(\"Expected %d rows in table %s. Got %d\", expectedRowCount, qtn, actualCount)\n\t\t\t}\n\t\t}\n\t})\n}", "func TestOsIdmAll(t *testing.T) {\n\tidm := osidm.New()\n\n\tsidm := test.NewSuiteIdm(t, idm)\n\tsidm.TestAll(t)\n}" ]
[ "0.57152236", "0.5705283", "0.5404145", "0.53803885", "0.535705", "0.5251964", "0.517158", "0.5146774", "0.5102792", "0.50746727", "0.5027239", "0.50203794", "0.5005792", "0.4999631", "0.4996603", "0.4986261", "0.497155", "0.49601915", "0.49462947", "0.49434614", "0.49411762", "0.49409756", "0.4930555", "0.49227855", "0.49174199", "0.49133143", "0.49036992", "0.4884772", "0.48667085", "0.48644662", "0.48475263", "0.48471433", "0.48426747", "0.48401952", "0.48390806", "0.48347223", "0.48307934", "0.48256528", "0.48205593", "0.48160055", "0.48096305", "0.48090258", "0.4802155", "0.48018846", "0.47998792", "0.4799081", "0.47964478", "0.47950944", "0.47909117", "0.47843927", "0.47842538", "0.47753355", "0.47632042", "0.4751336", "0.4739923", "0.47317123", "0.47279397", "0.47254378", "0.47207212", "0.47057974", "0.4704658", "0.47024092", "0.46961334", "0.46940246", "0.46933678", "0.4691183", "0.46809423", "0.46748683", "0.4669692", "0.46667188", "0.4661458", "0.46513134", "0.4650164", "0.46427062", "0.46420044", "0.46324396", "0.46285215", "0.4628411", "0.46246517", "0.46236572", "0.46208665", "0.4620639", "0.46176714", "0.46147907", "0.4603102", "0.46024048", "0.45990583", "0.45963845", "0.45959756", "0.45918244", "0.45912942", "0.4590175", "0.45860484", "0.45857972", "0.4581862", "0.45752713", "0.4572936", "0.45708027", "0.45673725", "0.45670837" ]
0.88563794
0
ListBuckets : List buckets on remote
func (o *ObjectStorage) ListBuckets() (*s3.ListBucketsOutput, error) { return o.Client.ListBuckets(nil) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func remoteBucketList(remoteURLObj interface{}) (interface{}, error) {\n\tremoteURL := remoteURLObj.(string)\n\treturn couchbase.GetBucketList(remoteURL)\n}", "func (m *memClient) ListBuckets(ctx context.Context) ([]string, error) {\n\treturn nil, errors.New(\"unimplemented\")\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) *appError {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n response, _ := s3Request(s3, \"\", \"GET\", \"/\", make(map[string][]string), \"\")\n listBucketsResp := &ListBucketsResp{}\n xml.NewDecoder(strings.NewReader(response.Body)).Decode(listBucketsResp)\n buckets := []string{}\n for _, bucket := range listBucketsResp.Buckets {\n buckets = append(buckets, bucket.Name)\n }\n rendering.JSON(w, http.StatusOK, buckets)\n\n return nil\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) *appError {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n response, _ := s3Request(s3, \"\", \"GET\", \"/\", make(map[string][]string), \"\")\n listBucketsResp := &ListBucketsResp{}\n xml.NewDecoder(strings.NewReader(response.Body)).Decode(listBucketsResp)\n buckets := []string{}\n for _, bucket := range listBucketsResp.Buckets {\n buckets = append(buckets, bucket.Name)\n }\n rendering.JSON(w, http.StatusOK, buckets)\n\n return nil\n}", "func (service *S3Service) ListBuckets() ([]*S3Bucket,error) {\n request := service.newS3Request().prepare()\n response,err := request.execute(service.client)\n if err != nil {\n return nil, err\n }\n\n defer response.Close()\n\n // FIXME - process list of buckets\n return make([]*S3Bucket,0),nil\n}", "func (b *fakeBosClient) ListBuckets() (*api.ListBucketsResult, error) {\n\treturn nil, fmt.Errorf(\"test\")\n}", "func listBuckets(sess *session.Session, prefix string, t *testing.T) error {\n\t// Create S3 service client\n\tsvc := s3.New(sess)\n\n\tresult, err := svc.ListBuckets(nil)\n\tif err != nil {\n\t\tt.Log(\"Could not list buckets\")\n\t\treturn err\n\t}\n\n\tfor _, b := range result.Buckets {\n\t\tif strings.HasPrefix(*b.Name, prefix) {\n\t\t\tt.Log(*b.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (l *pydioObjects) ListBuckets(ctx context.Context) ([]minio.BucketInfo, error) {\n\n\treturn []minio.BucketInfo{\n\t\t{Name: \"io\", Created: time.Now()},\n\t\t{Name: \"data\", Created: time.Now()},\n\t}, nil\n\n}", "func RemoteBucketList(remoteURL string) ([]couchbase.BucketInfo, error) {\n\tbucketInfosObj, err := simple_utils.ExecWithTimeout2(remoteBucketList, remoteURL, base.DefaultHttpTimeout, logger_utils)\n\tif bucketInfosObj != nil {\n\t\treturn bucketInfosObj.([]couchbase.BucketInfo), err\n\t} else {\n\t\treturn nil, err\n\t}\n}", "func (taker TakerStorageGCP) ListBuckets(project *reportProject) (gcpBuckets []*storage.Bucket, err error) {\n\tif objResponse, objErr := taker.storageService.Buckets.List(project.gcpProject.ProjectId).Do(); objErr == nil {\n\t\tgcpBuckets = objResponse.Items\n\t} else {\n\t\terr = objErr\n\t}\n\treturn\n}", "func (s *ProviderGRPC) ListBuckets(ctx context.Context, req *pb.ListBucketsRequest) (*pb.ListBucketsResponse, error) {\n\tif req.Project.Id == \"\" {\n\t\treturn nil, errors.New(\"Project ID is required\")\n\t}\n\tvar buckets []*pb.Bucket\n\tit := s.client.Buckets(ctx, req.Project.Id)\n\tfor {\n\t\tbattrs, err := it.Next()\n\t\tif err == 
iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Bucket iterator failed: %v\", err)\n\t\t}\n\t\tbuckets = append(buckets, &pb.Bucket{\n\t\t\tName: battrs.Name,\n\t\t})\n\t}\n\treturn &pb.ListBucketsResponse{Buckets: buckets}, nil\n}", "func ListBuckets(svc *s3.S3) []*s3.Bucket {\n\n\tresult, err := svc.ListBuckets(nil)\n\tExitErrorf(\"Unable to list buckets, %v\", err)\n\n\tfmt.Println(\"Buckets:\")\n\tfor _, b := range result.Buckets {\n\t\tfmt.Printf(\"* %s created on %s\\n\", aws.StringValue(b.Name), aws.TimeValue(b.CreationDate))\n\t}\n\n\treturn result.Buckets\n}", "func getBucketList(arg string) error {\n\taddress, err := util.GetAddress(arg)\n\tif err != nil {\n\t\treturn output.NewError(output.AddressError, \"\", err)\n\t}\n\tbl, err := getBucketListByAddress(address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar bucketlist []*bucket\n\tfor _, b := range bl.Buckets {\n\t\tbucket, err := newBucket(b)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tbucketlist = append(bucketlist, bucket)\n\t}\n\tmessage := bucketlistMessage{\n\t\tNode: config.ReadConfig.Endpoint,\n\t\tBucketlist: bucketlist,\n\t}\n\tfmt.Println(message.String())\n\treturn nil\n}", "func ListBuckets(showCreationDate bool) error {\r\n\r\n\tresult, err := s3Clinet.ListBuckets(nil)\r\n\r\n\tif err != nil {\r\n\t\tif awsErr, ok := err.(awserr.Error); ok {\r\n\t\t\treturn errors.New(awsErr.Message())\r\n\t\t}\r\n\t\treturn errors.New(err.Error())\r\n\t}\r\n\r\n\tif showCreationDate {\r\n\t\tfor _, b := range result.Buckets {\r\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", aws.TimeValue(b.CreationDate), aws.StringValue(b.Name))\r\n\t\t}\r\n\t} else {\r\n\t\tfor _, b := range result.Buckets {\r\n\t\t\tfmt.Printf(\"%s\\n\", aws.StringValue(b.Name))\r\n\t\t}\r\n\t}\r\n\r\n\treturn nil\r\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) {\n\n\tsvc := s3.New(sess)\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\t// get bucket list fomr s3 api\n\tresult, err := svc.ListBuckets(nil)\n\n\tif err != nil {\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\tpageVars.ErrorM = awsErr.Message()\n\t\t} else {\n\t\t\tpageVars.ErrorM = \"Failed to load buckets list\"\n\t\t}\n\t} else {\n\t\tpageVars.BList = result.Buckets\n\t}\n\n\trender(w, \"bucketlist\", pageVars)\n}", "func (client *Client) ListBuckets(request *ListBucketsRequest) (response *ListBucketsResponse, err error) {\n\tresponse = CreateListBucketsResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}", "func (b *Buckets) RemoteBuckets(ctx context.Context, id thread.ID) (list []Info, err error) {\n\tctx = b.Context(ctx)\n\tvar threads []cmd.Thread\n\tif id.Defined() {\n\t\tthreads = []cmd.Thread{{ID: id}}\n\t} else {\n\t\tthreads = b.clients.ListThreads(ctx, true)\n\t}\n\tfor _, t := range threads {\n\t\tctx = common.NewThreadIDContext(ctx, t.ID)\n\t\tres, err := b.clients.Buckets.List(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, root := range res.Roots {\n\t\t\tinfo, err := pbRootToInfo(root)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tlist = append(list, info)\n\t\t}\n\t}\n\treturn list, nil\n}", "func (rpcMethod *RPCMethod) listKeys(response *ResponseParameters) error {\n\n\t//open a read transaction\n\trpcMethod.rpcServer.boltDB.View(func(tx *bolt.Tx) error {\n\t\tvar cursor *bolt.Cursor\n\t\tcursor = tx.Cursor()\n\n\t\t//append to reselt the list of buckets\n\t\tresponse.Result = make([]interface{}, 0, 10)\n\t\tfor k, _ := cursor.First(); k != nil; k, _ = 
cursor.Next() {\n\t\t\trpcMethod.rpcServer.logger.Println(\"BUCKET \", string(k))\n\t\t\tresponse.Result = append(response.Result, string(k))\n\t\t}\n\n\t\treturn nil\n\t})\n\n\tresponse.Error = nil\n\n\treturn nil\n\n}", "func (rpcMethod *RPCMethod) listKeys(response *ResponseParameters) error {\n\n\t//open a read transaction\n\trpcMethod.rpcServer.boltDB.View(func(tx *bolt.Tx) error {\n\t\tvar cursor *bolt.Cursor\n\t\tcursor = tx.Cursor()\n\n\t\t//append to reselt the list of buckets\n\t\tresponse.Result = make([]interface{}, 0, 10)\n\t\tfor k, _ := cursor.First(); k != nil; k, _ = cursor.Next() {\n\t\t\trpcMethod.rpcServer.logger.Println(\"BUCKET \", string(k))\n\t\t\tresponse.Result = append(response.Result, string(k))\n\t\t}\n\n\t\treturn nil\n\t})\n\n\tresponse.Error = nil\n\n\treturn nil\n\n}", "func (t *targetrunner) listbucket(w http.ResponseWriter, r *http.Request, bucket string) (tag string, ok bool) {\n\tvar (\n\t\tjsbytes []byte\n\t\terrstr string\n\t\terrcode int\n\t)\n\tislocal := t.bmdowner.get().islocal(bucket)\n\terrstr, errcode = t.checkLocalQueryParameter(bucket, r, islocal)\n\tif errstr != \"\" {\n\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\treturn\n\t}\n\tuseCache, errstr, errcode := t.checkCacheQueryParameter(r)\n\tif errstr != \"\" {\n\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\treturn\n\t}\n\tmsg := &GetMsg{}\n\tif t.readJSON(w, r, msg) != nil {\n\t\treturn\n\t}\n\tif islocal {\n\t\ttag = \"local\"\n\t\tif errstr, ok = t.doLocalBucketList(w, r, bucket, msg); errstr != \"\" {\n\t\t\tt.invalmsghdlr(w, r, errstr)\n\t\t}\n\t\treturn // ======================================>\n\t}\n\t// cloud bucket\n\tif useCache {\n\t\ttag = \"cloud cached\"\n\t\tjsbytes, errstr, errcode = t.listCachedObjects(bucket, msg)\n\t} else {\n\t\ttag = \"cloud\"\n\t\tjsbytes, errstr, errcode = getcloudif().listbucket(t.contextWithAuth(r), bucket, msg)\n\t}\n\tif errstr != \"\" {\n\t\tif errcode == 0 {\n\t\t\tt.invalmsghdlr(w, r, errstr)\n\t\t} else {\n\t\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\t}\n\t\treturn\n\t}\n\tok = t.writeJSON(w, r, jsbytes, \"listbucket\")\n\treturn\n}", "func (api *bucketAPI) ApisrvList(ctx context.Context, opts *api.ListWatchOptions) ([]*objstore.Bucket, error) {\n\tif api.ct.resolver != nil {\n\t\tapicl, err := api.ct.apiClient()\n\t\tif err != nil {\n\t\t\tapi.ct.logger.Errorf(\"Error creating API server clent. 
Err: %v\", err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn apicl.ObjstoreV1().Bucket().List(context.Background(), opts)\n\t}\n\n\t// List from local cache\n\tctkitObjs, err := api.List(ctx, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret []*objstore.Bucket\n\tfor _, obj := range ctkitObjs {\n\t\tret = append(ret, &obj.Bucket)\n\t}\n\treturn ret, nil\n}", "func (c *Client) GetBuckets() ([]string, error) {\n\tres, err := c.do(\"GET\", \"/buckets?buckets=true\", nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif res.StatusCode != 200 {\n\t\tres.Body.Close()\n\t\treturn nil, fmt.Errorf(\"Status Code %d\", res.StatusCode)\n\t}\n\n\tbmap := make(map[string][]string)\n\tdec := json.NewDecoder(res.Body)\n\terr = dec.Decode(&bmap)\n\tres.Body.Close()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error decoding body: %s\", err.Error())\n\t}\n\n\tstrs, ok := bmap[\"buckets\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"Unexpected body formatting.\")\n\t}\n\treturn strs, nil\n}", "func (*hdfsProvider) ListBuckets(cmn.QueryBcks) (buckets cmn.Bcks, errCode int, err error) {\n\tdebug.Assert(false)\n\treturn\n}", "func getBucketListByVoter(addr string, offset, limit uint32) error {\n\taddress, err := util.GetAddress(addr)\n\tif err != nil {\n\t\treturn output.NewError(output.AddressError, \"\", err)\n\t}\n\tbl, err := getBucketListByVoterAddress(address, offset, limit)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar bucketlist []*bucket\n\tfor _, b := range bl.Buckets {\n\t\tbucket, err := newBucket(b)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tbucketlist = append(bucketlist, bucket)\n\t}\n\tmessage := bucketlistMessage{\n\t\tNode: config.ReadConfig.Endpoint,\n\t\tBucketlist: bucketlist,\n\t}\n\tfmt.Println(message.String())\n\treturn nil\n}", "func getBucketList(method, addr string, args ...string) (err error) {\n\toffset, limit := uint64(0), uint64(1000)\n\tif len(args) > 0 {\n\t\toffset, err = strconv.ParseUint(args[0], 10, 64)\n\t\tif err != nil {\n\t\t\treturn output.NewError(output.ValidationError, \"invalid offset\", err)\n\t\t}\n\t}\n\tif len(args) > 1 {\n\t\tlimit, err = strconv.ParseUint(args[1], 10, 64)\n\t\tif err != nil {\n\t\t\treturn output.NewError(output.ValidationError, \"invalid limit\", err)\n\t\t}\n\t}\n\tswitch method {\n\tcase bucketlistMethodByVoter:\n\t\treturn getBucketListByVoter(addr, uint32(offset), uint32(limit))\n\tcase bucketlistMethodByCandidate:\n\t\treturn getBucketListByCand(addr, uint32(offset), uint32(limit))\n\t}\n\treturn output.NewError(output.InputError, \"unknown <method>\", nil)\n}", "func ListBucket(a S3Account, bucket string) {\n\treq := NewRequest(a, \"GET\", bucket, \"/\", nil)\n\tbody := req.Send()\n\tlbr := ListBucketResult{}\n\txml.Unmarshal(body, &lbr)\n\tfor _,cp := range lbr.CommonPrefixes {\n\t\tcp.pp()\n\t}\n\tfor _,c := range lbr.Contents {\n\t\tc.pp()\n\t}\n}", "func (api *bucketAPI) List(ctx context.Context, opts *api.ListWatchOptions) ([]*Bucket, error) {\n\tvar objlist []*Bucket\n\tobjs, err := api.ct.List(\"Bucket\", ctx, opts)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, obj := range objs {\n\t\tswitch tp := obj.(type) {\n\t\tcase *Bucket:\n\t\t\teobj := obj.(*Bucket)\n\t\t\tobjlist = append(objlist, eobj)\n\t\tdefault:\n\t\t\tlog.Fatalf(\"Got invalid object type %v while looking for Bucket\", tp)\n\t\t}\n\t}\n\n\treturn objlist, nil\n}", "func listCloudBucketsHandler(c *cli.Context) (err error) {\n\tbck := cmn.Bck{\n\t\tName: c.Args().First(),\n\t\tProvider: cmn.Cloud,\n\t}\n\tif bck.Name == \"\" 
{\n\t\treturn listBucketNames(c, bck)\n\t}\n\n\tbck.Name = strings.TrimSuffix(bck.Name, \"/\")\n\treturn listBucketObj(c, bck)\n}", "func listBuckets() (resp *s3.ListBucketsOutput) {\n\tresp, err := s3session.ListBuckets(&s3.ListBucketsInput{})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn resp\n}", "func (s *S3Store) ListBuckets() (bkts []string, err error) {\n\tvar req s3.ListBucketsInput\n\tvar resp *s3.ListBucketsOutput\n\tif resp, err = s.svc.ListBuckets(&req); err != nil {\n\t\treturn\n\t}\n\tfor _, bkt := range resp.Buckets {\n\t\tbkts = append(bkts, aws.StringValue(bkt.Name))\n\t}\n\treturn\n}", "func (c *defaultGcsClient) GetBuckets(ctxIn context.Context, project string) (buckets []string, err error) {\n\tctx, span := trace.StartSpan(ctxIn, \"(*defaultGcsClient).GetBuckets\")\n\tdefer span.End()\n\n\tbucketsIterator := c.client.Buckets(ctx, project)\n\tfor {\n\t\t// error or not found\n\t\tb, err := bucketsIterator.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn []string{}, errors.Wrap(err, fmt.Sprintf(\"Buckets.Next() failed for project %s\", project))\n\t\t}\n\t\tbuckets = append(buckets, b.Name)\n\t}\n\treturn buckets, err\n}", "func (client *Client) ListBucketsWithCallback(request *ListBucketsRequest, callback func(response *ListBucketsResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *ListBucketsResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.ListBuckets(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}", "func (m mockService) ListBuckets(ctx context.Context, params *s3.ListBucketsInput, optFns ...func(*s3.Options)) (*s3.ListBucketsOutput, error) {\n\treturn m.listBucketsOutput, nil\n}", "func (cache *SiaCacheLayer) ListBuckets() (buckets []SiaBucketInfo, e *SiaServiceError) {\n\tcache.debugmsg(\"SiaCacheLayer.ListBuckets\")\n\n\treturn cache.dbListBuckets()\n}", "func (client *Client) ListBucketsWithChan(request *ListBucketsRequest) (<-chan *ListBucketsResponse, <-chan error) {\n\tresponseChan := make(chan *ListBucketsResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer close(errChan)\n\t\tresponse, err := client.ListBuckets(request)\n\t\tif err != nil {\n\t\t\terrChan <- err\n\t\t} else {\n\t\t\tresponseChan <- response\n\t\t}\n\t})\n\tif err != nil {\n\t\terrChan <- err\n\t\tclose(responseChan)\n\t\tclose(errChan)\n\t}\n\treturn responseChan, errChan\n}", "func (bm *BucketManager) GetBuckets() ([]*BucketSettings, error) {\n\treq := &gocbcore.HttpRequest{\n\t\tService: gocbcore.ServiceType(MgmtService),\n\t\tPath: \"/pools/default/buckets\",\n\t\tMethod: \"GET\",\n\t}\n\n\tresp, err := bm.httpClient.DoHttpRequest(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif resp.StatusCode != 200 {\n\t\tdata, err := ioutil.ReadAll(resp.Body)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\terr = resp.Body.Close()\n\t\tif err != nil {\n\t\t\tlogDebugf(\"Failed to close socket (%s)\", err)\n\t\t}\n\t\treturn nil, networkError{statusCode: resp.StatusCode, message: string(data)}\n\t}\n\n\tvar bucketsData []*bucketDataIn\n\tjsonDec := json.NewDecoder(resp.Body)\n\terr = jsonDec.Decode(&bucketsData)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar buckets []*BucketSettings\n\tfor _, bucketData := range bucketsData 
{\n\t\tbuckets = append(buckets, bucketDataInToSettings(bucketData))\n\t}\n\n\treturn buckets, nil\n}", "func listp(client *storage.Client, projectId string) ([]*storage.BucketAttrs, error) {\n ctx := context.Background()\n var buckets []*storage.BucketAttrs\n it := client.Buckets(ctx, projectId)\n for {\n battrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n buckets = append(buckets, battrs)\n }\n return buckets, nil\n}", "func (c *Client) ListBucket(bucket string, startAt string, maxKeys int) (items []*Item, err error) {\n\tif maxKeys < 0 {\n\t\treturn nil, errors.New(\"invalid negative maxKeys\")\n\t}\n\tmarker := startAt\n\tfor len(items) < maxKeys {\n\t\tfetchN := maxKeys - len(items)\n\t\tif fetchN > maxList {\n\t\t\tfetchN = maxList\n\t\t}\n\t\tvar bres listBucketResults\n\n\t\turl_ := fmt.Sprintf(\"%s?marker=%s&max-keys=%d\",\n\t\t\tc.bucketURL(bucket), url.QueryEscape(marker), fetchN)\n\n\t\t// Try the enumerate three times, since Amazon likes to close\n\t\t// https connections a lot, and Go sucks at dealing with it:\n\t\t// https://code.google.com/p/go/issues/detail?id=3514\n\t\tconst maxTries = 5\n\t\tfor try := 1; try <= maxTries; try++ {\n\t\t\ttime.Sleep(time.Duration(try-1) * 100 * time.Millisecond)\n\t\t\treq := newReq(url_)\n\t\t\tc.Auth.SignRequest(req)\n\t\t\tres, err := c.transport().RoundTrip(req)\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif res.StatusCode != http.StatusOK {\n\t\t\t\tif res.StatusCode < 500 {\n\t\t\t\t\tbody, _ := ioutil.ReadAll(io.LimitReader(res.Body, 1<<20))\n\t\t\t\t\taerr := &Error{\n\t\t\t\t\t\tOp: \"ListBucket\",\n\t\t\t\t\t\tCode: res.StatusCode,\n\t\t\t\t\t\tBody: body,\n\t\t\t\t\t\tHeader: res.Header,\n\t\t\t\t\t}\n\t\t\t\t\taerr.parseXML()\n\t\t\t\t\tres.Body.Close()\n\t\t\t\t\treturn nil, aerr\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tbres = listBucketResults{}\n\t\t\t\tvar logbuf bytes.Buffer\n\t\t\t\terr = xml.NewDecoder(io.TeeReader(res.Body, &logbuf)).Decode(&bres)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Error parsing s3 XML response: %v for %q\", err, logbuf.Bytes())\n\t\t\t\t} else if bres.MaxKeys != fetchN || bres.Name != bucket || bres.Marker != marker {\n\t\t\t\t\terr = fmt.Errorf(\"Unexpected parse from server: %#v from: %s\", bres, logbuf.Bytes())\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tres.Body.Close()\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries-1 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tlog.Print(err)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\tfor _, it := range bres.Contents {\n\t\t\tif it.Key == marker && it.Key != startAt {\n\t\t\t\t// Skip first dup on pages 2 and higher.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif it.Key < startAt {\n\t\t\t\treturn nil, fmt.Errorf(\"Unexpected response from Amazon: item key %q but wanted greater than %q\", it.Key, startAt)\n\t\t\t}\n\t\t\titems = append(items, it)\n\t\t\tmarker = it.Key\n\t\t}\n\t\tif !bres.IsTruncated {\n\t\t\t// log.Printf(\"Not truncated. so breaking. 
items = %d; len Contents = %d, url = %s\", len(items), len(bres.Contents), url_)\n\t\t\tbreak\n\t\t}\n\t}\n\treturn items, nil\n}", "func (driver donutDriver) GetBuckets() ([]string, error) {\n\treturn nil, notImplemented()\n}", "func (svc *GCSclient) list(bucketName string, filePrefix string) ([]string, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 300*time.Second)\n\tdefer cancel()\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\tit := svc.Bucket(bucketName).Objects(ctx, &storage.Query{Prefix: filePrefix})\n\tfor {\n\t\tobj, err := it.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, obj.Name)\n\t}\n\tsort.Strings(files)\n\treturn files, nil\n}", "func BucketList(kind, cloud string) ([]string, error) {\n\tvar ret []string\n\n\tkind = strings.ToLower(kind)\n\tcloud = strings.ToLower(cloud)\n\tb, ok := bucketType[kind]\n\tif !ok {\n\t\treturn ret, errors.ErrBucketInvalid\n\t}\n\tt, ok := b[cloud]\n\tif !ok {\n\t\treturn ret, errors.ErrBucketInvalid\n\t}\n\n\treturn EnumValues(t)\n}", "func (cbi *CandidatesBucketsIndexer) GetBuckets(height uint64, offset, limit uint32) ([]byte, uint64, error) {\n\tif height > cbi.latestBucketsHeight {\n\t\theight = cbi.latestBucketsHeight\n\t}\n\tbuckets := &iotextypes.VoteBucketList{}\n\tret, err := cbi.kvStore.Get(StakingBucketsNamespace, byteutil.Uint64ToBytesBigEndian(height))\n\tif errors.Cause(err) == db.ErrNotExist {\n\t\td, err := proto.Marshal(buckets)\n\t\treturn d, height, err\n\t}\n\tif err != nil {\n\t\treturn nil, height, err\n\t}\n\tif err := proto.Unmarshal(ret, buckets); err != nil {\n\t\treturn nil, height, err\n\t}\n\tlength := uint32(len(buckets.Buckets))\n\tif offset >= length {\n\t\td, err := proto.Marshal(&iotextypes.VoteBucketList{})\n\t\treturn d, height, err\n\t}\n\tend := offset + limit\n\tif end > uint32(len(buckets.Buckets)) {\n\t\tend = uint32(len(buckets.Buckets))\n\t}\n\tbuckets.Buckets = buckets.Buckets[offset:end]\n\td, err := proto.Marshal(buckets)\n\treturn d, height, err\n}", "func (bucket Bucket) ListObjects(options ...Option) (ListObjectsResult, error) {\n\tvar out ListObjectsResult\n\n\toptions = append(options, EncodingType(\"url\"))\n\tparams, err := getRawParams(options)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\tresp, err := bucket.do(\"GET\", \"\", params, options, nil, nil)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tdefer resp.Body.Close()\n\n\terr = xmlUnmarshal(resp.Body, &out)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\terr = decodeListObjectsResult(&out)\n\treturn out, err\n}", "func (sb S3Buckets) getAll(configObj config.Config) ([]*string, error) {\n\tinput := &s3.ListBucketsInput{}\n\toutput, err := sb.Client.ListBuckets(input)\n\tif err != nil {\n\t\treturn nil, errors.WithStackTrace(err)\n\t}\n\n\tvar names []*string\n\ttotalBuckets := len(output.Buckets)\n\tif totalBuckets == 0 {\n\t\treturn nil, nil\n\t}\n\n\tbatchSize := sb.MaxBatchSize()\n\ttotalBatches := int(math.Ceil(float64(totalBuckets) / float64(batchSize)))\n\tbatchCount := 1\n\n\t// Batch the get operation\n\tfor batchStart := 0; batchStart < totalBuckets; batchStart += batchSize {\n\t\tbatchEnd := int(math.Min(float64(batchStart)+float64(batchSize), float64(totalBuckets)))\n\t\tlogging.Logger.Debugf(\"Getting - %d-%d buckets of batch %d/%d\", batchStart+1, batchEnd, batchCount, totalBatches)\n\t\ttargetBuckets := output.Buckets[batchStart:batchEnd]\n\t\tbucketNames, err := 
sb.getBucketNames(targetBuckets, configObj)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tnames = append(names, bucketNames...)\n\t\tbatchCount++\n\t}\n\n\treturn names, nil\n}", "func (h *Handler) List(bucket string) ([]string, error) {\n\tklog.V(10).Info(\"List S3 Objects \", bucket)\n\n\treq := h.Client.ListObjectsRequest(&s3.ListObjectsInput{Bucket: &bucket})\n\tp := s3.NewListObjectsPaginator(req)\n\n\tvar keys []string\n\n\tfor p.Next(context.TODO()) {\n\t\tpage := p.CurrentPage()\n\t\tfor _, obj := range page.Contents {\n\t\t\tkeys = append(keys, *obj.Key)\n\t\t}\n\t}\n\n\tif err := p.Err(); err != nil {\n\t\tklog.Error(\"failed to list objects. error: \", err)\n\t\treturn nil, err\n\t}\n\n\tklog.V(10).Info(\"List S3 Objects result \", keys)\n\n\treturn keys, nil\n}", "func (t *targetrunner) listCachedObjects(bucket string, msg *GetMsg) (outbytes []byte, errstr string, errcode int) {\n\treslist, err := t.prepareLocalObjectList(bucket, msg)\n\tif err != nil {\n\t\treturn nil, err.Error(), 0\n\t}\n\n\toutbytes, err = json.Marshal(reslist)\n\treturn\n}", "func getBucketListByCand(candName string, offset, limit uint32) error {\n\tbl, err := getBucketListByCandidateName(candName, offset, limit)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar bucketlist []*bucket\n\tfor _, b := range bl.Buckets {\n\t\tbucket, err := newBucket(b)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tbucketlist = append(bucketlist, bucket)\n\t}\n\tmessage := bucketlistMessage{\n\t\tNode: config.ReadConfig.Endpoint,\n\t\tBucketlist: bucketlist,\n\t}\n\tfmt.Println(message.String())\n\treturn nil\n}", "func (s GRPCServer) List(ctx context.Context, op *proto.Operation) (*proto.ListResult, error) {\n\tif len(op.Bucket) == 0 {\n\t\treturn nil, errors.New(\"bucket is required\")\n\t}\n\n\tkeys := [][]byte{}\n\tif err := s.DB.View(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket(op.Bucket)\n\t\tif b == nil {\n\t\t\treturn nil\n\t\t}\n\n\t\tb.ForEach(func(k, v []byte) error {\n\t\t\tkeys = append(keys, k)\n\t\t\treturn nil\n\t\t})\n\t\treturn nil\n\t}); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &proto.ListResult{Keys: keys}, nil\n}", "func (api BucketAPI3L) ListBuckets3L(ctx context.Context, region, limit, startAt string) (result ListedBuckets, err error) {\n\tif err = api.Token.RefreshTokenIfRequired(api.Auth); err != nil {\n\t\treturn\n\t}\n\n\tpath := api.Auth.Host + api.BucketsAPIPath\n\n\treturn listBuckets(ctx, api.RateLimiter, path, region, limit, startAt, api.Token.Bearer().AccessToken)\n}", "func (server *minioAPI) listBucketsHandler(w http.ResponseWriter, req *http.Request) {\n\tacceptsContentType := getContentType(req)\n\tbuckets, err := server.storage.ListBuckets()\n\tswitch err := err.(type) {\n\tcase nil:\n\t\t{\n\t\t\tresponse := generateBucketsListResult(buckets)\n\t\t\tw.Write(writeObjectHeadersAndResponse(w, response, acceptsContentType))\n\t\t}\n\tcase mstorage.ImplementationError:\n\t\t{\n\t\t\tlog.Println(err)\n\t\t\terror := errorCodeError(InternalError)\n\t\t\terrorResponse := getErrorResponse(error, \"\")\n\t\t\tw.WriteHeader(error.HTTPStatusCode)\n\t\t\tw.Write(writeErrorResponse(w, errorResponse, acceptsContentType))\n\t\t}\n\tcase mstorage.BackendCorrupted:\n\t\t{\n\t\t\tlog.Println(err)\n\t\t\terror := errorCodeError(InternalError)\n\t\t\terrorResponse := getErrorResponse(error, \"\")\n\t\t\tw.WriteHeader(error.HTTPStatusCode)\n\t\t\tw.Write(writeErrorResponse(w, errorResponse, acceptsContentType))\n\t\t}\n\t}\n}", "func generateListBucketsResponse(buckets []BucketInfo) 
ListBucketsResponse {\n\tvar listbuckets []Bucket\n\tvar data = ListBucketsResponse{}\n\tvar owner = Owner{}\n\n\towner.ID = globalMinioDefaultOwnerID\n\tfor _, bucket := range buckets {\n\t\tvar listbucket = Bucket{}\n\t\tlistbucket.Name = bucket.Name\n\t\tlistbucket.CreationDate = bucket.Created.UTC().Format(timeFormatAMZLong)\n\t\tlistbuckets = append(listbuckets, listbucket)\n\t}\n\n\tdata.Owner = owner\n\tdata.Buckets.Buckets = listbuckets\n\n\treturn data\n}", "func (o *ListBuckets) Execute(args []string) error {\n\tclient, err := Client()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tresponse, err := client.ListBuckets()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif opts.Verbose {\n\t\tfmt.Printf(\"%-30s%-35s%-15s\\n\", \"Name\", \"Id\", \"Type\")\n\t\tfor _, bucket := range response {\n\t\t\tfmt.Printf(\"%-30s%-35s%-15s\\n\", bucket.Name, bucket.ID, bucket.BucketType)\n\t\t}\n\t} else {\n\t\tfor _, bucket := range response {\n\t\t\tfmt.Println(bucket.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "func listb(client *storage.Client, bucketName string) ([]*storage.ObjectAttrs, error) {\n ctx := context.Background()\n var objects []*storage.ObjectAttrs\n it := client.Bucket(bucketName).Objects(ctx, nil)\n for {\n oattrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n objects = append(objects, oattrs)\n }\n return objects, nil\n}", "func getAllBuckets() ([]*Bucket, error) {\n\tbuckets := make([]*Bucket, 0)\n\n\tsvc, err := s3Service(\"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp, err := svc.ListBuckets(&s3.ListBucketsInput{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, b := range resp.Buckets {\n\t\tbucket := &Bucket{Name: *b.Name, CreationDate: *b.CreationDate}\n\t\tbuckets = append(buckets, bucket)\n\t}\n\treturn buckets, nil\n}", "func ListS3Bucket(awsregion string) string {\n\t// aws session credentials\n\taws_session := session.Must(session.NewSessionWithOptions(session.Options{\n\t\tSharedConfigState: session.SharedConfigEnable,\n\t}))\n\n\t// s3 session\n\ts3_session := s3.New(aws_session, aws.NewConfig().WithRegion(awsregion))\n\n\t// create bucket\n\tinput := &s3.ListBucketsInput{}\n\tresult, err := s3_session.ListBuckets(input)\n\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\tif len(result.Buckets) == 0 {\n\t\treturn \"Bucket list is empty\"\n\t}\n\treturn result.String()\n}", "func (a *FileStorageApiService) GetBuckets(Ctx _context.Context) ApiGetBucketsRequest {\n\treturn ApiGetBucketsRequest{\n\t\tApiService: a,\n\t\tCtx: Ctx,\n\t}\n}", "func (o *TemplateSummaryResources) GetBuckets() []TemplateSummaryBucket {\n\tif o == nil {\n\t\tvar ret []TemplateSummaryBucket\n\t\treturn ret\n\t}\n\n\treturn o.Buckets\n}", "func (s *Server) Buckets() []*Bucket {\n\treturn s.buckets\n}", "func (be *s3) List(t backend.Type, done <-chan struct{}) <-chan string {\n\tdebug.Log(\"s3.List\", \"listing %v\", t)\n\tch := make(chan string)\n\n\tprefix := be.s3path(t, \"\")\n\n\tlistresp := be.client.ListObjects(be.bucketname, prefix, true, done)\n\n\tgo func() {\n\t\tdefer close(ch)\n\t\tfor obj := range listresp {\n\t\t\tm := strings.TrimPrefix(obj.Key, prefix)\n\t\t\tif m == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tselect {\n\t\t\tcase ch <- m:\n\t\t\tcase <-done:\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn ch\n}", "func getBucketObjects(svc *s3.S3, sess *session.Session, bucketName string) {\n\n\tquery := &s3.ListObjectsV2Input{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: 
aws.String(prefix),\n\t}\n\n\t// Pagination Flag used to check if we need to go further (default is 1000 otherwise)\n\ttruncatedListing := true\n\tpageCount := 0\n\n\tfor truncatedListing && pageCount < maxPages {\n\t\tresp, err := svc.ListObjectsV2(query)\n\t\tpageCount++\n\n\t\tif err != nil {\n\t\t\tfmt.Println(err.Error())\n\t\t\treturn\n\t\t}\n\t\t// Get all objects for this page\n\t\tgetObjectsPage(resp, svc, bucketName)\n\n\t\t// Set continuation token\n\t\tquery.ContinuationToken = resp.NextContinuationToken\n\t\ttruncatedListing = *resp.IsTruncated\n\n\t\tif verbose == \"y\" {\n\t\t\tfmt.Printf(\"page Num %d, recCount %d \\n\", pageCount, numberOfRetrievedFiles)\n\t\t}\n\t}\n\n}", "func (s *storageBucketLister) List(selector labels.Selector) (ret []*v1beta1.StorageBucket, err error) {\n\terr = cache.ListAll(s.indexer, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*v1beta1.StorageBucket))\n\t})\n\treturn ret, err\n}", "func (a *FileStorageApiService) GetBucketsExecute(r ApiGetBucketsRequest) (MultipleBucket, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodGet\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MultipleBucket\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.Ctx, \"FileStorageApiService.GetBuckets\")\n\tif localBasePath == \"/\" {\n\t localBasePath = \"\"\n\t}\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/v2/file_storage/buckets\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\tif r.P_limit != nil {\n\t\tlocalVarQueryParams.Add(\"limit\", parameterToString(*r.P_limit, \"\"))\n\t}\n\tif r.P_offset != nil {\n\t\tlocalVarQueryParams.Add(\"offset\", parameterToString(*r.P_offset, \"\"))\n\t}\n\tif r.P_order != nil {\n\t\tlocalVarQueryParams.Add(\"order\", parameterToString(*r.P_order, \"\"))\n\t}\n\tif r.P_orderBy != nil {\n\t\tlocalVarQueryParams.Add(\"order_by\", parameterToString(*r.P_orderBy, \"\"))\n\t}\n\tif r.P_resourceOwnerId != nil {\n\t\tlocalVarQueryParams.Add(\"resource_owner_id\", parameterToString(*r.P_resourceOwnerId, \"\"))\n\t}\n\tif r.P_name != nil {\n\t\tlocalVarQueryParams.Add(\"name\", parameterToString(*r.P_name, \"\"))\n\t}\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\treq, err := a.client.prepareRequest(r.Ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, 
err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func mainAdminBucketRemoteList(ctx *cli.Context) error {\n\tcheckAdminBucketRemoteListSyntax(ctx)\n\n\t// Additional command specific theme customization.\n\tconsole.SetColor(\"RemoteListMessage\", color.New(color.Bold, color.FgHiGreen))\n\tconsole.SetColor(\"RemoteListEmpty\", color.New(color.FgYellow))\n\tconsole.SetColor(\"SourceBucket\", color.New(color.FgYellow))\n\tconsole.SetColor(\"TargetBucket\", color.New(color.FgYellow))\n\tconsole.SetColor(\"TargetURL\", color.New(color.FgHiWhite))\n\tconsole.SetColor(\"ARN\", color.New(color.FgCyan))\n\tconsole.SetColor(\"Arrow\", color.New(color.FgHiWhite))\n\n\t// Get the alias parameter from cli\n\targs := ctx.Args()\n\taliasedURL := args.Get(0)\n\t_, sourceBucket := url2Alias(aliasedURL)\n\n\t// Create a new MinIO Admin Client\n\tclient, err := newAdminClient(aliasedURL)\n\tfatalIf(err, \"Unable to initialize admin connection.\")\n\ttargets, e := client.ListRemoteTargets(globalContext, sourceBucket, ctx.String(\"service\"))\n\tfatalIf(probe.NewError(e).Trace(args...), \"Unable to list remote target\")\n\tprintRemotes(ctx, aliasedURL, targets)\n\treturn nil\n}", "func Hashlist(url string, secure bool, accesskey string, secretkey string, bucket string) string {\n\tlog.SetFlags(log.Lshortfile)\n\n\t// New returns an Amazon S3 compatible client object. 
API compatibility (v2 or v4) is automatically\n\t// determined based on the Endpoint value.\n\ts3Client, err := minio.New(url, accesskey, secretkey, secure)\n\tif err != nil {\n\t\tjc.SendString(fmt.Sprint(err))\n\t\treturn \"ERROR\"\n\t}\n\t// Create a done channel to control 'ListObjects' go routine.\n\tdoneCh := make(chan struct{})\n\n\t// Indicate to our routine to exit cleanly upon return.\n\tdefer close(doneCh)\n\n\t// List all objects from a bucket-name with a matching prefix.\n\tvar snapshots []string\n\tfor object := range s3Client.ListObjects(bucket, \"\", secure, doneCh) {\n\t\tif object.Err != nil {\n\t\t\tjc.SendString(fmt.Sprint(object.Err))\n\t\t\treturn \"ERROR\"\n\t\t}\n\t\tmatched, err := regexp.MatchString(\".hsh$\", object.Key)\n\t\tif err != nil {\n\t\t\treturn \"ERROR\"\n\t\t}\n\t\tif matched == true {\n\t\t\tsnapshots = append(snapshots, object.Key)\n\t\t\tsnapshots = append(snapshots, \"\\n\")\n\t\t}\n\t}\n\tif len(snapshots) > 0 {\n\t\treturn strings.Join(snapshots, \"\\n\")\n\t}\n\treturn \"ERROR\"\n}", "func (s *V3Backend) List(ctx context.Context, key string) ([]args.Pair, error) {\n\tresp, err := s.Client.Get(ctx, key, etcd.WithPrefix())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(resp.Kvs) == 0 {\n\t\treturn nil, errors.New(fmt.Sprintf(\"'%s' not found\", key))\n\t}\n\tresult := make([]args.Pair, 0)\n\tfor _, node := range resp.Kvs {\n\t\tresult = append(result, args.Pair{Key: string(node.Key), Value: node.Value})\n\t}\n\treturn result, nil\n}", "func (b Bucket) ListObject(args ...Params) (*ListBucketResult, error) {\n\tv := new(ListBucketResult)\n\n\terr := b.Do(\"GET\", \"\", nil, v, args...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn v, nil\n}", "func getObjects(storageCli *storage.Client, bucketName string) []string {\n\tvar objectList []string\n\tquery := &storage.Query{Prefix: \"\"}\n\n\tbucket := storageCli.Bucket(bucketName)\n\tobjects := bucket.Objects(context.Background(), query)\n\n\tfor {\n\t\tattrs, err := objects.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tframework.FailfWithOffset(2, \"Failed to get objects from bucket %s\", err)\n\t\t}\n\n\t\tobjectList = append(objectList, attrs.Name)\n\t}\n\n\treturn objectList\n}", "func ListObjects(bucketName string, keyPrefix string) (keysList []*string, err error) {\n\tsvc := s3.New(sessionutils.Session)\n\tres, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: aws.String(keyPrefix),\n\t})\n\n\tif err != nil {\n\t\tfmt.Printf(\"Error listing bucket:\\n%v\\n\", err)\n\t\treturn keysList, err\n\t}\n\n\tif len(res.Contents) > 0 {\n\t\tfor _, obj := range res.Contents {\n\t\t\tkeysList = append(keysList, obj.Key)\n\t\t}\n\t}\n\n\treturn keysList, nil\n\n}", "func (s *storage) Buckets() resource.Buckets {\n\treturn s.buckets\n}", "func ListTimeBuckets(cmd *cobra.Command, args []string) error {\n\n\tclient, err := auth.GetClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Parse all flags\n\n\tvar sid string\n\terr = flags.ParseFlag(cmd.Flags(), \"sid\", &sid)\n\tif err != nil {\n\t\treturn fmt.Errorf(`error parsing \"sid\": ` + err.Error())\n\t}\n\n\t// Silence Usage\n\tcmd.SilenceUsage = true\n\n\tresp, err := client.SearchService.ListTimeBuckets(sid)\n\tif err != nil {\n\t\treturn err\n\t}\n\tjsonx.Pprint(cmd, resp)\n\treturn nil\n}", "func (rt RouteTable) GetBuckets() (k []Bucket, err error) {\n\tbs := []Bucket{}\n\tb := rt.ht.GetBuckets()\n\tfor i, v := range b {\n\t\tbs[i] = KBucket{nodes: 
convertNetworkNodes(v)}\n\t}\n\n\treturn bs, nil\n}", "func (c *Client) ReadBuckets(t MetricType, o ...Modifier) ([]*Bucketpoint, error) {\n\to = prepend(o, c.Url(\"GET\", TypeEndpoint(t), DataEndpoint()))\n\n\tr, err := c.Send(o...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer r.Body.Close()\n\n\tif r.StatusCode == http.StatusOK {\n\t\tb, err := ioutil.ReadAll(r.Body)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Check for GaugeBucketpoint and so on for the rest.. uh\n\t\tbp := []*Bucketpoint{}\n\t\tif b != nil {\n\t\t\tif err = json.Unmarshal(b, &bp); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn bp, nil\n\t} else if r.StatusCode > 399 {\n\t\treturn nil, c.parseErrorResponse(r)\n\t}\n\n\treturn nil, nil\n}", "func (*ListBucketsResponse) Descriptor() ([]byte, []int) {\n\treturn file_s3_proto_s3_proto_rawDescGZIP(), []int{55}\n}", "func (g *gcs) List(ctx context.Context, remotePath string) (files []interface{}, err error) {\n\titer := g.bucket.Objects(g.context, nil)\n\tfor {\n\t\tattributes, err := iter.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfiles = append(files, attributes.Name)\n\t}\n\n\treturn\n}", "func (c *Client) ListKeys(bucket string) ([][]byte, error) {\n\treqstruct := &RpbListKeysReq{\n\t\tBucket: []byte(bucket),\n\t}\n\n\tresponse, err := c.ReqMultiResp(reqstruct, \"RpbListKeysReq\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tkeys := response.([][]byte)\n\n\treturn keys, nil\n}", "func (m *Planner) GetBuckets()([]PlannerBucketable) {\n return m.buckets\n}", "func (c *ConsulClient) List(ctx context.Context, prefix string) ([]Pair, error) {\n\tspan, ctx := opentracing.StartSpanFromContext(ctx, \"ConsulClient.List\")\n\tdefer span.Finish()\n\n\tregistryOperationCount.WithLabelValues(env, \"List\").Inc()\n\n\tstartTime := time.Now()\n\tdefer func() {\n\t\tregistryOperationTimeTaken.WithLabelValues(env, \"List\").Observe(time.Now().Sub(startTime).Seconds())\n\t}()\n\n\tkvs, _, err := c.client.KV().List(prefix, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpairs := []Pair{}\n\n\tfor _, kv := range kvs {\n\t\tpairs = append(pairs, Pair{\n\t\t\tKey: kv.Key,\n\t\t\tValue: kv.Value,\n\t\t})\n\t}\n\n\treturn pairs, nil\n}", "func (s storageBucketNamespaceLister) List(selector labels.Selector) (ret []*v1beta1.StorageBucket, err error) {\n\terr = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*v1beta1.StorageBucket))\n\t})\n\treturn ret, err\n}", "func (c *FakeFirebaseStorageBuckets) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.FirebaseStorageBucketList, err error) {\n\tobj, err := c.Fake.\n\t\tInvokes(testing.NewListAction(firebasestoragebucketsResource, firebasestoragebucketsKind, c.ns, opts), &v1alpha1.FirebaseStorageBucketList{})\n\n\tif obj == nil {\n\t\treturn nil, err\n\t}\n\n\tlabel, _, _ := testing.ExtractFromListOptions(opts)\n\tif label == nil {\n\t\tlabel = labels.Everything()\n\t}\n\tlist := &v1alpha1.FirebaseStorageBucketList{ListMeta: obj.(*v1alpha1.FirebaseStorageBucketList).ListMeta}\n\tfor _, item := range obj.(*v1alpha1.FirebaseStorageBucketList).Items {\n\t\tif label.Matches(labels.Set(item.Labels)) {\n\t\t\tlist.Items = append(list.Items, item)\n\t\t}\n\t}\n\treturn list, err\n}", "func (c *Client) ListBucketKeys(bucket string) ([]string, error) {\n\tres, err := c.do(\"GET\", \"/buckets/\"+bucket+\"/keys?keys=true\", nil)\n\tif err != nil {\n\t\treturn 
nil, err\n\t}\n\tif res.StatusCode != 200 {\n\t\tres.Body.Close()\n\t\treturn nil, fmt.Errorf(\"Status Code %d\", res.StatusCode)\n\t}\n\tbmap := make(map[string][]string)\n\tdec := json.NewDecoder(res.Body)\n\terr = dec.Decode(&bmap)\n\tres.Body.Close()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error decoding body: %s\", err.Error())\n\t}\n\tstrs, ok := bmap[\"keys\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"Unexpected body formatting.\")\n\t}\n\treturn strs, nil\n}", "func getAllS3Buckets() ([]types.Bucket, error) {\n\tout, err := client.ListBuckets(context.TODO(), &s3.ListBucketsInput{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn out.Buckets, nil\n}", "func (*ListBucketsRequest) Descriptor() ([]byte, []int) {\n\treturn file_s3_proto_s3_proto_rawDescGZIP(), []int{83}\n}", "func (b *fakeBosClient) ListObjects(bucket string, args *api.ListObjectsArgs) (\n\t*api.ListObjectsResult, error) {\n\tvar (\n\t\tmarker int\n\t\terr error\n\t)\n\tif args.Marker == \"\" {\n\t\tmarker, err = strconv.Atoi(bucket)\n\t} else {\n\t\tmarker, err = strconv.Atoi(args.Marker)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif marker < len(b.results) {\n\t\treturn b.results[marker], nil\n\t}\n\treturn nil, fmt.Errorf(\"Error in list objects\")\n}", "func (g genericPlugin) List(gvk schema.GroupVersionKind, namespace string,\n\tclient plugin.KubernetesConnector) ([]helm.KubernetesResource, error) {\n\n\tvar returnData []helm.KubernetesResource\n\treturn returnData, nil\n}", "func ListObjects(bucket, prefix string) ([]*s3.Object, error) {\n\n\t// Connection to s3 server\n\tstorage_connection := s3.New(storage_session)\n\n\t// Upload a new object\n\tobjects, err := storage_connection.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objects.Contents, nil\n}", "func (p *bucketProvider) ListObjects(ctx context.Context, bucket, path string, recursive bool) (buckets.ObjectIterator, error) {\n\tbkt := p.client.Bucket(bucket)\n\tif path != \"\" && !strings.HasSuffix(path, \"/\") {\n\t\t// An object can have the same name as a path prefix. 
Append a\n\t\t// \"/\" to make sure we don't include it.\n\t\tpath = path + \"/\"\n\t}\n\n\tvar delimiter string\n\tif recursive {\n\t\tdelimiter = \"\"\n\t} else {\n\t\tdelimiter = \"/\"\n\t}\n\n\tquery := &storage.Query{\n\t\tDelimiter: delimiter,\n\t\tPrefix: path,\n\t\tVersions: false,\n\t}\n\tit := bkt.Objects(ctx, query)\n\n\treturn &iteratorAdapter{\n\t\tit: it,\n\t\tbucket: bucket,\n\t}, nil\n}", "func (m *MockClient) ListBuckets(arg0 *s3.ListBucketsInput) (*s3.ListBucketsOutput, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListBuckets\", arg0)\n\tret0, _ := ret[0].(*s3.ListBucketsOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockClient) ListBuckets(arg0 *s3.ListBucketsInput) (*s3.ListBucketsOutput, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ListBuckets\", arg0)\n\tret0, _ := ret[0].(*s3.ListBucketsOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (t *ThrottlingHandler) buckets(r *http.Request) []string {\n\tresult := make([]string, 0, 1)\n\n\t// Session ID\n\tif sessId := session.SessionId(r); sessId != \"\" {\n\t\tresult = append(result, fmt.Sprintf(\"sessId:%s\", sessId))\n\t}\n\n\treturn result\n}", "func List(k8sClient client.Client, obj client.ObjectList) error {\n\treturn k8sClient.List(context.TODO(), obj, &client.ListOptions{})\n}", "func TestFSListBuckets(t *testing.T) {\n\t// Prepare for tests\n\tdisk := filepath.Join(globalTestTmpDir, \"minio-\"+nextSuffix())\n\tdefer os.RemoveAll(disk)\n\n\tobj := initFSObjects(disk, t)\n\tfs := obj.(*FSObjects)\n\n\tbucketName := \"bucket\"\n\tif err := obj.MakeBucketWithLocation(context.Background(), bucketName, \"\"); err != nil {\n\t\tt.Fatal(\"Unexpected error: \", err)\n\t}\n\n\tGlobalServiceDoneCh <- struct{}{}\n\n\t// Create a bucket with invalid name\n\tif err := os.MkdirAll(pathJoin(fs.fsPath, \"vo^\"), 0777); err != nil {\n\t\tt.Fatal(\"Unexpected error: \", err)\n\t}\n\tf, err := os.Create(pathJoin(fs.fsPath, \"test\"))\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error: \", err)\n\t}\n\tf.Close()\n\n\t// Test list buckets to have only one entry.\n\tbuckets, err := fs.ListBuckets(context.Background())\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error: \", err)\n\t}\n\tif len(buckets) != 1 {\n\t\tt.Fatal(\"ListBuckets not working properly\", buckets)\n\t}\n\n\t// Test ListBuckets with disk not found.\n\tos.RemoveAll(disk)\n\tif _, err := fs.ListBuckets(context.Background()); err != nil {\n\t\tif err != errDiskNotFound {\n\t\t\tt.Fatal(\"Unexpected error: \", err)\n\t\t}\n\t}\n}", "func (sb S3Buckets) getBucketNames(targetBuckets []*s3.Bucket, configObj config.Config) ([]*string, error) {\n\tvar bucketNames []*string\n\tbucketCh := make(chan *S3Bucket, len(targetBuckets))\n\tvar wg sync.WaitGroup\n\n\tfor _, bucket := range targetBuckets {\n\t\twg.Add(1)\n\t\tgo func(bucket *s3.Bucket) {\n\t\t\tdefer wg.Done()\n\t\t\tsb.getBucketInfo(bucket, bucketCh, configObj)\n\t\t}(bucket)\n\t}\n\n\tgo func() {\n\t\twg.Wait()\n\t\tclose(bucketCh)\n\t}()\n\n\t// Start reading from the channel as soon as the data comes in - so that skip\n\t// messages are shown to the user as soon as possible\n\tfor bucketData := range bucketCh {\n\t\tif bucketData.Error != nil {\n\t\t\tlogging.Logger.Debugf(\"Skipping - Bucket %s - region - %s - error: %s\", bucketData.Name, sb.Region, bucketData.Error)\n\t\t\tcontinue\n\t\t}\n\t\tif !bucketData.IsValid {\n\t\t\tlogging.Logger.Debugf(\"Skipping - Bucket %s - region - %s - %s\", bucketData.Name, sb.Region, 
bucketData.InvalidReason)\n\t\t\tcontinue\n\t\t}\n\n\t\tbucketNames = append(bucketNames, aws.String(bucketData.Name))\n\t}\n\n\treturn bucketNames, nil\n}", "func (base Base) ListRemote() (result []string, err error) {\n\treturn\n}", "func listFiles(svc *storage.Service, bucketName string, filePrefix string) ([]string, error) {\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\ttoken := \"\"\n\tfor {\n\t\tcall := svc.Objects.List(bucketName)\n\t\tcall.Prefix(filePrefix)\n\t\tif token != \"\" {\n\t\t\tcall = call.PageToken(token)\n\t\t}\n\t\tres, err := call.Do()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, object := range res.Items {\n\t\t\tfiles = append(files, object.Name)\n\t\t}\n\t\tif token = res.NextPageToken; token == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn files, nil\n}", "func (s *S3Client) ListObjects(bucket, prefix string) ([]*string, error) {\n\tparams := &s3.ListObjectsV2Input{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t}\n\tobjectKeys := make([]*string, 0)\n\tfn := func(page *s3.ListObjectsV2Output, lastPage bool) bool {\n\t\tfor _, object := range page.Contents {\n\t\t\tobjectKeys = append(objectKeys, object.Key)\n\t\t}\n\t\treturn true\n\t}\n\terr := s.s3Client.ListObjectsV2Pages(params, fn)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objectKeys, nil\n}", "func (o *ObjectStorage) ListTopLevelObjectsInBucket(bucket *string) (*s3.ListObjectsOutput, error) {\n\tdelimiter := \"/\"\n\tlistObjectInput := &s3.ListObjectsInput{\n\t\tBucket: bucket,\n\t\tDelimiter: &delimiter,\n\t}\n\n\treturn o.Client.ListObjects(listObjectInput)\n}", "func (c *Printer) PrintBuckets() {\n\tfor {\n\t\tbucket, more := <-c.channel\n\t\tif more {\n\t\t\tfmt.Fprintf(out, \"%s\\n\", bucket)\n\t\t} else {\n\t\t\tc.done <- true\n\t\t\treturn\n\t\t}\n\t}\n}", "func (a *API) CountBuckets(ctx context.Context, _ *None) (*BucketCountsResponse, error) {\n\tvar err error\n\n\tvar loginCount, passwordCount, ipCount int\n\n\tloginCount, err = a.LoginStorage.Count(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpasswordCount, err = a.PasswordStorage.Count(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tipCount, err = a.IPStorage.Count(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &BucketCountsResponse{\n\t\tLogin: uint32(loginCount),\n\t\tPassword: uint32(passwordCount),\n\t\tIp: uint32(ipCount),\n\t}, nil\n}", "func (s *BucketService) FindBuckets(ctx context.Context, filter influxdb.BucketFilter, opt ...influxdb.FindOptions) ([]*influxdb.Bucket, int, error) {\n\tspan, ctx := tracing.StartSpanFromContext(ctx)\n\tdefer span.Finish()\n\n\t// TODO: we'll likely want to push this operation into the database eventually since fetching the whole list of data\n\t// will likely be expensive.\n\tbs, _, err := s.s.FindBuckets(ctx, filter, opt...)\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\t// This filters without allocating\n\t// https://github.com/golang/go/wiki/SliceTricks#filtering-without-allocating\n\tbuckets := bs[:0]\n\tfor _, b := range bs {\n\t\t// HACK: remove once system buckets are migrated away from hard coded values\n\t\tif b.Type == influxdb.BucketTypeSystem {\n\t\t\tbuckets = append(buckets, b)\n\t\t\tcontinue\n\t\t}\n\n\t\terr := authorizeReadBucket(ctx, b.OrgID, b.ID)\n\t\tif err != nil && influxdb.ErrorCode(err) != influxdb.EUnauthorized {\n\t\t\treturn nil, 0, err\n\t\t}\n\n\t\tif influxdb.ErrorCode(err) == influxdb.EUnauthorized {\n\t\t\tcontinue\n\t\t}\n\n\t\tbuckets = 
append(buckets, b)\n\t}\n\n\treturn buckets, len(buckets), nil\n}", "func ListObjects(service *awss3.S3, container string, filter api.ObjectFilter) ([]string, error) {\n\tvar objs []string\n\n\tvar prefix string\n\tif filter.Path != \"\" || filter.Prefix != \"\" {\n\t\tprefix = strings.Join([]string{filter.Path, filter.Prefix}, \"/\")\n\t}\n\terr := service.ListObjectsV2Pages(&awss3.ListObjectsV2Input{Bucket: aws.String(container), Prefix: aws.String(prefix)},\n\t\tfunc(out *awss3.ListObjectsV2Output, last bool) bool {\n\t\t\tfor _, o := range out.Contents {\n\t\t\t\tobjs = append(objs, *o.Key)\n\t\t\t}\n\t\t\treturn last\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn objs, err\n}" ]
[ "0.7637069", "0.73562723", "0.7331727", "0.7331727", "0.7204461", "0.7192622", "0.7130636", "0.709906", "0.7084855", "0.7079804", "0.7046759", "0.7031411", "0.700239", "0.6996695", "0.6905541", "0.6904109", "0.6846874", "0.6839996", "0.6839996", "0.68232626", "0.68193644", "0.6786202", "0.67394274", "0.66987044", "0.6684698", "0.66812223", "0.6679634", "0.66788214", "0.6664154", "0.6655107", "0.65434784", "0.652384", "0.6481827", "0.6478166", "0.64563626", "0.6424767", "0.64139646", "0.6406057", "0.6395674", "0.63308424", "0.6286666", "0.6286658", "0.62717026", "0.62011063", "0.6165012", "0.615481", "0.61276436", "0.61027366", "0.60871863", "0.60819525", "0.6046966", "0.6021711", "0.6004885", "0.593024", "0.59192055", "0.5901951", "0.5897187", "0.5893044", "0.588987", "0.58787614", "0.5870763", "0.58692336", "0.5865309", "0.5838776", "0.57908237", "0.5755519", "0.57530606", "0.57416373", "0.57126224", "0.56915337", "0.56874144", "0.5641243", "0.5635802", "0.56119335", "0.55906713", "0.5587584", "0.5578691", "0.55623156", "0.55507064", "0.5548919", "0.55396557", "0.5535336", "0.5531102", "0.55145144", "0.5494569", "0.5457907", "0.5448065", "0.5448065", "0.5445684", "0.544387", "0.53995436", "0.5394463", "0.5390937", "0.5383598", "0.53708017", "0.53582793", "0.5353831", "0.53459126", "0.5324138", "0.52984744" ]
0.60293263
51
ListTopLevelObjectsInBucket : Returns the list of "top level" objects in bucket assuming they're '/' delimited
func (o *ObjectStorage) ListTopLevelObjectsInBucket(bucket *string) (*s3.ListObjectsOutput, error) {
	delimiter := "/"
	listObjectInput := &s3.ListObjectsInput{
		Bucket:    bucket,
		Delimiter: &delimiter,
	}

	return o.Client.ListObjects(listObjectInput)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p *bucketProvider) ListObjects(ctx context.Context, bucket, path string, recursive bool) (buckets.ObjectIterator, error) {\n\tbkt := p.client.Bucket(bucket)\n\tif path != \"\" && !strings.HasSuffix(path, \"/\") {\n\t\t// An object can have the same name as a path prefix. Append a\n\t\t// \"/\" to make sure we don't include it.\n\t\tpath = path + \"/\"\n\t}\n\n\tvar delimiter string\n\tif recursive {\n\t\tdelimiter = \"\"\n\t} else {\n\t\tdelimiter = \"/\"\n\t}\n\n\tquery := &storage.Query{\n\t\tDelimiter: delimiter,\n\t\tPrefix: path,\n\t\tVersions: false,\n\t}\n\tit := bkt.Objects(ctx, query)\n\n\treturn &iteratorAdapter{\n\t\tit: it,\n\t\tbucket: bucket,\n\t}, nil\n}", "func (c *Client) ListS3TopLevelDirs(bucket string) ([]string, error) {\n\t// find first top-level directory\n\ts3Objects, err := c.ListS3Prefix(bucket, \"\", false, pointer.Int64(1), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdirs := []string{}\n\tfor _, prefix := range ConvertS3ObjectsToKeys(s3Objects...) {\n\t\tdirs = append(dirs, files.GetTopLevelDirectory(prefix))\n\t}\n\n\t// detect all remaining top-level dirs\n\tfor {\n\t\tif len(dirs) == 0 {\n\t\t\tbreak\n\t\t}\n\t\tpreviousDir := dirs[len(dirs)-1]\n\t\ts3Objects, err := c.ListS3Prefix(\n\t\t\tbucket,\n\t\t\t\"\",\n\t\t\ttrue,\n\t\t\tpointer.Int64(1),\n\t\t\tpointer.String(filepath.Join(previousDir, \"~~~\")),\n\t\t)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif len(ConvertS3ObjectsToKeys(s3Objects...)) == 0 {\n\t\t\tbreak\n\t\t}\n\t\tfor _, prefix := range ConvertS3ObjectsToKeys(s3Objects...) {\n\t\t\tdirs = append(dirs, files.GetTopLevelDirectory(prefix))\n\t\t}\n\t}\n\n\treturn dirs, nil\n}", "func (b Bucket) ListObject(args ...Params) (*ListBucketResult, error) {\n\tv := new(ListBucketResult)\n\n\terr := b.Do(\"GET\", \"\", nil, v, args...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn v, nil\n}", "func getBucketObjects(svc *s3.S3, sess *session.Session, bucketName string) {\n\n\tquery := &s3.ListObjectsV2Input{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: aws.String(prefix),\n\t}\n\n\t// Pagination Flag used to check if we need to go further (default is 1000 otherwise)\n\ttruncatedListing := true\n\tpageCount := 0\n\n\tfor truncatedListing && pageCount < maxPages {\n\t\tresp, err := svc.ListObjectsV2(query)\n\t\tpageCount++\n\n\t\tif err != nil {\n\t\t\tfmt.Println(err.Error())\n\t\t\treturn\n\t\t}\n\t\t// Get all objects for this page\n\t\tgetObjectsPage(resp, svc, bucketName)\n\n\t\t// Set continuation token\n\t\tquery.ContinuationToken = resp.NextContinuationToken\n\t\ttruncatedListing = *resp.IsTruncated\n\n\t\tif verbose == \"y\" {\n\t\t\tfmt.Printf(\"page Num %d, recCount %d \\n\", pageCount, numberOfRetrievedFiles)\n\t\t}\n\t}\n\n}", "func ListObjects(bucketName string, keyPrefix string) (keysList []*string, err error) {\n\tsvc := s3.New(sessionutils.Session)\n\tres, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: aws.String(keyPrefix),\n\t})\n\n\tif err != nil {\n\t\tfmt.Printf(\"Error listing bucket:\\n%v\\n\", err)\n\t\treturn keysList, err\n\t}\n\n\tif len(res.Contents) > 0 {\n\t\tfor _, obj := range res.Contents {\n\t\t\tkeysList = append(keysList, obj.Key)\n\t\t}\n\t}\n\n\treturn keysList, nil\n\n}", "func (h *Handler) List(bucket string) ([]string, error) {\n\tklog.V(10).Info(\"List S3 Objects \", bucket)\n\n\treq := h.Client.ListObjectsRequest(&s3.ListObjectsInput{Bucket: &bucket})\n\tp := s3.NewListObjectsPaginator(req)\n\n\tvar keys 
[]string\n\n\tfor p.Next(context.TODO()) {\n\t\tpage := p.CurrentPage()\n\t\tfor _, obj := range page.Contents {\n\t\t\tkeys = append(keys, *obj.Key)\n\t\t}\n\t}\n\n\tif err := p.Err(); err != nil {\n\t\tklog.Error(\"failed to list objects. error: \", err)\n\t\treturn nil, err\n\t}\n\n\tklog.V(10).Info(\"List S3 Objects result \", keys)\n\n\treturn keys, nil\n}", "func ListBucket(a S3Account, bucket string) {\n\treq := NewRequest(a, \"GET\", bucket, \"/\", nil)\n\tbody := req.Send()\n\tlbr := ListBucketResult{}\n\txml.Unmarshal(body, &lbr)\n\tfor _,cp := range lbr.CommonPrefixes {\n\t\tcp.pp()\n\t}\n\tfor _,c := range lbr.Contents {\n\t\tc.pp()\n\t}\n}", "func ListObjects(w http.ResponseWriter, r *http.Request) {\n\n\tsvc := s3.New(sess)\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\tif len(pageVars.BName) <= 0 {\n\t\tif len(pageVars.ErrorM) <= 0 {\n\t\t\tpageVars.ErrorM = \"Invalid bucket name\"\n\t\t}\n\t\trender(w, \"objectlist\", pageVars)\n\t} else {\n\n\t\tif len(pageVars.Prefix) <= 0 {\n\t\t\tpageVars.Prefix = \"\"\n\t\t}\n\n\t\tif len(pageVars.Delimiter) <= 0 {\n\t\t\tpageVars.Delimiter = \"\"\n\t\t}\n\n\t\t// Get the list of items\n\t\tresp, err := svc.ListObjectsV2(&s3.ListObjectsV2Input{\n\t\t\tBucket: aws.String(pageVars.BName),\n\t\t\tPrefix: aws.String(pageVars.Prefix),\n\t\t\tDelimiter: aws.String(pageVars.Delimiter),\n\t\t})\n\n\t\tif err != nil {\n\t\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\t\tpageVars.ErrorM = awsErr.Message()\n\t\t\t} else {\n\t\t\t\tpageVars.ErrorM = \"Failed to get objects\"\n\t\t\t}\n\t\t} else {\n\t\t\tfor _, o := range resp.Contents {\n\t\t\t\tvar shouldAdd = true\n\t\t\t\t// check how many / in prefix\n\t\t\t\tns := strings.Count(pageVars.Prefix, \"/\")\n\t\t\t\t// check if its folder\n\t\t\t\tif strings.HasSuffix(*o.Key, \"/\") {\t\t\t\t\t\n\t\t\t\t\t// check if its top level folder\n\t\t\t\t\tif strings.Count(*o.Key, \"/\") == (ns + 1) {\n\t\t\t\t\t\tshouldAdd = true\n\t\t\t\t\t} else {\n\t\t\t\t\t\tshouldAdd = false\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// check if its top level folder\n\t\t\t\t\tif strings.Count(*o.Key, \"/\") == ns {\n\t\t\t\t\t\tshouldAdd = true\n\t\t\t\t\t} else {\n\t\t\t\t\t\tshouldAdd = false\n\t\t\t\t\t}\n\t\t\t\t}\t\t\t\t\n\n\t\t\t\tif shouldAdd {\n\t\t\t\t\tname := *o.Key\t\t\t\t\t\n\t\t\t\t\tif len(pageVars.Prefix) > 0 {\n\t\t\t\t\t\tname = strings.Replace(name, pageVars.Prefix, \"\", -1)\n\t\t\t\t\t}\n\t\t\t\t\tif strings.HasSuffix(*o.Key, \"/\") {\n\t\t\t\t\t\tod := ObjectDetails{*o.Key, name, *o.LastModified, *o.Size, *o.StorageClass, \"Folder\"}\n\t\t\t\t\t\tpageVars.OList = append(pageVars.OList, od)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tod := ObjectDetails{*o.Key, name, *o.LastModified, *o.Size, *o.StorageClass, \"File\"}\n\t\t\t\t\t\tpageVars.OList = append(pageVars.OList, od)\n\t\t\t\t\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t}\t\t\t\t\n\t\t\t}\n\t\t\t// add folder names f prefix ends with /\n\t\t\tsl := strings.Split(pageVars.Prefix, \"/\")\n\t\t\tpp := \"\"\n\t\t\t// remove last element as its empy due to trailing /\n\t\t\tif len(sl) > 0 {\n\t\t\t\tsl = sl[:len(sl)-1]\n\t\t\t\tfor _, fld := range sl {\n\t\t\t\t\tpp = pp+fld+\"/\"\n\t\t\t\t\tpageVars.FList = append(pageVars.FList, FolderDetails{fld, pp})\t\t\t\t\t\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tpageVars.FCount = len(pageVars.FList)\n\t\t\t}\n\t\t\t\n\t\t}\n\n\t\trender(w, \"objectlist\", pageVars)\n\t}\n\n}", "func (s *S3Client) ListObjects(bucket, prefix string) ([]*string, error) {\n\tparams := &s3.ListObjectsV2Input{\n\t\tBucket: 
aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t}\n\tobjectKeys := make([]*string, 0)\n\tfn := func(page *s3.ListObjectsV2Output, lastPage bool) bool {\n\t\tfor _, object := range page.Contents {\n\t\t\tobjectKeys = append(objectKeys, object.Key)\n\t\t}\n\t\treturn true\n\t}\n\terr := s.s3Client.ListObjectsV2Pages(params, fn)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objectKeys, nil\n}", "func getObjects(storageCli *storage.Client, bucketName string) []string {\n\tvar objectList []string\n\tquery := &storage.Query{Prefix: \"\"}\n\n\tbucket := storageCli.Bucket(bucketName)\n\tobjects := bucket.Objects(context.Background(), query)\n\n\tfor {\n\t\tattrs, err := objects.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tframework.FailfWithOffset(2, \"Failed to get objects from bucket %s\", err)\n\t\t}\n\n\t\tobjectList = append(objectList, attrs.Name)\n\t}\n\n\treturn objectList\n}", "func ListObjects(bucket, prefix string) ([]*s3.Object, error) {\n\n\t// Connection to s3 server\n\tstorage_connection := s3.New(storage_session)\n\n\t// Upload a new object\n\tobjects, err := storage_connection.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objects.Contents, nil\n}", "func (bucket Bucket) ListObjects(options ...Option) (ListObjectsResult, error) {\n\tvar out ListObjectsResult\n\n\toptions = append(options, EncodingType(\"url\"))\n\tparams, err := getRawParams(options)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\tresp, err := bucket.do(\"GET\", \"\", params, options, nil, nil)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tdefer resp.Body.Close()\n\n\terr = xmlUnmarshal(resp.Body, &out)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\terr = decodeListObjectsResult(&out)\n\treturn out, err\n}", "func (o *ObjectStorage) ListObjectsWithPrefixInBucket(bucket *string, prefix *string) (*s3.ListObjectsOutput, error) {\n\tlistObjectInput := &s3.ListObjectsInput{\n\t\tBucket: bucket,\n\t\tPrefix: prefix,\n\t}\n\n\treturn o.Client.ListObjects(listObjectInput)\n}", "func ListObjects(service *awss3.S3, container string, filter api.ObjectFilter) ([]string, error) {\n\tvar objs []string\n\n\tvar prefix string\n\tif filter.Path != \"\" || filter.Prefix != \"\" {\n\t\tprefix = strings.Join([]string{filter.Path, filter.Prefix}, \"/\")\n\t}\n\terr := service.ListObjectsV2Pages(&awss3.ListObjectsV2Input{Bucket: aws.String(container), Prefix: aws.String(prefix)},\n\t\tfunc(out *awss3.ListObjectsV2Output, last bool) bool {\n\t\t\tfor _, o := range out.Contents {\n\t\t\t\tobjs = append(objs, *o.Key)\n\t\t\t}\n\t\t\treturn last\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn objs, err\n}", "func listBuckets(sess *session.Session, prefix string, t *testing.T) error {\n\t// Create S3 service client\n\tsvc := s3.New(sess)\n\n\tresult, err := svc.ListBuckets(nil)\n\tif err != nil {\n\t\tt.Log(\"Could not list buckets\")\n\t\treturn err\n\t}\n\n\tfor _, b := range result.Buckets {\n\t\tif strings.HasPrefix(*b.Name, prefix) {\n\t\t\tt.Log(*b.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (s *S3) BucketTree(bucket string) (string, error) {\n\toutputs, err := s.listObjects(bucket, \"/\")\n\tif err != nil || outputs == nil {\n\t\treturn \"\", err\n\t}\n\tvar contents []*s3.Object\n\tvar prefixes []*s3.CommonPrefix\n\tfor _, output := range outputs {\n\t\tcontents = append(contents, output.Contents...)\n\t\tprefixes = append(prefixes, 
output.CommonPrefixes...)\n\t}\n\ttree := treeprint.New()\n\t// Add top-level files.\n\tfor _, object := range contents {\n\t\ttree.AddNode(aws.StringValue(object.Key))\n\t}\n\t// Recursively add folders and their children.\n\tif err := s.addNodes(tree, prefixes, bucket); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn tree.String(), nil\n}", "func (b NeteaseNOSBackend) ListObjects(prefix string) ([]Object, error) {\n\tvar objects []Object\n\n\tprefix = pathutil.Join(b.Prefix, prefix)\n\n\tlistRequest := &model.ListObjectsRequest{\n\t\tBucket: b.Bucket,\n\t\tPrefix: prefix,\n\t\tDelimiter: \"\",\n\t\tMarker: \"\",\n\t\tMaxKeys: 100,\n\t}\n\n\tfor {\n\t\tvar lor *model.ListObjectsResult\n\t\tlor, err := b.Client.ListObjects(listRequest)\n\t\tif err != nil {\n\t\t\treturn objects, err\n\t\t}\n\n\t\tfor _, obj := range lor.Contents {\n\t\t\tpath := removePrefixFromObjectPath(prefix, obj.Key)\n\t\t\tif objectPathIsInvalid(path) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tlocal, _ := time.LoadLocation(\"Local\")\n\t\t\t// LastModified time layout in NOS is 2006-01-02T15:04:05 -0700\n\t\t\tt, _ := time.ParseInLocation(\"2006-01-02T15:04:05 -0700\", obj.LastModified, local)\n\t\t\tobject := Object{\n\t\t\t\tPath: path,\n\t\t\t\tContent: []byte{},\n\t\t\t\tLastModified: t,\n\t\t\t}\n\t\t\tobjects = append(objects, object)\n\t\t}\n\t\tif !lor.IsTruncated {\n\t\t\tbreak\n\t\t}\n\n\t}\n\n\treturn objects, nil\n}", "func List(sess *session.Session, bucket string, prefix string) ([]S3Item, error) {\n\tsvc := s3.New(sess)\n\n\tdelimiter := \"/\"\n\n\tresp, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t\tDelimiter: &delimiter,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\titems := make([]S3Item, 0)\n\n\tfor _, p := range resp.CommonPrefixes {\n\t\tif strings.TrimSuffix(*(p.Prefix), \"/\") == strings.TrimSuffix(prefix, \"/\") {\n\t\t\tcontinue\n\t\t}\n\t\tpstr := *(p.Prefix)\n\t\tcutprefix := pstr[0 : len(pstr)-1]\n\t\tidxDelimiter := strings.LastIndex(cutprefix, \"/\")\n\t\tif idxDelimiter < 0 {\n\t\t\tidxDelimiter = -1\n\t\t}\n\t\titems = append(items, S3Item{\n\t\t\tType: \"directory\",\n\t\t\tName: cutprefix[idxDelimiter+1:],\n\t\t\tFullpath: *(p.Prefix),\n\t\t\tSize: sprintSize(0),\n\t\t\tLastModified: \"\",\n\t\t})\n\t}\n\tfor _, content := range resp.Contents {\n\t\tif strings.TrimSuffix(*(content.Key), \"/\") == strings.TrimSuffix(prefix, \"/\") {\n\t\t\tcontinue\n\t\t}\n\t\tkey := *(content.Key)\n\t\tidxDelimiter := strings.LastIndex(key, \"/\")\n\t\tif idxDelimiter < 0 {\n\t\t\tidxDelimiter = -1\n\t\t}\n\t\titems = append(items, S3Item{\n\t\t\tType: \"file\",\n\t\t\tName: key[idxDelimiter+1:],\n\t\t\tFullpath: key,\n\t\t\tSize: sprintSize(*(content.Size)),\n\t\t\tLastModified: content.LastModified.Format(\"2006-01-02 15:04:05\"),\n\t\t})\n\t}\n\n\treturn items, nil\n}", "func (m *mockS3Client) ListObjects(in *s3.ListObjectsInput) (*s3.ListObjectsOutput, error) {\n\tvar contents []*s3.Object\n\tfor key := range m.objects {\n\t\tif strings.HasPrefix(key, *in.Prefix) {\n\t\t\tkeyPtr := new(string)\n\t\t\t*keyPtr = key\n\t\t\ttempObj := &s3.Object{\n\t\t\t\tKey: keyPtr,\n\t\t\t}\n\t\t\tcontents = append(contents, tempObj)\n\t\t}\n\t}\n\tout := &s3.ListObjectsOutput{\n\t\tPrefix: in.Prefix,\n\t\tContents: contents,\n\t}\n\treturn out, nil\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) *appError {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: 
http.StatusText(http.StatusInternalServerError)}\n }\n response, _ := s3Request(s3, \"\", \"GET\", \"/\", make(map[string][]string), \"\")\n listBucketsResp := &ListBucketsResp{}\n xml.NewDecoder(strings.NewReader(response.Body)).Decode(listBucketsResp)\n buckets := []string{}\n for _, bucket := range listBucketsResp.Buckets {\n buckets = append(buckets, bucket.Name)\n }\n rendering.JSON(w, http.StatusOK, buckets)\n\n return nil\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) *appError {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n response, _ := s3Request(s3, \"\", \"GET\", \"/\", make(map[string][]string), \"\")\n listBucketsResp := &ListBucketsResp{}\n xml.NewDecoder(strings.NewReader(response.Body)).Decode(listBucketsResp)\n buckets := []string{}\n for _, bucket := range listBucketsResp.Buckets {\n buckets = append(buckets, bucket.Name)\n }\n rendering.JSON(w, http.StatusOK, buckets)\n\n return nil\n}", "func main() {\n\tflag.Parse()\n\tif len(bucketName) == 0 {\n\t\tflag.PrintDefaults()\n\t\tlog.Fatalf(\"invalid parameters, bucket name required\")\n\t}\n\n\t// Load the SDK's configuration from environment and shared config, and\n\t// create the client with this.\n\tcfg, err := config.LoadDefaultConfig(context.TODO())\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to load SDK configuration, %v\", err)\n\t}\n\n\tclient := s3.NewFromConfig(cfg)\n\n\t// Set the parameters based on the CLI flag inputs.\n\tparams := &s3.ListObjectsV2Input{\n\t\tBucket: &bucketName,\n\t}\n\tif len(objectPrefix) != 0 {\n\t\tparams.Prefix = &objectPrefix\n\t}\n\tif len(objectDelimiter) != 0 {\n\t\tparams.Delimiter = &objectDelimiter\n\t}\n\n\t// TODO replace this with the code generate paginator when available\n\t// s3.NewListObjectsV2Paginator()\n\tp := NewS3ListObjectsV2Paginator(client, params, func(o *S3ListObjectsV2PaginatorOptions) {\n\t\tif v := int32(maxKeys); v != 0 {\n\t\t\to.Limit = v\n\t\t}\n\t})\n\n\t// Iterate through the S3 object pages, printing each object returned.\n\tvar i int\n\tlog.Println(\"Objects:\")\n\tfor p.HasMorePages() {\n\t\ti++\n\n\t\t// Next Page takes a new context for each page retrieval. 
This is where\n\t\t// you could add timeouts or deadlines.\n\t\tpage, err := p.NextPage(context.TODO())\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to get page %v, %v\", i, err)\n\t\t}\n\n\t\t// Log the objects found\n\t\tfor _, obj := range page.Contents {\n\t\t\tfmt.Println(\"Object:\", *obj.Key)\n\t\t}\n\t}\n}", "func (t *targetrunner) listbucket(w http.ResponseWriter, r *http.Request, bucket string) (tag string, ok bool) {\n\tvar (\n\t\tjsbytes []byte\n\t\terrstr string\n\t\terrcode int\n\t)\n\tislocal := t.bmdowner.get().islocal(bucket)\n\terrstr, errcode = t.checkLocalQueryParameter(bucket, r, islocal)\n\tif errstr != \"\" {\n\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\treturn\n\t}\n\tuseCache, errstr, errcode := t.checkCacheQueryParameter(r)\n\tif errstr != \"\" {\n\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\treturn\n\t}\n\tmsg := &GetMsg{}\n\tif t.readJSON(w, r, msg) != nil {\n\t\treturn\n\t}\n\tif islocal {\n\t\ttag = \"local\"\n\t\tif errstr, ok = t.doLocalBucketList(w, r, bucket, msg); errstr != \"\" {\n\t\t\tt.invalmsghdlr(w, r, errstr)\n\t\t}\n\t\treturn // ======================================>\n\t}\n\t// cloud bucket\n\tif useCache {\n\t\ttag = \"cloud cached\"\n\t\tjsbytes, errstr, errcode = t.listCachedObjects(bucket, msg)\n\t} else {\n\t\ttag = \"cloud\"\n\t\tjsbytes, errstr, errcode = getcloudif().listbucket(t.contextWithAuth(r), bucket, msg)\n\t}\n\tif errstr != \"\" {\n\t\tif errcode == 0 {\n\t\t\tt.invalmsghdlr(w, r, errstr)\n\t\t} else {\n\t\t\tt.invalmsghdlr(w, r, errstr, errcode)\n\t\t}\n\t\treturn\n\t}\n\tok = t.writeJSON(w, r, jsbytes, \"listbucket\")\n\treturn\n}", "func (c *Client) ListBucket(bucket string, startAt string, maxKeys int) (items []*Item, err error) {\n\tif maxKeys < 0 {\n\t\treturn nil, errors.New(\"invalid negative maxKeys\")\n\t}\n\tmarker := startAt\n\tfor len(items) < maxKeys {\n\t\tfetchN := maxKeys - len(items)\n\t\tif fetchN > maxList {\n\t\t\tfetchN = maxList\n\t\t}\n\t\tvar bres listBucketResults\n\n\t\turl_ := fmt.Sprintf(\"%s?marker=%s&max-keys=%d\",\n\t\t\tc.bucketURL(bucket), url.QueryEscape(marker), fetchN)\n\n\t\t// Try the enumerate three times, since Amazon likes to close\n\t\t// https connections a lot, and Go sucks at dealing with it:\n\t\t// https://code.google.com/p/go/issues/detail?id=3514\n\t\tconst maxTries = 5\n\t\tfor try := 1; try <= maxTries; try++ {\n\t\t\ttime.Sleep(time.Duration(try-1) * 100 * time.Millisecond)\n\t\t\treq := newReq(url_)\n\t\t\tc.Auth.SignRequest(req)\n\t\t\tres, err := c.transport().RoundTrip(req)\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif res.StatusCode != http.StatusOK {\n\t\t\t\tif res.StatusCode < 500 {\n\t\t\t\t\tbody, _ := ioutil.ReadAll(io.LimitReader(res.Body, 1<<20))\n\t\t\t\t\taerr := &Error{\n\t\t\t\t\t\tOp: \"ListBucket\",\n\t\t\t\t\t\tCode: res.StatusCode,\n\t\t\t\t\t\tBody: body,\n\t\t\t\t\t\tHeader: res.Header,\n\t\t\t\t\t}\n\t\t\t\t\taerr.parseXML()\n\t\t\t\t\tres.Body.Close()\n\t\t\t\t\treturn nil, aerr\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tbres = listBucketResults{}\n\t\t\t\tvar logbuf bytes.Buffer\n\t\t\t\terr = xml.NewDecoder(io.TeeReader(res.Body, &logbuf)).Decode(&bres)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Error parsing s3 XML response: %v for %q\", err, logbuf.Bytes())\n\t\t\t\t} else if bres.MaxKeys != fetchN || bres.Name != bucket || bres.Marker != marker {\n\t\t\t\t\terr = fmt.Errorf(\"Unexpected parse from server: %#v from: %s\", bres, 
logbuf.Bytes())\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tres.Body.Close()\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries-1 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tlog.Print(err)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\tfor _, it := range bres.Contents {\n\t\t\tif it.Key == marker && it.Key != startAt {\n\t\t\t\t// Skip first dup on pages 2 and higher.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif it.Key < startAt {\n\t\t\t\treturn nil, fmt.Errorf(\"Unexpected response from Amazon: item key %q but wanted greater than %q\", it.Key, startAt)\n\t\t\t}\n\t\t\titems = append(items, it)\n\t\t\tmarker = it.Key\n\t\t}\n\t\tif !bres.IsTruncated {\n\t\t\t// log.Printf(\"Not truncated. so breaking. items = %d; len Contents = %d, url = %s\", len(items), len(bres.Contents), url_)\n\t\t\tbreak\n\t\t}\n\t}\n\treturn items, nil\n}", "func remoteBucketList(remoteURLObj interface{}) (interface{}, error) {\n\tremoteURL := remoteURLObj.(string)\n\treturn couchbase.GetBucketList(remoteURL)\n}", "func (xl xlObjects) listObjects(ctx context.Context, bucket, prefix, marker, delimiter string, maxKeys int) (loi ListObjectsInfo, e error) {\n\t// Default is recursive, if delimiter is set then list non recursive.\n\trecursive := true\n\tif delimiter == SlashSeparator {\n\t\trecursive = false\n\t}\n\n\twalkResultCh, endWalkCh := xl.listPool.Release(listParams{bucket, recursive, marker, prefix, false})\n\tif walkResultCh == nil {\n\t\tendWalkCh = make(chan struct{})\n\t\tlistDir := listDirFactory(ctx, xl.getLoadBalancedDisks()...)\n\t\twalkResultCh = startTreeWalk(ctx, bucket, prefix, marker, recursive, listDir, endWalkCh)\n\t}\n\n\tvar objInfos []ObjectInfo\n\tvar eof bool\n\tvar nextMarker string\n\tfor i := 0; i < maxKeys; {\n\n\t\twalkResult, ok := <-walkResultCh\n\t\tif !ok {\n\t\t\t// Closed channel.\n\t\t\teof = true\n\t\t\tbreak\n\t\t}\n\t\tentry := walkResult.entry\n\t\tvar objInfo ObjectInfo\n\t\tif hasSuffix(entry, SlashSeparator) {\n\t\t\t// Object name needs to be full path.\n\t\t\tobjInfo.Bucket = bucket\n\t\t\tobjInfo.Name = entry\n\t\t\tobjInfo.IsDir = true\n\t\t} else {\n\t\t\t// Set the Mode to a \"regular\" file.\n\t\t\tvar err error\n\t\t\tobjInfo, err = xl.getObjectInfo(ctx, bucket, entry)\n\t\t\tif err != nil {\n\t\t\t\t// Ignore errFileNotFound as the object might have got\n\t\t\t\t// deleted in the interim period of listing and getObjectInfo(),\n\t\t\t\t// ignore quorum error as it might be an entry from an outdated disk.\n\t\t\t\tif IsErrIgnored(err, []error{\n\t\t\t\t\terrFileNotFound,\n\t\t\t\t\terrXLReadQuorum,\n\t\t\t\t}...) 
{\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn loi, toObjectErr(err, bucket, prefix)\n\t\t\t}\n\t\t}\n\t\tnextMarker = objInfo.Name\n\t\tobjInfos = append(objInfos, objInfo)\n\t\ti++\n\t\tif walkResult.end {\n\t\t\teof = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tparams := listParams{bucket, recursive, nextMarker, prefix, false}\n\tif !eof {\n\t\txl.listPool.Set(params, walkResultCh, endWalkCh)\n\t}\n\n\tresult := ListObjectsInfo{}\n\tfor _, objInfo := range objInfos {\n\t\tif objInfo.IsDir && delimiter == SlashSeparator {\n\t\t\tresult.Prefixes = append(result.Prefixes, objInfo.Name)\n\t\t\tcontinue\n\t\t}\n\t\tresult.Objects = append(result.Objects, objInfo)\n\t}\n\n\tif !eof {\n\t\tresult.IsTruncated = true\n\t\tif len(objInfos) > 0 {\n\t\t\tresult.NextMarker = objInfos[len(objInfos)-1].Name\n\t\t}\n\t}\n\n\treturn result, nil\n}", "func (b *fakeBosClient) ListObjects(bucket string, args *api.ListObjectsArgs) (\n\t*api.ListObjectsResult, error) {\n\tvar (\n\t\tmarker int\n\t\terr error\n\t)\n\tif args.Marker == \"\" {\n\t\tmarker, err = strconv.Atoi(bucket)\n\t} else {\n\t\tmarker, err = strconv.Atoi(args.Marker)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif marker < len(b.results) {\n\t\treturn b.results[marker], nil\n\t}\n\treturn nil, fmt.Errorf(\"Error in list objects\")\n}", "func (svc *GCSclient) list(bucketName string, filePrefix string) ([]string, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 300*time.Second)\n\tdefer cancel()\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\tit := svc.Bucket(bucketName).Objects(ctx, &storage.Query{Prefix: filePrefix})\n\tfor {\n\t\tobj, err := it.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, obj.Name)\n\t}\n\tsort.Strings(files)\n\treturn files, nil\n}", "func (api *bucketAPI) List(ctx context.Context, opts *api.ListWatchOptions) ([]*Bucket, error) {\n\tvar objlist []*Bucket\n\tobjs, err := api.ct.List(\"Bucket\", ctx, opts)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, obj := range objs {\n\t\tswitch tp := obj.(type) {\n\t\tcase *Bucket:\n\t\t\teobj := obj.(*Bucket)\n\t\t\tobjlist = append(objlist, eobj)\n\t\tdefault:\n\t\t\tlog.Fatalf(\"Got invalid object type %v while looking for Bucket\", tp)\n\t\t}\n\t}\n\n\treturn objlist, nil\n}", "func (xl xlObjects) ListObjects(ctx context.Context, bucket, prefix, marker, delimiter string, maxKeys int) (loi ListObjectsInfo, e error) {\n\tif err := checkListObjsArgs(ctx, bucket, prefix, marker, delimiter, xl); err != nil {\n\t\treturn loi, err\n\t}\n\n\t// With max keys of zero we have reached eof, return right here.\n\tif maxKeys == 0 {\n\t\treturn loi, nil\n\t}\n\n\t// Marker is set validate pre-condition.\n\tif marker != \"\" {\n\t\t// Marker not common with prefix is not implemented.Send an empty response\n\t\tif !hasPrefix(marker, prefix) {\n\t\t\treturn ListObjectsInfo{}, e\n\t\t}\n\t}\n\n\t// For delimiter and prefix as '/' we do not list anything at all\n\t// since according to s3 spec we stop at the 'delimiter' along\n\t// with the prefix. 
On a flat namespace with 'prefix' as '/'\n\t// we don't have any entries, since all the keys are of form 'keyName/...'\n\tif delimiter == SlashSeparator && prefix == SlashSeparator {\n\t\treturn loi, nil\n\t}\n\n\t// Over flowing count - reset to maxObjectList.\n\tif maxKeys < 0 || maxKeys > maxObjectList {\n\t\tmaxKeys = maxObjectList\n\t}\n\n\t// Initiate a list operation, if successful filter and return quickly.\n\tlistObjInfo, err := xl.listObjects(ctx, bucket, prefix, marker, delimiter, maxKeys)\n\tif err == nil {\n\t\t// We got the entries successfully return.\n\t\treturn listObjInfo, nil\n\t}\n\n\t// Return error at the end.\n\treturn loi, toObjectErr(err, bucket, prefix)\n}", "func (s *S3Storage) List(prefix string, recursive bool) ([]string, error) {\n\tvar keys []string\n\n\tprefixPath := s.Filename(prefix)\n\tresult, err := s.svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: s.bucket,\n\t\tPrefix: aws.String(prefixPath),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, k := range result.Contents {\n\t\tif strings.HasPrefix(*k.Key, prefix) {\n\t\t\tkeys = append(keys, *k.Key)\n\t\t}\n\t}\n\t//\n\treturn keys, nil\n}", "func getObjectsPage(bucketObjectsList *s3.ListObjectsV2Output, s3Client *s3.S3, bucketName string) {\n\n\t// Iterate through the files inside the bucket\n\tfor _, key := range bucketObjectsList.Contents {\n\t\ts3Key := *key.Key\n\n\t\tcount++\n\n\t\ts3Prefix, videoID, parseErr := parseS3Key(s3Key)\n\t\tif parseErr != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif count%1000 == 0 {\n\t\t\tfmt.Printf(\"s3key: count %d, key %s, s3prefix %s, videoID %s\\n\", count, s3Key, s3Prefix, videoID)\n\t\t}\n\n\t\t_, dberr := db.Exec(sqlInsertStmt,\n\t\t\tvideoID,\n\t\t\ts3BucketName,\n\t\t\ts3Prefix,\n\t\t\ts3Key)\n\t\tif dberr != nil {\n\t\t\tfmt.Println(\"DB insert Error\", dberr)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (l *pydioObjects) ListBuckets(ctx context.Context) ([]minio.BucketInfo, error) {\n\n\treturn []minio.BucketInfo{\n\t\t{Name: \"io\", Created: time.Now()},\n\t\t{Name: \"data\", Created: time.Now()},\n\t}, nil\n\n}", "func GetBucketObjects(ctx *pulumi.Context, args *GetBucketObjectsArgs, opts ...pulumi.InvokeOption) (*GetBucketObjectsResult, error) {\n\tvar rv GetBucketObjectsResult\n\terr := ctx.Invoke(\"aws:s3/getBucketObjects:getBucketObjects\", args, &rv, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &rv, nil\n}", "func (l *pydioObjects) ListObjects(ctx context.Context, bucket string, prefix string, marker string, delimiter string, maxKeys int /*, versions bool*/) (loi minio.ListObjectsInfo, e error) {\n\n\t//objects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, versions)\n\tobjects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, false)\n\tif err != nil {\n\t\treturn loi, pydioToMinioError(err, bucket, prefix)\n\t}\n\n\t// log.Printf(\"[ListObjects] Returning %d objects and %d prefixes (V1) for prefix %s\\n\", len(objects), len(prefixes), prefix)\n\n\treturn minio.ListObjectsInfo{\n\t\tIsTruncated: false,\n\t\tNextMarker: \"\",\n\t\tPrefixes: prefixes,\n\t\tObjects: objects,\n\t}, nil\n\n}", "func (c *storageClient) forEachObject(bucket, prefix string, recursive bool, fn func(*s3types.Object) error) error {\n\t// The \"/\" value can be used at command-level to mean that we want to\n\t// list from the root of the bucket, but the actual bucket root is an\n\t// empty prefix.\n\tif prefix == \"/\" {\n\t\tprefix = \"\"\n\t}\n\n\tdirs := make(map[string]struct{})\n\n\tvar ct 
string\n\tfor {\n\t\tres, err := c.ListObjectsV2(gContext, &s3.ListObjectsV2Input{\n\t\t\tBucket: aws.String(bucket),\n\t\t\tPrefix: aws.String(prefix),\n\t\t\tContinuationToken: aws.String(ct),\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tct = aws.ToString(res.NextContinuationToken)\n\n\t\tfor _, o := range res.Contents {\n\t\t\t// If not invoked in recursive mode, split object keys on the \"/\" separator and skip\n\t\t\t// objects \"below\" the base directory prefix.\n\t\t\tparts := strings.SplitN(strings.TrimPrefix(aws.ToString(o.Key), prefix), \"/\", 2)\n\t\t\tif len(parts) > 1 && !recursive {\n\t\t\t\tdir := path.Base(parts[0])\n\t\t\t\tif _, ok := dirs[dir]; !ok {\n\t\t\t\t\tdirs[dir] = struct{}{}\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// If the prefix doesn't end with a trailing prefix separator (\"/\"),\n\t\t\t// consider it as a single object key and match only one exact result\n\t\t\t// (except in recursive mode, where the prefix is expected to be a\n\t\t\t// \"directory\").\n\t\t\tif !recursive && !strings.HasSuffix(prefix, \"/\") && aws.ToString(o.Key) != prefix {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\to := o\n\t\t\tif err := fn(&o); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tif !res.IsTruncated {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn nil\n}", "func listCloudBucketsHandler(c *cli.Context) (err error) {\n\tbck := cmn.Bck{\n\t\tName: c.Args().First(),\n\t\tProvider: cmn.Cloud,\n\t}\n\tif bck.Name == \"\" {\n\t\treturn listBucketNames(c, bck)\n\t}\n\n\tbck.Name = strings.TrimSuffix(bck.Name, \"/\")\n\treturn listBucketObj(c, bck)\n}", "func (b *Bucket) Objects() map[string][]byte {\n\treturn b.objects\n}", "func (service *S3Service) ListBuckets() ([]*S3Bucket,error) {\n request := service.newS3Request().prepare()\n response,err := request.execute(service.client)\n if err != nil {\n return nil, err\n }\n\n defer response.Close()\n\n // FIXME - process list of buckets\n return make([]*S3Bucket,0),nil\n}", "func (d *driver) List(ctx context.Context, opath string) ([]string, error) {\n\tpath := opath\n\tif path != \"/\" && opath[len(path)-1] != '/' {\n\t\tpath = path + \"/\"\n\t}\n\n\t// This is to cover for the cases when the rootDirectory of the driver is either \"\" or \"/\".\n\t// In those cases, there is no root prefix to replace and we must actually add a \"/\" to all\n\t// results in order to keep them as valid paths as recognized by storagedriver.PathRegexp\n\tprefix := \"\"\n\tif d.obsPath(\"\") == \"\" {\n\t\tprefix = \"/\"\n\t}\n\n\toutput, err := d.Client.ListObjects(&obs.ListObjectsInput{\n\t\tListObjsInput: obs.ListObjsInput{\n\t\t\tPrefix: d.obsPath(path),\n\t\t\tMaxKeys: listMax,\n\t\t\tDelimiter: \"/\",\n\t\t},\n\t\tBucket: d.Bucket,\n\t})\n\tif err != nil {\n\t\treturn nil, parseError(opath, err)\n\t}\n\n\tfiles := []string{}\n\tdirectories := []string{}\n\n\tfor {\n\t\tfor _, key := range output.Contents {\n\t\t\tfiles = append(files, strings.Replace(key.Key, d.obsPath(\"\"), prefix, 1))\n\t\t}\n\n\t\tfor _, commonPrefix := range output.CommonPrefixes {\n// commonPrefix := commonPrefix>Prefix\n\t\t\tdirectories = append(directories, strings.Replace(commonPrefix[0:len(commonPrefix)-1], d.obsPath(\"\"), prefix, 1))\n\t\t}\n\n\t\tif output.IsTruncated {\n\t\t\toutput, err = d.Client.ListObjects(&obs.ListObjectsInput{\n\t\t\t\tListObjsInput: obs.ListObjsInput{\n\t\t\t\t\tPrefix: d.obsPath(path),\n\t\t\t\t\tDelimiter: \"/\",\n\t\t\t\t\tMaxKeys: listMax,\n\t\t\t\t},\n\t\t\t\tBucket: d.Bucket,\n\t\t\t\tMarker: 
output.NextMarker,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif opath != \"/\" {\n\t\tif len(files) == 0 && len(directories) == 0 {\n\t\t\t// Treat empty output as missing directory, since we don't actually\n\t\t\t// have directories in obs.\n\t\t\treturn nil, storagedriver.PathNotFoundError{Path: opath}\n\t\t}\n\t}\n\treturn append(files, directories...), nil\n}", "func (xl xlObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKeys int) (ListObjectsInfo, error) {\n\treturn listObjectsCommon(xl, bucket, prefix, marker, delimiter, maxKeys)\n}", "func (s *s3) List(key string) ([]string, error) {\n\tif key != \"\" && !strings.HasSuffix(key, \"/\") {\n\t\tkey += \"/\"\n\t}\n\n\tresult, err := s.client.ListObjectsV2(&awss3.ListObjectsV2Input{\n\t\tPrefix: aws.String(key),\n\t\tBucket: aws.String(s.bucket),\n\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfiles := []string{}\n\tfor _, obj := range result.Contents {\n\t\t_, file := path.Split(*obj.Key)\n\t\tfiles = append(files, file)\n\t}\n\treturn files, nil\n}", "func (m *memClient) ListObjects(prefix string) ([]string, error) {\n\tdefer m.Unlock()\n\tm.Lock()\n\tret := []string{}\n\tfor k := range m.store {\n\t\tret = append(ret, k)\n\t}\n\treturn ret, nil\n}", "func (cache *SiaCacheLayer) ListObjects(bucket string) (objects []SiaObjectInfo, e *SiaServiceError) {\n\tcache.debugmsg(\"SiaCacheLayer.ListObjects\")\n\n\treturn cache.dbListObjects(bucket)\n}", "func (b *Bucket) ListObjectInfo() ([]ObjectInfo, error) {\n\t// set session\n\tsess, err := b.setSession()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// list objects\n\tsvc := b.newS3func(sess)\n\tresult, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(b.BucketName),\n\t\tPrefix: aws.String(crPath),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// make ObjectInfo list\n\tobjInfoList := make([]ObjectInfo, 0)\n\tfor _, obj := range result.Contents {\n\t\tobjInfo := ObjectInfo{\n\t\t\tName: strings.TrimPrefix(aws.StringValue(obj.Key), crPath),\n\t\t\tSize: aws.Int64Value(obj.Size),\n\t\t\tTimestamp: aws.TimeValue(obj.LastModified),\n\t\t\tBucketConfigName: b.Name,\n\t\t}\n\t\tobjInfoList = append(objInfoList, objInfo)\n\t}\n\n\treturn objInfoList, nil\n}", "func (c *Client) GetObjects(ctx context.Context, entriesDir string) *gcs.ObjectIterator {\n\treturn c.gcsClient.Bucket(c.bucket).Objects(ctx, &gcs.Query{\n\t\tPrefix: entriesDir,\n\t})\n}", "func (be *s3) List(t backend.Type, done <-chan struct{}) <-chan string {\n\tdebug.Log(\"s3.List\", \"listing %v\", t)\n\tch := make(chan string)\n\n\tprefix := be.s3path(t, \"\")\n\n\tlistresp := be.client.ListObjects(be.bucketname, prefix, true, done)\n\n\tgo func() {\n\t\tdefer close(ch)\n\t\tfor obj := range listresp {\n\t\t\tm := strings.TrimPrefix(obj.Key, prefix)\n\t\t\tif m == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tselect {\n\t\t\tcase ch <- m:\n\t\t\tcase <-done:\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn ch\n}", "func Explore(client *storage.Client, thinClient *ThinClient, bucket string, prefix string) ([]*Node, error) {\n // Combine bucket lifecycle to each node\n ctx := context.Background()\n lifecycle, err := thinClient.Lifecycle(ctx, bucket)\n if err != nil {\n return nil, err\n }\n\n objs, err := listq(client, bucket, prefix, \"/\", false, true)\n if err != nil {\n return nil, err\n }\n var nodes []*Node\n for _, element := range objs {\n var node Node\n node.Bucket = bucket\n node.Lifecycle = 
lifecycle\n if element.Name == \"\" && element.Prefix != \"\" {\n node.Ntype = \"DIR\"\n node.Name = element.Prefix\n node.FQPN = element.Prefix\n } else {\n node.Ntype = \"OBJ\"\n pathSlice := strings.Split(element.Name, \"/\")\n pathLength := len(pathSlice)\n node.Name = pathSlice[pathLength - 1]\n node.FQPN = element.Name\n }\n node.ACL = element.ACL\n node.Owner = element.Owner\n node.Size = element.Size\n\n nodes = append(nodes, &node)\n }\n return nodes, nil\n}", "func (s3ops *S3Operations) GetBucketItems(bucket string, prefix string, index int) (items []string, probs bool) {\n\n\tsess, _ := session.NewSession()\n\tvar config = &aws.Config{}\n\tif os.Getenv(\"AWS_WEB_IDENTITY_TOKEN_FILE\") != \"\" {\n\t\teksCreds := awscredentials.Credentials{}\n\t\tcreds := *eksCreds.GetWebIdentityCredentials()\n\t\tconfig = &aws.Config{\n\t\t\tRegion: aws.String(\"eu-west-1\"),\n\t\t\tCredentials: credentials.NewStaticCredentials(*creds.AccessKeyId, *creds.SecretAccessKey, *creds.SessionToken),\n\t\t}\n\t} else {\n\t\tconfig = &aws.Config{\n\t\t\tRegion: aws.String(\"eu-west-1\"),\n\t\t}\n\t}\n\n\t// Create a client from just a config.r\n\tclient := s3.New(sess, config)\n\n\tlog.WithFields(log.Fields{\n\t\t\"Bucket\": bucket,\n\t\t\"Prefix\": prefix,\n\t}).Info(\"------ FETCHING BUCKET DETAILS ----\")\n\tparams := &s3.ListObjectsV2Input{Bucket: aws.String(bucket), Prefix: aws.String(prefix)}\n\ttenants := make(map[string]struct{})\n\t// Example iterating over at most 3 pages of a ListObjectsV2 operation.\n\terr := client.ListObjectsV2Pages(params,\n\t\tfunc(page *s3.ListObjectsV2Output, lastPage bool) bool {\n\t\t\tfor _, content := range page.Contents {\n\t\t\t\tparts := strings.Split(*content.Key, \"/\")\n\t\t\t\t// you can use the ,ok idiom to check for existing keys\n\t\t\t\tif _, ok := tenants[parts[index]]; !ok {\n\t\t\t\t\ttenants[parts[index]] = struct{}{}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn lastPage != true\n\t\t})\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"err\": err.Error(),\n\t\t\t\"Bucket\": bucket,\n\t\t\t\"Prefix\": prefix,\n\t\t}).Error(\"Problems Getting Bucket Details\")\n\t\tprobs = true\n\t} else {\n\n\t\tkeys := make([]string, 0, len(tenants))\n\t\tfor k := range tenants {\n\t\t\tkeys = append(keys, k)\n\t\t}\n\t\titems = keys\n\t\tprobs = false\n\t}\n\treturn\n}", "func listb(client *storage.Client, bucketName string) ([]*storage.ObjectAttrs, error) {\n ctx := context.Background()\n var objects []*storage.ObjectAttrs\n it := client.Bucket(bucketName).Objects(ctx, nil)\n for {\n oattrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n objects = append(objects, oattrs)\n }\n return objects, nil\n}", "func (t *targetrunner) listCachedObjects(bucket string, msg *GetMsg) (outbytes []byte, errstr string, errcode int) {\n\treslist, err := t.prepareLocalObjectList(bucket, msg)\n\tif err != nil {\n\t\treturn nil, err.Error(), 0\n\t}\n\n\toutbytes, err = json.Marshal(reslist)\n\treturn\n}", "func (rpcMethod *RPCMethod) listKeys(response *ResponseParameters) error {\n\n\t//open a read transaction\n\trpcMethod.rpcServer.boltDB.View(func(tx *bolt.Tx) error {\n\t\tvar cursor *bolt.Cursor\n\t\tcursor = tx.Cursor()\n\n\t\t//append to reselt the list of buckets\n\t\tresponse.Result = make([]interface{}, 0, 10)\n\t\tfor k, _ := cursor.First(); k != nil; k, _ = cursor.Next() {\n\t\t\trpcMethod.rpcServer.logger.Println(\"BUCKET \", string(k))\n\t\t\tresponse.Result = append(response.Result, string(k))\n\t\t}\n\n\t\treturn 
nil\n\t})\n\n\tresponse.Error = nil\n\n\treturn nil\n\n}", "func (rpcMethod *RPCMethod) listKeys(response *ResponseParameters) error {\n\n\t//open a read transaction\n\trpcMethod.rpcServer.boltDB.View(func(tx *bolt.Tx) error {\n\t\tvar cursor *bolt.Cursor\n\t\tcursor = tx.Cursor()\n\n\t\t//append to reselt the list of buckets\n\t\tresponse.Result = make([]interface{}, 0, 10)\n\t\tfor k, _ := cursor.First(); k != nil; k, _ = cursor.Next() {\n\t\t\trpcMethod.rpcServer.logger.Println(\"BUCKET \", string(k))\n\t\t\tresponse.Result = append(response.Result, string(k))\n\t\t}\n\n\t\treturn nil\n\t})\n\n\tresponse.Error = nil\n\n\treturn nil\n\n}", "func (g *gcs) List(ctx context.Context, prefix string) ([]*fs.FileInfo, error) {\n\tvar files []*fs.FileInfo\n\tit := g.bucket.Objects(ctx, &storage.Query{\n\t\tPrefix: prefix,\n\t})\n\tfor {\n\t\tattrs, err := it.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, fileinfo(attrs))\n\t}\n\treturn files, nil\n}", "func ListBuckets(w http.ResponseWriter, r *http.Request) {\n\n\tsvc := s3.New(sess)\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\t// get bucket list fomr s3 api\n\tresult, err := svc.ListBuckets(nil)\n\n\tif err != nil {\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\tpageVars.ErrorM = awsErr.Message()\n\t\t} else {\n\t\t\tpageVars.ErrorM = \"Failed to load buckets list\"\n\t\t}\n\t} else {\n\t\tpageVars.BList = result.Buckets\n\t}\n\n\trender(w, \"bucketlist\", pageVars)\n}", "func BucketGetKeysWithPrefix(tx *bolt.Tx, bucket string, prefix string, stripPrefix bool) []string {\n\tb := tx.Bucket([]byte(bucket))\n\tif b == nil {\n\t\treturn nil\n\t}\n\tc := b.Cursor()\n\tvar results []string\n\tprefixBytes := []byte(prefix)\n\tfor k, _ := c.Seek(prefixBytes); k != nil && bytes.HasPrefix(k, prefixBytes); k, _ = c.Next() {\n\t\tif stripPrefix {\n\t\t\tk = k[len(prefix):]\n\t\t}\n\t\tresults = append(results, string(k))\n\t}\n\treturn results\n}", "func (s *S3Storage) List(prefix string, maxSize int) ([]string, error) {\n\tprefix = s.addPrefix(prefix)\n\tpathSeparator := \"\"\n\tmarker := \"\"\n\n\titems := make([]string, 0, 1000)\n\tfor maxSize > 0 {\n\t\t// Don't ask for more than 1000 keys at a time. This makes\n\t\t// testing simpler because S3 will return at most 1000 keys even if you\n\t\t// ask for more, but s3test will return more than 1000 keys if you ask\n\t\t// for more. 
TODO(agf): Fix this behavior in s3test.\n\t\tmaxReqSize := 1000\n\t\tif maxSize < 1000 {\n\t\t\tmaxReqSize = maxSize\n\t\t}\n\t\tcontents, err := s.bucket.List(prefix, pathSeparator, marker, maxReqSize)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tmaxSize -= maxReqSize\n\n\t\tfor _, key := range contents.Contents {\n\t\t\titems = append(items, s.removePrefix(key.Key))\n\t\t}\n\t\tif contents.IsTruncated {\n\t\t\tmarker = s.addPrefix(items[len(items)-1])\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn items, nil\n}", "func (s *S3Store) ListBucketFiles(bucket, prefix string) (prefixs []string, keys []KeyInfo, err error) {\n\tvar req s3.ListObjectsInput\n\tvar resp *s3.ListObjectsOutput\n\treq.SetBucket(bucket)\n\treq.SetPrefix(prefix)\n\treq.SetDelimiter(\"/\")\n\tif resp, err = s.svc.ListObjects(&req); err != nil {\n\t\treturn\n\t}\n\tfor _, c := range resp.Contents {\n\t\tvar info KeyInfo\n\t\tinfo.Key = aws.StringValue(c.Key)\n\t\tinfo.Size = aws.Int64Value(c.Size)\n\t\tinfo.Stamp = aws.TimeValue(c.LastModified)\n\t\tkeys = append(keys, info)\n\t}\n\tfor _, c := range resp.CommonPrefixes {\n\t\tprefixs = append(prefixs, aws.StringValue(c.Prefix))\n\t}\n\treturn\n}", "func listq(client *storage.Client, bucketName string, prefix string, delimiter string, versions bool, selfIgnore bool) ([]*storage.ObjectAttrs, error){\n ctx := context.Background()\n var objects []*storage.ObjectAttrs\n it := client.Bucket(bucketName).Objects(ctx, &storage.Query{\n Prefix: prefix,\n Delimiter: delimiter,\n Versions: versions,\n })\n for {\n oattrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n if selfIgnore == true && oattrs.Prefix == prefix && oattrs.Prefix != \"\" {\n continue\n }\n objects = append(objects, oattrs)\n }\n return objects, nil\n}", "func (c *Client) ListBucketKeys(bucket string) ([]string, error) {\n\tres, err := c.do(\"GET\", \"/buckets/\"+bucket+\"/keys?keys=true\", nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif res.StatusCode != 200 {\n\t\tres.Body.Close()\n\t\treturn nil, fmt.Errorf(\"Status Code %d\", res.StatusCode)\n\t}\n\tbmap := make(map[string][]string)\n\tdec := json.NewDecoder(res.Body)\n\terr = dec.Decode(&bmap)\n\tres.Body.Close()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error decoding body: %s\", err.Error())\n\t}\n\tstrs, ok := bmap[\"keys\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"Unexpected body formatting.\")\n\t}\n\treturn strs, nil\n}", "func (s *S3Store) ListBuckets() (bkts []string, err error) {\n\tvar req s3.ListBucketsInput\n\tvar resp *s3.ListBucketsOutput\n\tif resp, err = s.svc.ListBuckets(&req); err != nil {\n\t\treturn\n\t}\n\tfor _, bkt := range resp.Buckets {\n\t\tbkts = append(bkts, aws.StringValue(bkt.Name))\n\t}\n\treturn\n}", "func ListS3Bucket(awsregion string) string {\n\t// aws session credentials\n\taws_session := session.Must(session.NewSessionWithOptions(session.Options{\n\t\tSharedConfigState: session.SharedConfigEnable,\n\t}))\n\n\t// s3 session\n\ts3_session := s3.New(aws_session, aws.NewConfig().WithRegion(awsregion))\n\n\t// create bucket\n\tinput := &s3.ListBucketsInput{}\n\tresult, err := s3_session.ListBuckets(input)\n\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\tif len(result.Buckets) == 0 {\n\t\treturn \"Bucket list is empty\"\n\t}\n\treturn result.String()\n}", "func (l *pydioObjects) ListObjectsV2(ctx context.Context, bucket, prefix, continuationToken, delimiter string, maxKeys int, fetchOwner bool, startAfter string) (result 
minio.ListObjectsV2Info, err error) {\n\n\tobjects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, false)\n\tif err != nil {\n\t\treturn result, pydioToMinioError(err, bucket, prefix)\n\t}\n\n\t// log.Printf(\"\\n[ListObjectsV2] Returning %d objects and %d prefixes (V2) for prefix %s\\n\", len(objects), len(prefixes), prefix)\n\n\treturn minio.ListObjectsV2Info{\n\t\tIsTruncated: false,\n\t\tPrefixes: prefixes,\n\t\tObjects: objects,\n\n\t\tContinuationToken: \"\",\n\t\tNextContinuationToken: \"\",\n\t}, nil\n\n}", "func (sb S3Buckets) getAll(configObj config.Config) ([]*string, error) {\n\tinput := &s3.ListBucketsInput{}\n\toutput, err := sb.Client.ListBuckets(input)\n\tif err != nil {\n\t\treturn nil, errors.WithStackTrace(err)\n\t}\n\n\tvar names []*string\n\ttotalBuckets := len(output.Buckets)\n\tif totalBuckets == 0 {\n\t\treturn nil, nil\n\t}\n\n\tbatchSize := sb.MaxBatchSize()\n\ttotalBatches := int(math.Ceil(float64(totalBuckets) / float64(batchSize)))\n\tbatchCount := 1\n\n\t// Batch the get operation\n\tfor batchStart := 0; batchStart < totalBuckets; batchStart += batchSize {\n\t\tbatchEnd := int(math.Min(float64(batchStart)+float64(batchSize), float64(totalBuckets)))\n\t\tlogging.Logger.Debugf(\"Getting - %d-%d buckets of batch %d/%d\", batchStart+1, batchEnd, batchCount, totalBatches)\n\t\ttargetBuckets := output.Buckets[batchStart:batchEnd]\n\t\tbucketNames, err := sb.getBucketNames(targetBuckets, configObj)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tnames = append(names, bucketNames...)\n\t\tbatchCount++\n\t}\n\n\treturn names, nil\n}", "func (api *bucketAPI) ApisrvList(ctx context.Context, opts *api.ListWatchOptions) ([]*objstore.Bucket, error) {\n\tif api.ct.resolver != nil {\n\t\tapicl, err := api.ct.apiClient()\n\t\tif err != nil {\n\t\t\tapi.ct.logger.Errorf(\"Error creating API server clent. 
Err: %v\", err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn apicl.ObjstoreV1().Bucket().List(context.Background(), opts)\n\t}\n\n\t// List from local cache\n\tctkitObjs, err := api.List(ctx, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar ret []*objstore.Bucket\n\tfor _, obj := range ctkitObjs {\n\t\tret = append(ret, &obj.Bucket)\n\t}\n\treturn ret, nil\n}", "func ListBuckets(showCreationDate bool) error {\r\n\r\n\tresult, err := s3Clinet.ListBuckets(nil)\r\n\r\n\tif err != nil {\r\n\t\tif awsErr, ok := err.(awserr.Error); ok {\r\n\t\t\treturn errors.New(awsErr.Message())\r\n\t\t}\r\n\t\treturn errors.New(err.Error())\r\n\t}\r\n\r\n\tif showCreationDate {\r\n\t\tfor _, b := range result.Buckets {\r\n\t\t\tfmt.Printf(\"%s\\t%s\\n\", aws.TimeValue(b.CreationDate), aws.StringValue(b.Name))\r\n\t\t}\r\n\t} else {\r\n\t\tfor _, b := range result.Buckets {\r\n\t\t\tfmt.Printf(\"%s\\n\", aws.StringValue(b.Name))\r\n\t\t}\r\n\t}\r\n\r\n\treturn nil\r\n}", "func (sb S3Buckets) emptyBucket(bucketName *string, isVersioned bool) error {\n\t// Since the error may happen in the inner function handler for the pager, we need a function scoped variable that\n\t// the inner function can set when there is an error.\n\tvar errOut error\n\tpageId := 1\n\n\t// Handle versioned buckets.\n\tif isVersioned {\n\t\terr := sb.Client.ListObjectVersionsPages(\n\t\t\t&s3.ListObjectVersionsInput{\n\t\t\t\tBucket: bucketName,\n\t\t\t\tMaxKeys: aws.Int64(int64(sb.MaxBatchSize())),\n\t\t\t},\n\t\t\tfunc(page *s3.ListObjectVersionsOutput, lastPage bool) (shouldContinue bool) {\n\t\t\t\tlogging.Logger.Debugf(\"Deleting page %d of object versions (%d objects) from bucket %s\", pageId, len(page.Versions), aws.StringValue(bucketName))\n\t\t\t\tif err := sb.deleteObjectVersions(bucketName, page.Versions); err != nil {\n\t\t\t\t\tlogging.Logger.Errorf(\"Error deleting objects versions for page %d from bucket %s: %s\", pageId, aws.StringValue(bucketName), err)\n\t\t\t\t\terrOut = err\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t\tlogging.Logger.Debugf(\"[OK] - deleted page %d of object versions (%d objects) from bucket %s\", pageId, len(page.Versions), aws.StringValue(bucketName))\n\n\t\t\t\tlogging.Logger.Debugf(\"Deleting page %d of deletion markers (%d deletion markers) from bucket %s\", pageId, len(page.DeleteMarkers), aws.StringValue(bucketName))\n\t\t\t\tif err := sb.deleteDeletionMarkers(bucketName, page.DeleteMarkers); err != nil {\n\t\t\t\t\tlogging.Logger.Debugf(\"Error deleting deletion markers for page %d from bucket %s: %s\", pageId, aws.StringValue(bucketName), err)\n\t\t\t\t\terrOut = err\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t\tlogging.Logger.Debugf(\"[OK] - deleted page %d of deletion markers (%d deletion markers) from bucket %s\", pageId, len(page.DeleteMarkers), aws.StringValue(bucketName))\n\n\t\t\t\tpageId++\n\t\t\t\treturn true\n\t\t\t},\n\t\t)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif errOut != nil {\n\t\t\treturn errOut\n\t\t}\n\t\treturn nil\n\t}\n\n\t// Handle non versioned buckets.\n\terr := sb.Client.ListObjectsV2Pages(\n\t\t&s3.ListObjectsV2Input{\n\t\t\tBucket: bucketName,\n\t\t\tMaxKeys: aws.Int64(int64(sb.MaxBatchSize())),\n\t\t},\n\t\tfunc(page *s3.ListObjectsV2Output, lastPage bool) (shouldContinue bool) {\n\t\t\tlogging.Logger.Debugf(\"Deleting object page %d (%d objects) from bucket %s\", pageId, len(page.Contents), aws.StringValue(bucketName))\n\t\t\tif err := sb.deleteObjects(bucketName, page.Contents); err != nil {\n\t\t\t\tlogging.Logger.Errorf(\"Error deleting objects 
for page %d from bucket %s: %s\", pageId, aws.StringValue(bucketName), err)\n\t\t\t\terrOut = err\n\t\t\t\treturn false\n\t\t\t}\n\t\t\tlogging.Logger.Debugf(\"[OK] - deleted object page %d (%d objects) from bucket %s\", pageId, len(page.Contents), aws.StringValue(bucketName))\n\n\t\t\tpageId++\n\t\t\treturn true\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif errOut != nil {\n\t\treturn errOut\n\t}\n\treturn nil\n}", "func getBucketList(arg string) error {\n\taddress, err := util.GetAddress(arg)\n\tif err != nil {\n\t\treturn output.NewError(output.AddressError, \"\", err)\n\t}\n\tbl, err := getBucketListByAddress(address)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar bucketlist []*bucket\n\tfor _, b := range bl.Buckets {\n\t\tbucket, err := newBucket(b)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tbucketlist = append(bucketlist, bucket)\n\t}\n\tmessage := bucketlistMessage{\n\t\tNode: config.ReadConfig.Endpoint,\n\t\tBucketlist: bucketlist,\n\t}\n\tfmt.Println(message.String())\n\treturn nil\n}", "func listFiles(svc *storage.Service, bucketName string, filePrefix string) ([]string, error) {\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\ttoken := \"\"\n\tfor {\n\t\tcall := svc.Objects.List(bucketName)\n\t\tcall.Prefix(filePrefix)\n\t\tif token != \"\" {\n\t\t\tcall = call.PageToken(token)\n\t\t}\n\t\tres, err := call.Do()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, object := range res.Items {\n\t\t\tfiles = append(files, object.Name)\n\t\t}\n\t\tif token = res.NextPageToken; token == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn files, nil\n}", "func (hp *hdfsProvider) ListObjects(bck *meta.Bck, msg *apc.LsoMsg, lst *cmn.LsoResult) (int, error) {\n\tvar (\n\t\th = cmn.BackendHelpers.HDFS\n\t\tidx int\n\t)\n\tmsg.PageSize = calcPageSize(msg.PageSize, hp.MaxPageSize())\n\n\terr := hp.c.Walk(bck.Props.Extra.HDFS.RefDirectory, func(path string, fi os.FileInfo, err error) error {\n\t\tif err != nil {\n\t\t\tif cos.IsEOF(err) {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t\tif uint(len(lst.Entries)) >= msg.PageSize {\n\t\t\treturn skipDir(fi)\n\t\t}\n\t\tobjName := strings.TrimPrefix(strings.TrimPrefix(path, bck.Props.Extra.HDFS.RefDirectory), string(filepath.Separator))\n\t\tif msg.Prefix != \"\" {\n\t\t\tif fi.IsDir() {\n\t\t\t\tif !cmn.DirHasOrIsPrefix(objName, msg.Prefix) {\n\t\t\t\t\treturn skipDir(fi)\n\t\t\t\t}\n\t\t\t} else if !cmn.ObjHasPrefix(objName, msg.Prefix) {\n\t\t\t\treturn skipDir(fi)\n\t\t\t}\n\t\t}\n\t\tif msg.ContinuationToken != \"\" && objName <= msg.ContinuationToken {\n\t\t\treturn nil\n\t\t}\n\t\tif msg.StartAfter != \"\" && objName <= msg.StartAfter {\n\t\t\treturn nil\n\t\t}\n\t\tif fi.IsDir() {\n\t\t\treturn nil\n\t\t}\n\n\t\tvar entry *cmn.LsoEntry\n\t\tif idx < len(lst.Entries) {\n\t\t\tentry = lst.Entries[idx]\n\t\t} else {\n\t\t\tentry = &cmn.LsoEntry{Name: objName}\n\t\t\tlst.Entries = append(lst.Entries, entry)\n\t\t}\n\t\tidx++\n\t\tentry.Size = fi.Size()\n\t\tif msg.WantProp(apc.GetPropsChecksum) {\n\t\t\tfr, err := hp.c.Open(path)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer fr.Close()\n\t\t\tcksum, err := fr.Checksum()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif v, ok := h.EncodeCksum(cksum); ok {\n\t\t\t\tentry.Checksum = v\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn hdfsErrorToAISError(err)\n\t}\n\tlst.Entries = lst.Entries[:idx]\n\t// Set continuation token only if we reached the page size.\n\tif 
uint(len(lst.Entries)) >= msg.PageSize {\n\t\tlst.ContinuationToken = lst.Entries[len(lst.Entries)-1].Name\n\t}\n\treturn 0, nil\n}", "func getAllBuckets() ([]*Bucket, error) {\n\tbuckets := make([]*Bucket, 0)\n\n\tsvc, err := s3Service(\"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp, err := svc.ListBuckets(&s3.ListBucketsInput{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, b := range resp.Buckets {\n\t\tbucket := &Bucket{Name: *b.Name, CreationDate: *b.CreationDate}\n\t\tbuckets = append(buckets, bucket)\n\t}\n\treturn buckets, nil\n}", "func ListObjectV1Handler(w http.ResponseWriter, r *http.Request) {\n\t// Future Work\n\t// TODO: Support Marker\n\t// TODO: Support Paging\n\n\tv := mux.Vars(r)\n\tbucket, err := GetInterceptorBucket(v[\"bucket\"])\n\tif err != nil {\n\t\tSendNoSuchBucketError(v[\"bucket\"], w, r)\n\t\treturn\n\t}\n\n\tuquery := r.URL.Query()\n\treadBucket := bucket.GetReadBucket()\n\twriteBucket := bucket.GetWriteBucket()\n\tri := listObjectInput(readBucket, uquery)\n\twi := listObjectInput(writeBucket, uquery)\n\trequestBuckets := []*db.S3Bucket{readBucket, writeBucket}\n\n\trchan := make(chan listObjectV1ResponseResult)\n\tgo getListObjects(readBucket, 100, ri, rchan)\n\tgo getListObjects(writeBucket, 1, wi, rchan)\n\n\tresults := make([]listObjectV1ResponseResult, 2)\n\tfor i := range requestBuckets {\n\t\tresults[i] = <-rchan\n\t\tif results[i].Error != nil {\n\t\t\t// TODO: Implement Error handling correctly\n\t\t\tSendInternalError(\"Something Happend. Maybe your bucket settings is wrong.\", w, r)\n\t\t\treturn\n\t\t}\n\t}\n\n\tfr := mergeListObjectResponse(bucket, results)\n\tapi.SendSuccessXml(w, *fr)\n}", "func (c *BaseController) ListObjects(r *web.Request) (*web.Response, error) {\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Getting all %ss\", c.objectType)\n\tobjectList, err := c.repository.List(ctx, c.objectType, query.CriteriaForContext(ctx)...)\n\tif err != nil {\n\t\treturn nil, util.HandleStorageError(err, string(c.objectType))\n\t}\n\n\tfor i := 0; i < objectList.Len(); i++ {\n\t\tobj := objectList.ItemAt(i)\n\t\tstripCredentials(ctx, obj)\n\t}\n\n\treturn util.NewJSONResponse(http.StatusOK, objectList)\n}", "func (c *Client) ListS3DirOneLevel(bucket string, s3Dir string, maxResults *int64, startAfter *string) ([]string, error) {\n\ts3Dir = s.EnsureSuffix(s3Dir, \"/\")\n\n\tallNames := strset.New()\n\n\terr := c.S3Iterator(bucket, s3Dir, true, nil, startAfter, func(object *s3.Object) (bool, error) {\n\t\trelativePath := strings.TrimPrefix(*object.Key, s3Dir)\n\t\toneLevelPath := strings.Split(relativePath, \"/\")[0]\n\t\tallNames.Add(oneLevelPath)\n\n\t\tif maxResults != nil && int64(len(allNames)) >= *maxResults {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn true, nil\n\t})\n\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, S3Path(bucket, s3Dir))\n\t}\n\n\treturn allNames.SliceSorted(), nil\n}", "func (c *Client) GetBuckets() ([]string, error) {\n\tres, err := c.do(\"GET\", \"/buckets?buckets=true\", nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif res.StatusCode != 200 {\n\t\tres.Body.Close()\n\t\treturn nil, fmt.Errorf(\"Status Code %d\", res.StatusCode)\n\t}\n\n\tbmap := make(map[string][]string)\n\tdec := json.NewDecoder(res.Body)\n\terr = dec.Decode(&bmap)\n\tres.Body.Close()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Error decoding body: %s\", err.Error())\n\t}\n\n\tstrs, ok := bmap[\"buckets\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"Unexpected body formatting.\")\n\t}\n\treturn strs, nil\n}", "func 
bucketParts(bucket string) (bucketname, path string) {\n\ts3Prefix := \"s3://\"\n\tif strings.HasPrefix(bucket, s3Prefix) {\n\t\tbucket = strings.Replace(bucket, s3Prefix, \"\", 1)\n\t}\n\tparts := strings.SplitN(bucket, \"/\", 2)\n\n\tif len(parts) <= 1 {\n\t\tpath = \"\"\n\t} else {\n\t\tpath = parts[1]\n\t}\n\treturn parts[0], path\n}", "func getObjects(path string) ([]ObjectIdent, error) {\n\tfset := token.NewFileSet()\n\tm, err := parsePath(path, fset)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tconf := &types.Config{\n\t\tIgnoreFuncBodies: true,\n\t\tDisableUnusedImportCheck: true,\n\t\tImporter: importer.Default(),\n\t}\n\terrCh := make(chan error)\n\tvar sharedChs []<-chan ObjectIdent\n\tvar result []ObjectIdent\n\n\tfor _, tree := range m {\n\t\tc := make(chan ObjectIdent)\n\t\tsharedChs = append(sharedChs, c)\n\t\tgo func(tree *ast.Package) {\n\t\t\terrCh <- getObjectsPkg(tree, conf, fset, c)\n\t\t}(tree)\n\t}\n\n\tfinalCh := converge(sharedChs)\n\tfor {\n\t\tselect {\n\t\tcase obj, ok := <-finalCh:\n\t\t\tif !ok {\n\t\t\t\treturn result, nil\n\t\t\t}\n\t\t\tresult = append(result, obj)\n\t\tcase err := <-errCh:\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n}", "func ListBuckets(svc *s3.S3) []*s3.Bucket {\n\n\tresult, err := svc.ListBuckets(nil)\n\tExitErrorf(\"Unable to list buckets, %v\", err)\n\n\tfmt.Println(\"Buckets:\")\n\tfor _, b := range result.Buckets {\n\t\tfmt.Printf(\"* %s created on %s\\n\", aws.StringValue(b.Name), aws.TimeValue(b.CreationDate))\n\t}\n\n\treturn result.Buckets\n}", "func (o *ObjectNode) getBucketV1Handler(w http.ResponseWriter, r *http.Request) {\n\t// check args\n\t_, bucket, _, vl, err := o.parseRequestParams(r)\n\tif err != nil {\n\t\tlog.LogErrorf(\"getBucketV1Handler: parse request parameters fail, requestID(%v) err(%v)\",\n\t\t\tRequestIDFromRequest(r), err)\n\t\t_ = NoSuchBucket.ServeResponse(w, r)\n\t\treturn\n\t}\n\n\t// get options\n\tmarker := r.URL.Query().Get(ParamMarker)\n\tprefix := r.URL.Query().Get(ParamPrefix)\n\tmaxKeys := r.URL.Query().Get(ParamMaxKeys)\n\tdelimiter := r.URL.Query().Get(ParamPartDelimiter)\n\n\tvar maxKeysInt uint64\n\tif maxKeys != \"\" {\n\t\tmaxKeysInt, err = strconv.ParseUint(maxKeys, 10, 16)\n\t\tif err != nil {\n\t\t\tlog.LogErrorf(\"getBucketV1Handler: parse max key fail, requestID(%v) err(%v)\", RequestIDFromRequest(r), err)\n\t\t\t_ = InvalidArgument.ServeResponse(w, r)\n\t\t\treturn\n\t\t}\n\t\tif maxKeysInt > MaxKeys {\n\t\t\tmaxKeysInt = MaxKeys\n\t\t}\n\t} else {\n\t\tmaxKeysInt = uint64(MaxKeys)\n\t}\n\n\tlistBucketRequest := &ListBucketRequestV1{\n\t\tprefix: prefix,\n\t\tdelimiter: delimiter,\n\t\tmarker: marker,\n\t\tmaxKeys: maxKeysInt,\n\t}\n\n\tfsFileInfos, nextMarker, isTruncated, prefixes, err := vl.ListFilesV1(listBucketRequest)\n\tif err != nil {\n\t\tlog.LogErrorf(\"getBucketV1Handler: list file fail, requestID(%v), err(%v)\", r.URL, err)\n\t\t_ = InvalidArgument.ServeResponse(w, r)\n\t\treturn\n\t}\n\n\t// get owner\n\taceesKey, _ := vl.OSSSecure()\n\tbucketOwner := NewBucketOwner(aceesKey)\n\tvar contents = make([]*Content, 0)\n\tif len(fsFileInfos) > 0 {\n\t\tfor _, fsFileInfo := range fsFileInfos {\n\t\t\tcontent := &Content{\n\t\t\t\tKey: fsFileInfo.Path,\n\t\t\t\tLastModified: formatTimeISO(fsFileInfo.ModifyTime),\n\t\t\t\tETag: fsFileInfo.ETag,\n\t\t\t\tSize: int(fsFileInfo.Size),\n\t\t\t\tStorageClass: StorageClassStandard,\n\t\t\t\tOwner: bucketOwner,\n\t\t\t}\n\t\t\tcontents = append(contents, content)\n\t\t}\n\t}\n\n\tvar commonPrefixes = 
make([]*CommonPrefix, 0)\n\tfor _, prefix := range prefixes {\n\t\tcommonPrefix := &CommonPrefix{\n\t\t\tPrefix: prefix,\n\t\t}\n\t\tcommonPrefixes = append(commonPrefixes, commonPrefix)\n\t}\n\n\tlistBucketResult := &ListBucketResult{\n\t\tBucket: bucket,\n\t\tPrefix: prefix,\n\t\tMarker: marker,\n\t\tMaxKeys: int(maxKeysInt),\n\t\tDelimiter: delimiter,\n\t\tIsTruncated: isTruncated,\n\t\tNextMarker: nextMarker,\n\t\tContents: contents,\n\t\tCommonPrefixes: commonPrefixes,\n\t}\n\n\tvar bytes []byte\n\tvar marshalError error\n\tif bytes, marshalError = MarshalXMLEntity(listBucketResult); marshalError != nil {\n\t\tlog.LogErrorf(\"getBucketV1Handler: marshal result fail, requestID(%v) err(%v)\", RequestIDFromRequest(r), err)\n\t\t_ = InvalidArgument.ServeResponse(w, r)\n\t\treturn\n\t}\n\n\t// set response header\n\tw.Header().Set(HeaderNameContentType, HeaderValueContentTypeXML)\n\tw.Header().Set(HeaderNameContentLength, strconv.Itoa(len(bytes)))\n\tw.Write(bytes)\n\n\treturn\n}", "func listFiles(prefix, delimiter, marker string, maxKeys int, b *s3.Bucket) (files []s3.Key, err error) {\n\tresp, err := b.List(prefix, delimiter, marker, maxKeys)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// append to files\n\tfor _, fl := range resp.Contents {\n\t\tif strings.Contains(fl.Key, \"index.html\") || strings.Contains(fl.Key, \"static\") || strings.Contains(fl.Key, \"logs\") {\n\t\t\tcontinue\n\t\t}\n\n\t\tfiles = append(files, fl)\n\t}\n\n\t// recursion for the recursion god\n\tif resp.IsTruncated && resp.NextMarker != \"\" {\n\t\tf, err := listFiles(resp.Prefix, resp.Delimiter, resp.NextMarker, resp.MaxKeys, b)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// append to files\n\t\tfiles = append(files, f...)\n\t}\n\n\treturn files, nil\n}", "func (*hdfsProvider) ListBuckets(cmn.QueryBcks) (buckets cmn.Bcks, errCode int, err error) {\n\tdebug.Assert(false)\n\treturn\n}", "func listFiles(prefix, delimiter, marker string, maxKeys int, b *s3.Bucket) (files []s3.Key, err error) {\n\tresp, err := b.List(prefix, delimiter, marker, maxKeys)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// append to files\n\tfiles = append(files, resp.Contents...)\n\n\t// recursion for the recursion god\n\tif resp.IsTruncated && resp.NextMarker != \"\" {\n\t\tf, err := listFiles(resp.Prefix, resp.Delimiter, resp.NextMarker, resp.MaxKeys, b)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// append to files\n\t\tfiles = append(files, f...)\n\t}\n\n\treturn files, nil\n}", "func (b *fakeBosClient) ListBuckets() (*api.ListBucketsResult, error) {\n\treturn nil, fmt.Errorf(\"test\")\n}", "func ListPublicObjects(c echo.Context) error {\n\tpublicObjectStore, err := createPublicObjectStore(c)\n\tif err != nil {\n\t\tglog.Error(err)\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\th := NewPublicObjectHandler(publicObjectStore)\n\treturn h.List(c)\n}", "func (l *gcsGateway) AnonListObjects(bucket string, prefix string, marker string, delimiter string, maxKeys int) (ListObjectsInfo, error) {\n\tresult, err := l.anonClient.ListObjects(bucket, prefix, marker, delimiter, maxKeys)\n\tif err != nil {\n\t\treturn ListObjectsInfo{}, s3ToObjectError(traceError(err), bucket)\n\t}\n\n\treturn fromMinioClientListBucketResult(bucket, result), nil\n}", "func (s Client) GetVersions(prefix string) ([]string, error) {\n\tdv, err := s.ListObjects(prefix, \"/\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar versions []string\n\t// split the object prefixes to get the last 
part which is the version\n\tfor _, p := range dv.CommonPrefixes {\n\t\ts := strings.Split(strings.Trim(*p.Prefix, \"/\"), \"/\")\n\t\tversions = append(versions, s[len(s)-1])\n\t}\n\treturn versions, nil\n}", "func (m *memClient) ListBuckets(ctx context.Context) ([]string, error) {\n\treturn nil, errors.New(\"unimplemented\")\n}", "func (mcm *MinioChunkManager) ListWithPrefix(ctx context.Context, prefix string, recursive bool) ([]string, []time.Time, error) {\n\n\t// cannot use ListObjects(ctx, bucketName, Opt{Prefix:prefix, Recursive:true})\n\t// if minio has lots of objects under the provided path\n\t// recursive = true may timeout during the recursive browsing the objects.\n\t// See also: https://github.com/milvus-io/milvus/issues/19095\n\n\tvar objectsKeys []string\n\tvar modTimes []time.Time\n\n\ttasks := list.New()\n\ttasks.PushBack(prefix)\n\tfor tasks.Len() > 0 {\n\t\te := tasks.Front()\n\t\tpre := e.Value.(string)\n\t\ttasks.Remove(e)\n\n\t\t// TODO add concurrent call if performance matters\n\t\t// only return current level per call\n\t\tobjects := mcm.listMinioObjects(ctx, mcm.bucketName, minio.ListObjectsOptions{Prefix: pre, Recursive: false})\n\n\t\tfor object := range objects {\n\t\t\tif object.Err != nil {\n\t\t\t\tlog.Warn(\"failed to list with prefix\", zap.String(\"bucket\", mcm.bucketName), zap.String(\"prefix\", prefix), zap.Error(object.Err))\n\t\t\t\treturn nil, nil, object.Err\n\t\t\t}\n\n\t\t\t// with tailing \"/\", object is a \"directory\"\n\t\t\tif strings.HasSuffix(object.Key, \"/\") && recursive {\n\t\t\t\t// enqueue when recursive is true\n\t\t\t\tif object.Key != pre {\n\t\t\t\t\ttasks.PushBack(object.Key)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tobjectsKeys = append(objectsKeys, object.Key)\n\t\t\tmodTimes = append(modTimes, object.LastModified)\n\t\t}\n\t}\n\n\treturn objectsKeys, modTimes, nil\n}", "func GetBlobKeys(ctx context.Context, bucketURL string) ([]string, error) {\n\treturn GetBlobKeysWithPrefix(ctx, bucketURL, \"\")\n}", "func (c *defaultGcsClient) GetBuckets(ctxIn context.Context, project string) (buckets []string, err error) {\n\tctx, span := trace.StartSpan(ctxIn, \"(*defaultGcsClient).GetBuckets\")\n\tdefer span.End()\n\n\tbucketsIterator := c.client.Buckets(ctx, project)\n\tfor {\n\t\t// error or not found\n\t\tb, err := bucketsIterator.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn []string{}, errors.Wrap(err, fmt.Sprintf(\"Buckets.Next() failed for project %s\", project))\n\t\t}\n\t\tbuckets = append(buckets, b.Name)\n\t}\n\treturn buckets, err\n}", "func (c *ctrl) objects() []string {\n\tlist := make([]string, len(c.objs)-1)\n\tfor i, j := 0, 0; i < len(c.objs); i++ {\n\t\tif c.objs[i] == internalObject {\n\t\t\tcontinue\n\t\t}\n\t\tlist[j] = c.objs[i]\n\t\tj = j + 1\n\t}\n\treturn list\n}", "func Hashlist(url string, secure bool, accesskey string, secretkey string, bucket string) string {\n\tlog.SetFlags(log.Lshortfile)\n\n\t// New returns an Amazon S3 compatible client object. 
API compatibility (v2 or v4) is automatically\n\t// determined based on the Endpoint value.\n\ts3Client, err := minio.New(url, accesskey, secretkey, secure)\n\tif err != nil {\n\t\tjc.SendString(fmt.Sprint(err))\n\t\treturn \"ERROR\"\n\t}\n\t// Create a done channel to control 'ListObjects' go routine.\n\tdoneCh := make(chan struct{})\n\n\t// Indicate to our routine to exit cleanly upon return.\n\tdefer close(doneCh)\n\n\t// List all objects from a bucket-name with a matching prefix.\n\tvar snapshots []string\n\tfor object := range s3Client.ListObjects(bucket, \"\", secure, doneCh) {\n\t\tif object.Err != nil {\n\t\t\tjc.SendString(fmt.Sprint(object.Err))\n\t\t\treturn \"ERROR\"\n\t\t}\n\t\tmatched, err := regexp.MatchString(\".hsh$\", object.Key)\n\t\tif err != nil {\n\t\t\treturn \"ERROR\"\n\t\t}\n\t\tif matched == true {\n\t\t\tsnapshots = append(snapshots, object.Key)\n\t\t\tsnapshots = append(snapshots, \"\\n\")\n\t\t}\n\t}\n\tif len(snapshots) > 0 {\n\t\treturn strings.Join(snapshots, \"\\n\")\n\t}\n\treturn \"ERROR\"\n}", "func (dbm *DBManager) GetKeyList(bucket, prefix string) ([]string, error) {\n\tvar err error\n\tvar results []string\n\tif err = dbm.openDB(); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer dbm.closeDB()\n\n\tresults = make([]string, 0)\n\n\tseekPrefix := func(tx *boltsecTx) error {\n\t\tprefixKey := []byte(prefix)\n\n\t\tbkt := tx.Bucket([]byte(bucket))\n\n\t\tif bkt == nil {\n\t\t\treturn bolt.ErrBucketNotFound\n\t\t}\n\n\t\tcursor := bkt.Cursor()\n\t\tfor k, _ := cursor.Seek(prefixKey); k != nil && bytes.HasPrefix(k, prefixKey); k, _ = cursor.Next() {\n\t\t\tresults = append(results, string(k))\n\t\t}\n\t\treturn nil\n\t}\n\n\tif err = dbm.db.view(seekPrefix); err != nil {\n\t\tLogger.Printf(\"GetByPrefix return %s\", err)\n\t}\n\n\treturn results, err\n}", "func (zk *dbZk) List(path string) ([]string, error) {\n\tb, _ := zk.ZkCli.Exist(path)\n\tif !b {\n\t\treturn nil, nil\n\t}\n\n\tvar failed bool\n\tstarted := time.Now()\n\n\tchilds, err := zk.ZkCli.GetChildren(path)\n\tif err != nil {\n\t\tfailed = true\n\t}\n\n\tstore.ReportStorageOperatorMetrics(store.StoreOperatorFetch, started, failed)\n\treturn childs, err\n}", "func (m *Main) LoadBucketContents() {\n\t// Use the commented line to restrict the number of files loaded (for local testing)\n\t//resp, err := m.S3svc.ListObjects(&s3.ListObjectsInput{Bucket: aws.String(m.Bucket), Prefix: aws.String(m.Prefix),MaxKeys: aws.Int64(10)})\n\tresp, err := m.S3svc.ListObjects(&s3.ListObjectsInput{Bucket: aws.String(m.Bucket), Prefix: aws.String(m.Prefix)})\n\tif err != nil {\n\t\texitErrorf(\"Unable to list items in bucket %q, %v\", m.Bucket, err)\n\t}\n\tm.S3files = resp.Contents\n}", "func GetBlobKeysWithPrefix(ctx context.Context, bucketURL, filePrefix string) ([]string, error) {\n\tbucket, err := blob.OpenBucket(ctx, bucketURL)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error from blob.OpenBucket: %w\", err)\n\t}\n\tdefer bucket.Close()\n\treturn blobKeysPrefix(ctx, bucket, filePrefix)\n}", "func BucketList(kind, cloud string) ([]string, error) {\n\tvar ret []string\n\n\tkind = strings.ToLower(kind)\n\tcloud = strings.ToLower(cloud)\n\tb, ok := bucketType[kind]\n\tif !ok {\n\t\treturn ret, errors.ErrBucketInvalid\n\t}\n\tt, ok := b[cloud]\n\tif !ok {\n\t\treturn ret, errors.ErrBucketInvalid\n\t}\n\n\treturn EnumValues(t)\n}", "func (taker TakerStorageGCP) ListBuckets(project *reportProject) (gcpBuckets []*storage.Bucket, err error) {\n\tif objResponse, objErr := 
taker.storageService.Buckets.List(project.gcpProject.ProjectId).Do(); objErr == nil {\n\t\tgcpBuckets = objResponse.Items\n\t} else {\n\t\terr = objErr\n\t}\n\treturn\n}", "func listp(client *storage.Client, projectId string) ([]*storage.BucketAttrs, error) {\n ctx := context.Background()\n var buckets []*storage.BucketAttrs\n it := client.Buckets(ctx, projectId)\n for {\n battrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n buckets = append(buckets, battrs)\n }\n return buckets, nil\n}", "func (s *S3) EmptyBucket(bucket string) error {\n\tvar listResp *s3.ListObjectVersionsOutput\n\tvar err error\n\n\tbucketExists, err := s.bucketExists(bucket)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to determine the existence of bucket %s: %w\", bucket, err)\n\t}\n\n\tif !bucketExists {\n\t\treturn nil\n\t}\n\n\tlistParams := &s3.ListObjectVersionsInput{\n\t\tBucket: aws.String(bucket),\n\t}\n\t// Remove all versions of all objects.\n\tfor {\n\t\tlistResp, err = s.s3Client.ListObjectVersions(listParams)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"list objects for bucket %s: %w\", bucket, err)\n\t\t}\n\t\tvar objectsToDelete []*s3.ObjectIdentifier\n\t\tfor _, object := range listResp.Versions {\n\t\t\tobjectsToDelete = append(objectsToDelete, &s3.ObjectIdentifier{\n\t\t\t\tKey: object.Key,\n\t\t\t\tVersionId: object.VersionId,\n\t\t\t})\n\t\t}\n\t\t// After deleting other versions, remove delete markers version.\n\t\t// For info on \"delete marker\": https://docs.aws.amazon.com/AmazonS3/latest/dev/DeleteMarker.html\n\t\tfor _, deleteMarker := range listResp.DeleteMarkers {\n\t\t\tobjectsToDelete = append(objectsToDelete, &s3.ObjectIdentifier{\n\t\t\t\tKey: deleteMarker.Key,\n\t\t\t\tVersionId: deleteMarker.VersionId,\n\t\t\t})\n\t\t}\n\t\tif len(objectsToDelete) == 0 {\n\t\t\treturn nil\n\t\t}\n\t\tdelResp, err := s.s3Client.DeleteObjects(&s3.DeleteObjectsInput{\n\t\t\tBucket: aws.String(bucket),\n\t\t\tDelete: &s3.Delete{\n\t\t\t\tObjects: objectsToDelete,\n\t\t\t\tQuiet: aws.Bool(true), // we don't care about success values\n\t\t\t},\n\t\t})\n\t\tswitch {\n\t\tcase err != nil:\n\t\t\treturn fmt.Errorf(\"delete objects from bucket %s: %w\", bucket, err)\n\t\tcase len(delResp.Errors) > 0:\n\t\t\treturn errors.Join(\n\t\t\t\tfmt.Errorf(\"%d/%d objects failed to delete\", len(delResp.Errors), len(objectsToDelete)),\n\t\t\t\tfmt.Errorf(\"first failed on key %q: %s\", aws.StringValue(delResp.Errors[0].Key), aws.StringValue(delResp.Errors[0].Message)),\n\t\t\t)\n\t\t}\n\t\tif !aws.BoolValue(listResp.IsTruncated) {\n\t\t\treturn nil\n\t\t}\n\t\tlistParams.KeyMarker = listResp.NextKeyMarker\n\t\tlistParams.VersionIdMarker = listResp.NextVersionIdMarker\n\t}\n}" ]
[ "0.7018481", "0.66991544", "0.64845294", "0.6481506", "0.6443924", "0.6430037", "0.64253634", "0.6385843", "0.63267326", "0.6228572", "0.6141758", "0.61149603", "0.60993594", "0.6077026", "0.605615", "0.6047106", "0.6011735", "0.59979975", "0.5979807", "0.5942583", "0.5942583", "0.5934656", "0.5934081", "0.5911666", "0.5901245", "0.58956826", "0.5893085", "0.58671635", "0.58251643", "0.58159566", "0.5802651", "0.57949096", "0.5760591", "0.5749021", "0.57449913", "0.57358503", "0.5729659", "0.5656447", "0.5624625", "0.56180024", "0.5608204", "0.5599568", "0.55977255", "0.5592431", "0.55887026", "0.55750537", "0.55401987", "0.55374914", "0.5502941", "0.54557765", "0.54235077", "0.5400579", "0.5400579", "0.5381405", "0.53731334", "0.53622127", "0.5346199", "0.53452593", "0.52873194", "0.5285075", "0.5279359", "0.5273885", "0.5271756", "0.5270588", "0.5264343", "0.52579546", "0.5247991", "0.52368885", "0.52246", "0.52238584", "0.5221282", "0.5207314", "0.52015543", "0.5200471", "0.51834613", "0.5161766", "0.5145861", "0.5145528", "0.51433504", "0.51429707", "0.51423347", "0.511964", "0.510956", "0.5095949", "0.5091351", "0.5088471", "0.5084182", "0.5074471", "0.50041497", "0.49979916", "0.4990775", "0.498691", "0.49821138", "0.4979418", "0.49725428", "0.49673715", "0.49595323", "0.49355206", "0.4926484", "0.49206874" ]
0.84327096
0
ListObjectsWithPrefixInBucket : Returns the list of objects with the specified prefix, assuming the keys are '/'-delimited
func (o *ObjectStorage) ListObjectsWithPrefixInBucket(bucket *string, prefix *string) (*s3.ListObjectsOutput, error) {
	listObjectInput := &s3.ListObjectsInput{
		Bucket: bucket,
		Prefix: prefix,
	}
	return o.Client.ListObjects(listObjectInput)
}
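A minimal usage sketch follows. It assumes the ObjectStorage wrapper above is in scope with its exported Client field holding an initialized aws-sdk-go v1 *s3.S3 client; the region, bucket name, and prefix are placeholder values, not taken from the source.

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Build a session and wrap the S3 client in the (assumed in-scope) ObjectStorage type.
	sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
	store := &ObjectStorage{Client: s3.New(sess)}

	// List object keys under a placeholder prefix.
	out, err := store.ListObjectsWithPrefixInBucket(aws.String("my-bucket"), aws.String("logs/2024/"))
	if err != nil {
		log.Fatalf("list objects: %v", err)
	}
	for _, obj := range out.Contents {
		fmt.Println(aws.StringValue(obj.Key))
	}
}

Note that ListObjects (the V1 API) returns at most 1000 keys per call, so callers that need a complete listing should page using IsTruncated and Marker, or use ListObjectsV2 with its paginator helpers.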
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mcm *MinioChunkManager) ListWithPrefix(ctx context.Context, prefix string, recursive bool) ([]string, []time.Time, error) {\n\n\t// cannot use ListObjects(ctx, bucketName, Opt{Prefix:prefix, Recursive:true})\n\t// if minio has lots of objects under the provided path\n\t// recursive = true may timeout during the recursive browsing the objects.\n\t// See also: https://github.com/milvus-io/milvus/issues/19095\n\n\tvar objectsKeys []string\n\tvar modTimes []time.Time\n\n\ttasks := list.New()\n\ttasks.PushBack(prefix)\n\tfor tasks.Len() > 0 {\n\t\te := tasks.Front()\n\t\tpre := e.Value.(string)\n\t\ttasks.Remove(e)\n\n\t\t// TODO add concurrent call if performance matters\n\t\t// only return current level per call\n\t\tobjects := mcm.listMinioObjects(ctx, mcm.bucketName, minio.ListObjectsOptions{Prefix: pre, Recursive: false})\n\n\t\tfor object := range objects {\n\t\t\tif object.Err != nil {\n\t\t\t\tlog.Warn(\"failed to list with prefix\", zap.String(\"bucket\", mcm.bucketName), zap.String(\"prefix\", prefix), zap.Error(object.Err))\n\t\t\t\treturn nil, nil, object.Err\n\t\t\t}\n\n\t\t\t// with tailing \"/\", object is a \"directory\"\n\t\t\tif strings.HasSuffix(object.Key, \"/\") && recursive {\n\t\t\t\t// enqueue when recursive is true\n\t\t\t\tif object.Key != pre {\n\t\t\t\t\ttasks.PushBack(object.Key)\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tobjectsKeys = append(objectsKeys, object.Key)\n\t\t\tmodTimes = append(modTimes, object.LastModified)\n\t\t}\n\t}\n\n\treturn objectsKeys, modTimes, nil\n}", "func List(sess *session.Session, bucket string, prefix string) ([]S3Item, error) {\n\tsvc := s3.New(sess)\n\n\tdelimiter := \"/\"\n\n\tresp, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t\tDelimiter: &delimiter,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\titems := make([]S3Item, 0)\n\n\tfor _, p := range resp.CommonPrefixes {\n\t\tif strings.TrimSuffix(*(p.Prefix), \"/\") == strings.TrimSuffix(prefix, \"/\") {\n\t\t\tcontinue\n\t\t}\n\t\tpstr := *(p.Prefix)\n\t\tcutprefix := pstr[0 : len(pstr)-1]\n\t\tidxDelimiter := strings.LastIndex(cutprefix, \"/\")\n\t\tif idxDelimiter < 0 {\n\t\t\tidxDelimiter = -1\n\t\t}\n\t\titems = append(items, S3Item{\n\t\t\tType: \"directory\",\n\t\t\tName: cutprefix[idxDelimiter+1:],\n\t\t\tFullpath: *(p.Prefix),\n\t\t\tSize: sprintSize(0),\n\t\t\tLastModified: \"\",\n\t\t})\n\t}\n\tfor _, content := range resp.Contents {\n\t\tif strings.TrimSuffix(*(content.Key), \"/\") == strings.TrimSuffix(prefix, \"/\") {\n\t\t\tcontinue\n\t\t}\n\t\tkey := *(content.Key)\n\t\tidxDelimiter := strings.LastIndex(key, \"/\")\n\t\tif idxDelimiter < 0 {\n\t\t\tidxDelimiter = -1\n\t\t}\n\t\titems = append(items, S3Item{\n\t\t\tType: \"file\",\n\t\t\tName: key[idxDelimiter+1:],\n\t\t\tFullpath: key,\n\t\t\tSize: sprintSize(*(content.Size)),\n\t\t\tLastModified: content.LastModified.Format(\"2006-01-02 15:04:05\"),\n\t\t})\n\t}\n\n\treturn items, nil\n}", "func (s *S3Storage) List(prefix string, recursive bool) ([]string, error) {\n\tvar keys []string\n\n\tprefixPath := s.Filename(prefix)\n\tresult, err := s.svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: s.bucket,\n\t\tPrefix: aws.String(prefixPath),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, k := range result.Contents {\n\t\tif strings.HasPrefix(*k.Key, prefix) {\n\t\t\tkeys = append(keys, *k.Key)\n\t\t}\n\t}\n\t//\n\treturn keys, nil\n}", "func listBuckets(sess *session.Session, prefix string, t *testing.T) error {\n\t// 
Create S3 service client\n\tsvc := s3.New(sess)\n\n\tresult, err := svc.ListBuckets(nil)\n\tif err != nil {\n\t\tt.Log(\"Could not list buckets\")\n\t\treturn err\n\t}\n\n\tfor _, b := range result.Buckets {\n\t\tif strings.HasPrefix(*b.Name, prefix) {\n\t\t\tt.Log(*b.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "func BucketGetKeysWithPrefix(tx *bolt.Tx, bucket string, prefix string, stripPrefix bool) []string {\n\tb := tx.Bucket([]byte(bucket))\n\tif b == nil {\n\t\treturn nil\n\t}\n\tc := b.Cursor()\n\tvar results []string\n\tprefixBytes := []byte(prefix)\n\tfor k, _ := c.Seek(prefixBytes); k != nil && bytes.HasPrefix(k, prefixBytes); k, _ = c.Next() {\n\t\tif stripPrefix {\n\t\t\tk = k[len(prefix):]\n\t\t}\n\t\tresults = append(results, string(k))\n\t}\n\treturn results\n}", "func (s *S3Client) ListObjects(bucket, prefix string) ([]*string, error) {\n\tparams := &s3.ListObjectsV2Input{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t}\n\tobjectKeys := make([]*string, 0)\n\tfn := func(page *s3.ListObjectsV2Output, lastPage bool) bool {\n\t\tfor _, object := range page.Contents {\n\t\t\tobjectKeys = append(objectKeys, object.Key)\n\t\t}\n\t\treturn true\n\t}\n\terr := s.s3Client.ListObjectsV2Pages(params, fn)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objectKeys, nil\n}", "func (m *mockS3Client) ListObjects(in *s3.ListObjectsInput) (*s3.ListObjectsOutput, error) {\n\tvar contents []*s3.Object\n\tfor key := range m.objects {\n\t\tif strings.HasPrefix(key, *in.Prefix) {\n\t\t\tkeyPtr := new(string)\n\t\t\t*keyPtr = key\n\t\t\ttempObj := &s3.Object{\n\t\t\t\tKey: keyPtr,\n\t\t\t}\n\t\t\tcontents = append(contents, tempObj)\n\t\t}\n\t}\n\tout := &s3.ListObjectsOutput{\n\t\tPrefix: in.Prefix,\n\t\tContents: contents,\n\t}\n\treturn out, nil\n}", "func (b NeteaseNOSBackend) ListObjects(prefix string) ([]Object, error) {\n\tvar objects []Object\n\n\tprefix = pathutil.Join(b.Prefix, prefix)\n\n\tlistRequest := &model.ListObjectsRequest{\n\t\tBucket: b.Bucket,\n\t\tPrefix: prefix,\n\t\tDelimiter: \"\",\n\t\tMarker: \"\",\n\t\tMaxKeys: 100,\n\t}\n\n\tfor {\n\t\tvar lor *model.ListObjectsResult\n\t\tlor, err := b.Client.ListObjects(listRequest)\n\t\tif err != nil {\n\t\t\treturn objects, err\n\t\t}\n\n\t\tfor _, obj := range lor.Contents {\n\t\t\tpath := removePrefixFromObjectPath(prefix, obj.Key)\n\t\t\tif objectPathIsInvalid(path) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tlocal, _ := time.LoadLocation(\"Local\")\n\t\t\t// LastModified time layout in NOS is 2006-01-02T15:04:05 -0700\n\t\t\tt, _ := time.ParseInLocation(\"2006-01-02T15:04:05 -0700\", obj.LastModified, local)\n\t\t\tobject := Object{\n\t\t\t\tPath: path,\n\t\t\t\tContent: []byte{},\n\t\t\t\tLastModified: t,\n\t\t\t}\n\t\t\tobjects = append(objects, object)\n\t\t}\n\t\tif !lor.IsTruncated {\n\t\t\tbreak\n\t\t}\n\n\t}\n\n\treturn objects, nil\n}", "func ListObjects(bucketName string, keyPrefix string) (keysList []*string, err error) {\n\tsvc := s3.New(sessionutils.Session)\n\tres, err := svc.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: aws.String(keyPrefix),\n\t})\n\n\tif err != nil {\n\t\tfmt.Printf(\"Error listing bucket:\\n%v\\n\", err)\n\t\treturn keysList, err\n\t}\n\n\tif len(res.Contents) > 0 {\n\t\tfor _, obj := range res.Contents {\n\t\t\tkeysList = append(keysList, obj.Key)\n\t\t}\n\t}\n\n\treturn keysList, nil\n\n}", "func ListObjects(bucket, prefix string) ([]*s3.Object, error) {\n\n\t// Connection to s3 server\n\tstorage_connection := 
s3.New(storage_session)\n\n\t// Upload a new object\n\tobjects, err := storage_connection.ListObjects(&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn objects.Contents, nil\n}", "func (svc *GCSclient) list(bucketName string, filePrefix string) ([]string, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 300*time.Second)\n\tdefer cancel()\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\tit := svc.Bucket(bucketName).Objects(ctx, &storage.Query{Prefix: filePrefix})\n\tfor {\n\t\tobj, err := it.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, obj.Name)\n\t}\n\tsort.Strings(files)\n\treturn files, nil\n}", "func (p *bucketProvider) ListObjects(ctx context.Context, bucket, path string, recursive bool) (buckets.ObjectIterator, error) {\n\tbkt := p.client.Bucket(bucket)\n\tif path != \"\" && !strings.HasSuffix(path, \"/\") {\n\t\t// An object can have the same name as a path prefix. Append a\n\t\t// \"/\" to make sure we don't include it.\n\t\tpath = path + \"/\"\n\t}\n\n\tvar delimiter string\n\tif recursive {\n\t\tdelimiter = \"\"\n\t} else {\n\t\tdelimiter = \"/\"\n\t}\n\n\tquery := &storage.Query{\n\t\tDelimiter: delimiter,\n\t\tPrefix: path,\n\t\tVersions: false,\n\t}\n\tit := bkt.Objects(ctx, query)\n\n\treturn &iteratorAdapter{\n\t\tit: it,\n\t\tbucket: bucket,\n\t}, nil\n}", "func (l *pydioObjects) ListObjects(ctx context.Context, bucket string, prefix string, marker string, delimiter string, maxKeys int /*, versions bool*/) (loi minio.ListObjectsInfo, e error) {\n\n\t//objects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, versions)\n\tobjects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, false)\n\tif err != nil {\n\t\treturn loi, pydioToMinioError(err, bucket, prefix)\n\t}\n\n\t// log.Printf(\"[ListObjects] Returning %d objects and %d prefixes (V1) for prefix %s\\n\", len(objects), len(prefixes), prefix)\n\n\treturn minio.ListObjectsInfo{\n\t\tIsTruncated: false,\n\t\tNextMarker: \"\",\n\t\tPrefixes: prefixes,\n\t\tObjects: objects,\n\t}, nil\n\n}", "func ExampleBucket_MapPrefix() {\n\tbx, _ := buckets.Open(tempfile())\n\tdefer os.Remove(bx.Path())\n\tdefer bx.Close()\n\n\t// Create a new things bucket.\n\tthings, _ := bx.New([]byte(\"things\"))\n\n\t// Setup items to insert.\n\titems := []struct {\n\t\tKey, Value []byte\n\t}{\n\t\t{[]byte(\"A\"), []byte(\"1\")}, // `A` prefix match\n\t\t{[]byte(\"AA\"), []byte(\"2\")}, // match\n\t\t{[]byte(\"AAA\"), []byte(\"3\")}, // match\n\t\t{[]byte(\"AAB\"), []byte(\"2\")}, // match\n\t\t{[]byte(\"B\"), []byte(\"O\")},\n\t\t{[]byte(\"BA\"), []byte(\"0\")},\n\t\t{[]byte(\"BAA\"), []byte(\"0\")},\n\t}\n\n\t// Insert 'em.\n\tif err := things.Insert(items); err != nil {\n\t\tfmt.Printf(\"could not insert items in `things` bucket: %v\\n\", err)\n\t}\n\n\t// Now collect each item whose key starts with \"A\".\n\tprefix := []byte(\"A\")\n\n\t// Setup slice of items.\n\ttype item struct {\n\t\tKey, Value []byte\n\t}\n\tresults := []item{}\n\n\t// Anon func to map over matched keys.\n\tdo := func(k, v []byte) error {\n\t\tresults = append(results, item{k, v})\n\t\treturn nil\n\t}\n\n\tif err := things.MapPrefix(do, prefix); err != nil {\n\t\tfmt.Printf(\"could not map items with prefix %s: %v\\n\", prefix, err)\n\t}\n\n\tfor _, item := range results 
{\n\t\tfmt.Printf(\"%s -> %s\\n\", item.Key, item.Value)\n\t}\n\t// Output:\n\t// A -> 1\n\t// AA -> 2\n\t// AAA -> 3\n\t// AAB -> 2\n}", "func GetBlobKeysWithPrefix(ctx context.Context, bucketURL, filePrefix string) ([]string, error) {\n\tbucket, err := blob.OpenBucket(ctx, bucketURL)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error from blob.OpenBucket: %w\", err)\n\t}\n\tdefer bucket.Close()\n\treturn blobKeysPrefix(ctx, bucket, filePrefix)\n}", "func ListObjects(w http.ResponseWriter, r *http.Request) {\n\n\tsvc := s3.New(sess)\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\tif len(pageVars.BName) <= 0 {\n\t\tif len(pageVars.ErrorM) <= 0 {\n\t\t\tpageVars.ErrorM = \"Invalid bucket name\"\n\t\t}\n\t\trender(w, \"objectlist\", pageVars)\n\t} else {\n\n\t\tif len(pageVars.Prefix) <= 0 {\n\t\t\tpageVars.Prefix = \"\"\n\t\t}\n\n\t\tif len(pageVars.Delimiter) <= 0 {\n\t\t\tpageVars.Delimiter = \"\"\n\t\t}\n\n\t\t// Get the list of items\n\t\tresp, err := svc.ListObjectsV2(&s3.ListObjectsV2Input{\n\t\t\tBucket: aws.String(pageVars.BName),\n\t\t\tPrefix: aws.String(pageVars.Prefix),\n\t\t\tDelimiter: aws.String(pageVars.Delimiter),\n\t\t})\n\n\t\tif err != nil {\n\t\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\t\tpageVars.ErrorM = awsErr.Message()\n\t\t\t} else {\n\t\t\t\tpageVars.ErrorM = \"Failed to get objects\"\n\t\t\t}\n\t\t} else {\n\t\t\tfor _, o := range resp.Contents {\n\t\t\t\tvar shouldAdd = true\n\t\t\t\t// check how many / in prefix\n\t\t\t\tns := strings.Count(pageVars.Prefix, \"/\")\n\t\t\t\t// check if its folder\n\t\t\t\tif strings.HasSuffix(*o.Key, \"/\") {\t\t\t\t\t\n\t\t\t\t\t// check if its top level folder\n\t\t\t\t\tif strings.Count(*o.Key, \"/\") == (ns + 1) {\n\t\t\t\t\t\tshouldAdd = true\n\t\t\t\t\t} else {\n\t\t\t\t\t\tshouldAdd = false\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// check if its top level folder\n\t\t\t\t\tif strings.Count(*o.Key, \"/\") == ns {\n\t\t\t\t\t\tshouldAdd = true\n\t\t\t\t\t} else {\n\t\t\t\t\t\tshouldAdd = false\n\t\t\t\t\t}\n\t\t\t\t}\t\t\t\t\n\n\t\t\t\tif shouldAdd {\n\t\t\t\t\tname := *o.Key\t\t\t\t\t\n\t\t\t\t\tif len(pageVars.Prefix) > 0 {\n\t\t\t\t\t\tname = strings.Replace(name, pageVars.Prefix, \"\", -1)\n\t\t\t\t\t}\n\t\t\t\t\tif strings.HasSuffix(*o.Key, \"/\") {\n\t\t\t\t\t\tod := ObjectDetails{*o.Key, name, *o.LastModified, *o.Size, *o.StorageClass, \"Folder\"}\n\t\t\t\t\t\tpageVars.OList = append(pageVars.OList, od)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tod := ObjectDetails{*o.Key, name, *o.LastModified, *o.Size, *o.StorageClass, \"File\"}\n\t\t\t\t\t\tpageVars.OList = append(pageVars.OList, od)\n\t\t\t\t\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t}\t\t\t\t\n\t\t\t}\n\t\t\t// add folder names f prefix ends with /\n\t\t\tsl := strings.Split(pageVars.Prefix, \"/\")\n\t\t\tpp := \"\"\n\t\t\t// remove last element as its empy due to trailing /\n\t\t\tif len(sl) > 0 {\n\t\t\t\tsl = sl[:len(sl)-1]\n\t\t\t\tfor _, fld := range sl {\n\t\t\t\t\tpp = pp+fld+\"/\"\n\t\t\t\t\tpageVars.FList = append(pageVars.FList, FolderDetails{fld, pp})\t\t\t\t\t\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tpageVars.FCount = len(pageVars.FList)\n\t\t\t}\n\t\t\t\n\t\t}\n\n\t\trender(w, \"objectlist\", pageVars)\n\t}\n\n}", "func (m *memClient) ListObjects(prefix string) ([]string, error) {\n\tdefer m.Unlock()\n\tm.Lock()\n\tret := []string{}\n\tfor k := range m.store {\n\t\tret = append(ret, k)\n\t}\n\treturn ret, nil\n}", "func (l *Location) ListByPrefix(prefix string) ([]string, error) {\n\n\tvar filenames []string\n\tclient, err := 
l.fileSystem.Client(l.Authority)\n\tif err != nil {\n\t\treturn filenames, err\n\t}\n\t// start timer once action is completed\n\tdefer l.fileSystem.connTimerStart()\n\n\tfullpath := path.Join(l.Path(), prefix)\n\t// check if last char is not /, aka is not a dir, get base of path\n\tbaseprefix := \"\"\n\tr, _ := utf8.DecodeLastRuneInString(fullpath)\n\tif r != '/' {\n\t\tbaseprefix = path.Base(fullpath)\n\t}\n\tfullpath = utils.EnsureTrailingSlash(path.Dir(fullpath))\n\tfileinfos, err := client.ReadDir(fullpath)\n\tif err != nil {\n\t\treturn filenames, err\n\t}\n\n\tfor _, fileinfo := range fileinfos {\n\t\tif !fileinfo.IsDir() {\n\t\t\tname := fileinfo.Name()\n\t\t\tif baseprefix != \"\" {\n\t\t\t\tif strings.HasPrefix(name, baseprefix) {\n\t\t\t\t\tfilenames = append(filenames, name)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfilenames = append(filenames, name)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn filenames, nil\n}", "func ListPrefix(ctx context.Context, prefix string) (KeyValuePairs, error) {\n\tv, err := Client().ListPrefix(ctx, prefix)\n\tTrace(\"ListPrefix\", err, logrus.Fields{fieldPrefix: prefix, fieldNumEntries: len(v)})\n\treturn v, err\n}", "func (g *gcs) List(ctx context.Context, prefix string) ([]*fs.FileInfo, error) {\n\tvar files []*fs.FileInfo\n\tit := g.bucket.Objects(ctx, &storage.Query{\n\t\tPrefix: prefix,\n\t})\n\tfor {\n\t\tattrs, err := it.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, fileinfo(attrs))\n\t}\n\treturn files, nil\n}", "func TrimPrefix(objects []string, prefix string) []string {\n\tvar results []string\n\tfor _, object := range objects {\n\t\tresults = append(results, strings.TrimPrefix(object, prefix))\n\t}\n\treturn results\n}", "func (h *Handler) List(bucket string) ([]string, error) {\n\tklog.V(10).Info(\"List S3 Objects \", bucket)\n\n\treq := h.Client.ListObjectsRequest(&s3.ListObjectsInput{Bucket: &bucket})\n\tp := s3.NewListObjectsPaginator(req)\n\n\tvar keys []string\n\n\tfor p.Next(context.TODO()) {\n\t\tpage := p.CurrentPage()\n\t\tfor _, obj := range page.Contents {\n\t\t\tkeys = append(keys, *obj.Key)\n\t\t}\n\t}\n\n\tif err := p.Err(); err != nil {\n\t\tklog.Error(\"failed to list objects. 
error: \", err)\n\t\treturn nil, err\n\t}\n\n\tklog.V(10).Info(\"List S3 Objects result \", keys)\n\n\treturn keys, nil\n}", "func (c *Cache) List(prefix string) (items []interface{}, err error) {\n\n\tc.mu.Lock()\n\tdefer c.mu.Unlock()\n\n\tvar (\n\t\tbuf map[string]vendor.Item\n\t\tkey []byte\n\t\tprefixBuf = []byte(prefix)\n\t)\n\n\tbuf = c.db.Items()\n\n\tif len(buf) == 0 {\n\t\terr = cache.NOT_FOUND\n\t} else {\n\t\tfor k, val := range buf {\n\t\t\tkey = []byte(k)\n\t\t\tif !bytes.HasPrefix(key, prefixBuf) {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\titems = append(items, val.Object)\n\t\t}\n\n\t}\n\n\treturn\n}", "func getBlobsWithPrefix(tx *sql.Tx, prefix string) ([]*Blob, error) {\n\tvar blobs []*Blob\n\trows, err := tx.Query(\"SELECT * from blobinfo WHERE hasPrefix(digest, $1)\", prefix)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor rows.Next() {\n\t\tblob := &Blob{}\n\t\tif err := blobRowScan(rows, blob); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tblobs = append(blobs, blob)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn blobs, err\n}", "func ListObjects(service *awss3.S3, container string, filter api.ObjectFilter) ([]string, error) {\n\tvar objs []string\n\n\tvar prefix string\n\tif filter.Path != \"\" || filter.Prefix != \"\" {\n\t\tprefix = strings.Join([]string{filter.Path, filter.Prefix}, \"/\")\n\t}\n\terr := service.ListObjectsV2Pages(&awss3.ListObjectsV2Input{Bucket: aws.String(container), Prefix: aws.String(prefix)},\n\t\tfunc(out *awss3.ListObjectsV2Output, last bool) bool {\n\t\t\tfor _, o := range out.Contents {\n\t\t\t\tobjs = append(objs, *o.Key)\n\t\t\t}\n\t\t\treturn last\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn objs, err\n}", "func (be *s3) List(t backend.Type, done <-chan struct{}) <-chan string {\n\tdebug.Log(\"s3.List\", \"listing %v\", t)\n\tch := make(chan string)\n\n\tprefix := be.s3path(t, \"\")\n\n\tlistresp := be.client.ListObjects(be.bucketname, prefix, true, done)\n\n\tgo func() {\n\t\tdefer close(ch)\n\t\tfor obj := range listresp {\n\t\t\tm := strings.TrimPrefix(obj.Key, prefix)\n\t\t\tif m == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tselect {\n\t\t\tcase ch <- m:\n\t\t\tcase <-done:\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\n\treturn ch\n}", "func (s *S3Storage) List(prefix string, maxSize int) ([]string, error) {\n\tprefix = s.addPrefix(prefix)\n\tpathSeparator := \"\"\n\tmarker := \"\"\n\n\titems := make([]string, 0, 1000)\n\tfor maxSize > 0 {\n\t\t// Don't ask for more than 1000 keys at a time. This makes\n\t\t// testing simpler because S3 will return at most 1000 keys even if you\n\t\t// ask for more, but s3test will return more than 1000 keys if you ask\n\t\t// for more. 
TODO(agf): Fix this behavior in s3test.\n\t\tmaxReqSize := 1000\n\t\tif maxSize < 1000 {\n\t\t\tmaxReqSize = maxSize\n\t\t}\n\t\tcontents, err := s.bucket.List(prefix, pathSeparator, marker, maxReqSize)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tmaxSize -= maxReqSize\n\n\t\tfor _, key := range contents.Contents {\n\t\t\titems = append(items, s.removePrefix(key.Key))\n\t\t}\n\t\tif contents.IsTruncated {\n\t\t\tmarker = s.addPrefix(items[len(items)-1])\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn items, nil\n}", "func (c *storageClient) forEachObject(bucket, prefix string, recursive bool, fn func(*s3types.Object) error) error {\n\t// The \"/\" value can be used at command-level to mean that we want to\n\t// list from the root of the bucket, but the actual bucket root is an\n\t// empty prefix.\n\tif prefix == \"/\" {\n\t\tprefix = \"\"\n\t}\n\n\tdirs := make(map[string]struct{})\n\n\tvar ct string\n\tfor {\n\t\tres, err := c.ListObjectsV2(gContext, &s3.ListObjectsV2Input{\n\t\t\tBucket: aws.String(bucket),\n\t\t\tPrefix: aws.String(prefix),\n\t\t\tContinuationToken: aws.String(ct),\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tct = aws.ToString(res.NextContinuationToken)\n\n\t\tfor _, o := range res.Contents {\n\t\t\t// If not invoked in recursive mode, split object keys on the \"/\" separator and skip\n\t\t\t// objects \"below\" the base directory prefix.\n\t\t\tparts := strings.SplitN(strings.TrimPrefix(aws.ToString(o.Key), prefix), \"/\", 2)\n\t\t\tif len(parts) > 1 && !recursive {\n\t\t\t\tdir := path.Base(parts[0])\n\t\t\t\tif _, ok := dirs[dir]; !ok {\n\t\t\t\t\tdirs[dir] = struct{}{}\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// If the prefix doesn't end with a trailing prefix separator (\"/\"),\n\t\t\t// consider it as a single object key and match only one exact result\n\t\t\t// (except in recursive mode, where the prefix is expected to be a\n\t\t\t// \"directory\").\n\t\t\tif !recursive && !strings.HasSuffix(prefix, \"/\") && aws.ToString(o.Key) != prefix {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\to := o\n\t\t\tif err := fn(&o); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tif !res.IsTruncated {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn nil\n}", "func listq(client *storage.Client, bucketName string, prefix string, delimiter string, versions bool, selfIgnore bool) ([]*storage.ObjectAttrs, error){\n ctx := context.Background()\n var objects []*storage.ObjectAttrs\n it := client.Bucket(bucketName).Objects(ctx, &storage.Query{\n Prefix: prefix,\n Delimiter: delimiter,\n Versions: versions,\n })\n for {\n oattrs, err := it.Next()\n if err == iterator.Done {\n break\n }\n if err != nil {\n return nil, err\n }\n if selfIgnore == true && oattrs.Prefix == prefix && oattrs.Prefix != \"\" {\n continue\n }\n objects = append(objects, oattrs)\n }\n return objects, nil\n}", "func ListAllForPrefix(db *badger.DB, sk StorageKey, id string) ([]string, error) {\n\ttotal := make([]string, 0, 20)\n\n\tpfx := MakeKey(sk, id)\n\topts := badger.DefaultIteratorOptions\n\topts.PrefetchValues = false\n\topts.Prefix = pfx\n\terr := db.View(func(tx *badger.Txn) error {\n\t\tit := tx.NewIterator(opts)\n\t\tdefer it.Close()\n\t\tfor it.Seek(pfx); it.ValidForPrefix(pfx); it.Next() {\n\t\t\tkeybuf := it.Item().Key()\n\t\t\tk := string(keybuf[len(pfx):])\n\t\t\ttotal = append(total, string(k))\n\t\t}\n\t\treturn nil\n\t})\n\n\treturn total, err\n}", "func ListBucket(a S3Account, bucket string) {\n\treq := NewRequest(a, \"GET\", bucket, \"/\", nil)\n\tbody := req.Send()\n\tlbr := 
ListBucketResult{}\n\txml.Unmarshal(body, &lbr)\n\tfor _,cp := range lbr.CommonPrefixes {\n\t\tcp.pp()\n\t}\n\tfor _,c := range lbr.Contents {\n\t\tc.pp()\n\t}\n}", "func (dbm *DBManager) GetKeyList(bucket, prefix string) ([]string, error) {\n\tvar err error\n\tvar results []string\n\tif err = dbm.openDB(); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer dbm.closeDB()\n\n\tresults = make([]string, 0)\n\n\tseekPrefix := func(tx *boltsecTx) error {\n\t\tprefixKey := []byte(prefix)\n\n\t\tbkt := tx.Bucket([]byte(bucket))\n\n\t\tif bkt == nil {\n\t\t\treturn bolt.ErrBucketNotFound\n\t\t}\n\n\t\tcursor := bkt.Cursor()\n\t\tfor k, _ := cursor.Seek(prefixKey); k != nil && bytes.HasPrefix(k, prefixKey); k, _ = cursor.Next() {\n\t\t\tresults = append(results, string(k))\n\t\t}\n\t\treturn nil\n\t}\n\n\tif err = dbm.db.view(seekPrefix); err != nil {\n\t\tLogger.Printf(\"GetByPrefix return %s\", err)\n\t}\n\n\treturn results, err\n}", "func (dbm *DBManager) GetByPrefix(bucket, prefix string) ([][]byte, error) {\n\tvar err error\n\tvar results [][]byte\n\tif err = dbm.openDB(); err != nil {\n\t\treturn nil, err\n\t}\n\tdefer dbm.closeDB()\n\n\tresults = make([][]byte, 0)\n\n\tseekPrefix := func(tx *boltsecTx) error {\n\t\tprefixKey := []byte(prefix)\n\n\t\tbkt := tx.Bucket([]byte(bucket))\n\n\t\tif bkt == nil {\n\t\t\treturn bolt.ErrBucketNotFound\n\t\t}\n\n\t\tcursor := bkt.Cursor()\n\t\tfor k, v := cursor.Seek(prefixKey); bytes.HasPrefix(k, prefixKey); k, v = cursor.Next() {\n\n\t\t\tif dbm.cryptor != nil {\n\t\t\t\t//secret key is set, decrypt the content before return\n\t\t\t\tcontent := make([]byte, len(v))\n\t\t\t\tcopy(content, v)\n\n\t\t\t\tdec, err := dbm.cryptor.decrypt(content)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn errors.New(\"Decrypt error from db\")\n\t\t\t\t}\n\t\t\t\tresults = append(results, dec)\n\t\t\t} else {\n\t\t\t\tresults = append(results, v)\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tif err = dbm.db.view(seekPrefix); err != nil {\n\t\tLogger.Printf(\"GetByPrefix return %s\", err)\n\t}\n\n\treturn results, err\n}", "func (o ClusterS3ImportOutput) BucketPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ClusterS3Import) *string { return v.BucketPrefix }).(pulumi.StringPtrOutput)\n}", "func (rad *Radix) Prefix(prefix string) *list.List {\n\trad.lock.Lock()\n\tdefer rad.lock.Unlock()\n\tl := list.New()\n\tn, _ := rad.root.lookup([]rune(prefix))\n\tif n == nil {\n\t\treturn l\n\t}\n\tn.addToList(l)\n\treturn l\n}", "func (c *Client) List(prefix string) ([]*store.KVPair, error) {\n\treq := &api.Request{\n\t\tAction: api.List,\n\t\tKey: prefix,\n\t}\n\n\tres, err := c.do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar ls []*store.KVPair\n\tfor _, kv := range res.List {\n\t\tls = append(ls, kvToLibKV(kv))\n\t}\n\tif len(ls) == 0 {\n\t\treturn nil, store.ErrKeyNotFound\n\t}\n\n\treturn ls, nil\n}", "func (o *ObjectStorage) ListTopLevelObjectsInBucket(bucket *string) (*s3.ListObjectsOutput, error) {\n\tdelimiter := \"/\"\n\tlistObjectInput := &s3.ListObjectsInput{\n\t\tBucket: bucket,\n\t\tDelimiter: &delimiter,\n\t}\n\n\treturn o.Client.ListObjects(listObjectInput)\n}", "func (b *fakeBosClient) ListObjects(bucket string, args *api.ListObjectsArgs) (\n\t*api.ListObjectsResult, error) {\n\tvar (\n\t\tmarker int\n\t\terr error\n\t)\n\tif args.Marker == \"\" {\n\t\tmarker, err = strconv.Atoi(bucket)\n\t} else {\n\t\tmarker, err = strconv.Atoi(args.Marker)\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif marker < len(b.results) {\n\t\treturn b.results[marker], 
nil\n\t}\n\treturn nil, fmt.Errorf(\"Error in list objects\")\n}", "func listFiles(svc *storage.Service, bucketName string, filePrefix string) ([]string, error) {\n\t// List all objects in a bucket using pagination\n\tvar files []string\n\ttoken := \"\"\n\tfor {\n\t\tcall := svc.Objects.List(bucketName)\n\t\tcall.Prefix(filePrefix)\n\t\tif token != \"\" {\n\t\t\tcall = call.PageToken(token)\n\t\t}\n\t\tres, err := call.Do()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfor _, object := range res.Items {\n\t\t\tfiles = append(files, object.Name)\n\t\t}\n\t\tif token = res.NextPageToken; token == \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn files, nil\n}", "func (o InstanceS3ImportOutput) BucketPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v InstanceS3Import) *string { return v.BucketPrefix }).(pulumi.StringPtrOutput)\n}", "func (w *MockSafePointKV) GetWithPrefix(prefix string) ([]*mvccpb.KeyValue, error) {\n\tw.mockLock.RLock()\n\tdefer w.mockLock.RUnlock()\n\tkvs := make([]*mvccpb.KeyValue, 0, len(w.store))\n\tfor k, v := range w.store {\n\t\tif strings.HasPrefix(k, prefix) {\n\t\t\tkvs = append(kvs, &mvccpb.KeyValue{Key: []byte(k), Value: []byte(v)})\n\t\t}\n\t}\n\treturn kvs, nil\n}", "func (c *Client) DeleteObjectsWithPrefix(ctx context.Context, bucket, prefix string) error {\n\tin := &s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t}\n\n\tvar delErr error\n\tif err := c.S3.ListObjectsPagesWithContext(ctx, in, func(page *s3.ListObjectsOutput, lastPage bool) bool {\n\t\tobjectIDs := make([]*s3.ObjectIdentifier, 0)\n\t\tfor _, key := range page.Contents {\n\t\t\tobj := &s3.ObjectIdentifier{\n\t\t\t\tKey: key.Key,\n\t\t\t}\n\t\t\tobjectIDs = append(objectIDs, obj)\n\t\t}\n\n\t\tif len(objectIDs) != 0 {\n\t\t\tif _, delErr = c.S3.DeleteObjectsWithContext(ctx, &s3.DeleteObjectsInput{\n\t\t\t\tBucket: aws.String(bucket),\n\t\t\t\tDelete: &s3.Delete{\n\t\t\t\t\tObjects: objectIDs,\n\t\t\t\t\tQuiet: aws.Bool(true),\n\t\t\t\t},\n\t\t\t}); delErr != nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\treturn !lastPage\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\tif delErr != nil {\n\t\tif aerr, ok := delErr.(awserr.Error); ok && aerr.Code() == s3.ErrCodeNoSuchKey {\n\t\t\treturn nil\n\t\t}\n\t\treturn delErr\n\t}\n\treturn nil\n}", "func (tx *remoteTx) ForPrefix(bucket string, prefix []byte, walker func(k, v []byte) error) error {\n\tc, err := tx.Cursor(bucket)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer c.Close()\n\n\tfor k, v, err := c.Seek(prefix); k != nil; k, v, err = c.Next() {\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif !bytes.HasPrefix(k, prefix) {\n\t\t\tbreak\n\t\t}\n\t\tif err := walker(k, v); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (storage *Storage) GetAllWithPrefix(prefix string) (map[string]string, error) {\n\tentries := make(map[string]string)\n\tfor k, v := range storage.Db {\n\t\tif strings.HasPrefix(k, prefix) {\n\t\t\tentries[k] = v\n\t\t}\n\t}\n\treturn entries, nil\n}", "func (c *Checker) TypesWithPrefix(prefix string) (res []string) {\n\tc.mu.Lock()\n\tdefer c.mu.Unlock()\n\n\tfor scope := c.cur; scope != nil; scope = scope.Parent {\n\t\tfor k, obj := range scope.Objs {\n\t\t\tif obj.Kind != ObjType {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif strings.HasPrefix(k, prefix) {\n\t\t\t\tres = append(res, k)\n\t\t\t}\n\t\t}\n\t}\n\treturn res\n}", "func TriggerLambdaArnsForBucketWithPrefix(bucket string, prefix string, arns []*string) error {\n\tlistObjs := 
&s3.ListObjectsInput{\n\t\tBucket: aws.String(bucket),\n\t\tPrefix: aws.String(prefix),\n\t}\n\n\treturn triggerLambdaArnsForListObjectsInput(bucket, arns, listObjs)\n}", "func (xl xlObjects) ListObjects(ctx context.Context, bucket, prefix, marker, delimiter string, maxKeys int) (loi ListObjectsInfo, e error) {\n\tif err := checkListObjsArgs(ctx, bucket, prefix, marker, delimiter, xl); err != nil {\n\t\treturn loi, err\n\t}\n\n\t// With max keys of zero we have reached eof, return right here.\n\tif maxKeys == 0 {\n\t\treturn loi, nil\n\t}\n\n\t// Marker is set validate pre-condition.\n\tif marker != \"\" {\n\t\t// Marker not common with prefix is not implemented.Send an empty response\n\t\tif !hasPrefix(marker, prefix) {\n\t\t\treturn ListObjectsInfo{}, e\n\t\t}\n\t}\n\n\t// For delimiter and prefix as '/' we do not list anything at all\n\t// since according to s3 spec we stop at the 'delimiter' along\n\t// with the prefix. On a flat namespace with 'prefix' as '/'\n\t// we don't have any entries, since all the keys are of form 'keyName/...'\n\tif delimiter == SlashSeparator && prefix == SlashSeparator {\n\t\treturn loi, nil\n\t}\n\n\t// Over flowing count - reset to maxObjectList.\n\tif maxKeys < 0 || maxKeys > maxObjectList {\n\t\tmaxKeys = maxObjectList\n\t}\n\n\t// Initiate a list operation, if successful filter and return quickly.\n\tlistObjInfo, err := xl.listObjects(ctx, bucket, prefix, marker, delimiter, maxKeys)\n\tif err == nil {\n\t\t// We got the entries successfully return.\n\t\treturn listObjInfo, nil\n\t}\n\n\t// Return error at the end.\n\treturn loi, toObjectErr(err, bucket, prefix)\n}", "func (s *DiffStore) GetByPrefix(prefix []byte) (items []KV) {\n\ts.tree.AscendGreaterOrEqual(&storeKV{key: prefix}, func(i btree.Item) bool {\n\t\tv := i.(*storeKV)\n\n\t\tif !bytes.HasPrefix(v.key, prefix) {\n\t\t\treturn false\n\t\t}\n\n\t\tif v.state == ItemDeleted {\n\t\t\treturn true\n\t\t}\n\n\t\titems = append(items, KV{v.key, v.value})\n\n\t\treturn true\n\t})\n\n\treturn\n}", "func objectNamePrefixNonEmpty(\n\tctx context.Context,\n\tbucket gcs.Bucket,\n\tprefix string) (nonEmpty bool, err error) {\n\treq := &gcs.ListObjectsRequest{\n\t\tPrefix: prefix,\n\t\tMaxResults: 1,\n\t}\n\n\tlisting, err := bucket.ListObjects(ctx, req)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"ListObjects: %v\", err)\n\t\treturn\n\t}\n\n\tnonEmpty = len(listing.Objects) != 0\n\treturn\n}", "func (s *S3Store) ListBucketFiles(bucket, prefix string) (prefixs []string, keys []KeyInfo, err error) {\n\tvar req s3.ListObjectsInput\n\tvar resp *s3.ListObjectsOutput\n\treq.SetBucket(bucket)\n\treq.SetPrefix(prefix)\n\treq.SetDelimiter(\"/\")\n\tif resp, err = s.svc.ListObjects(&req); err != nil {\n\t\treturn\n\t}\n\tfor _, c := range resp.Contents {\n\t\tvar info KeyInfo\n\t\tinfo.Key = aws.StringValue(c.Key)\n\t\tinfo.Size = aws.Int64Value(c.Size)\n\t\tinfo.Stamp = aws.TimeValue(c.LastModified)\n\t\tkeys = append(keys, info)\n\t}\n\tfor _, c := range resp.CommonPrefixes {\n\t\tprefixs = append(prefixs, aws.StringValue(c.Prefix))\n\t}\n\treturn\n}", "func (cs ConsulStorage) List(ctx context.Context, prefix string, recursive bool) ([]string, error) {\n\tvar keysFound []string\n\n\t// get a list of all keys at prefix\n\tkeys, _, err := cs.ConsulClient.KV().Keys(cs.prefixKey(prefix), \"\", ConsulQueryDefaults(ctx))\n\tif err != nil {\n\t\treturn keysFound, err\n\t}\n\n\tif len(keys) == 0 {\n\t\treturn keysFound, fs.ErrNotExist\n\t}\n\n\t// remove default prefix from keys\n\tfor _, key := range keys {\n\t\tif 
strings.HasPrefix(key, cs.prefixKey(prefix)) {\n\t\t\tkey = strings.TrimPrefix(key, cs.Prefix+\"/\")\n\t\t\tkeysFound = append(keysFound, key)\n\t\t}\n\t}\n\n\t// if recursive wanted, just return all keys\n\tif recursive {\n\t\treturn keysFound, nil\n\t}\n\n\t// for non-recursive split path and look for unique keys just under given prefix\n\tkeysMap := make(map[string]bool)\n\tfor _, key := range keysFound {\n\t\tdir := strings.Split(strings.TrimPrefix(key, prefix+\"/\"), \"/\")\n\t\tkeysMap[dir[0]] = true\n\t}\n\n\tkeysFound = make([]string, 0)\n\tfor key := range keysMap {\n\t\tkeysFound = append(keysFound, path.Join(prefix, key))\n\t}\n\n\treturn keysFound, nil\n}", "func List(path_prefix string) []string {\n pth := strings.ToLower(path.Clean(path_prefix))\n if pth == \"/\" { pth = \"\" }\n pths := strings.Split(pth,\"/\")\n if pths[0] == \"\" { pths = pths[1:] } // if pth starts with \"/\"\n pil := assets\n for _, p := range pths {\n pil = pil.sub[p]\n if pil == nil { return nil }\n }\n \n res := make([]string,0,len(assets.sub))\n list(&res, pil, pths)\n return res\n}", "func (d *driver) List(ctx context.Context, opath string) ([]string, error) {\n\tpath := opath\n\tif path != \"/\" && opath[len(path)-1] != '/' {\n\t\tpath = path + \"/\"\n\t}\n\n\t// This is to cover for the cases when the rootDirectory of the driver is either \"\" or \"/\".\n\t// In those cases, there is no root prefix to replace and we must actually add a \"/\" to all\n\t// results in order to keep them as valid paths as recognized by storagedriver.PathRegexp\n\tprefix := \"\"\n\tif d.obsPath(\"\") == \"\" {\n\t\tprefix = \"/\"\n\t}\n\n\toutput, err := d.Client.ListObjects(&obs.ListObjectsInput{\n\t\tListObjsInput: obs.ListObjsInput{\n\t\t\tPrefix: d.obsPath(path),\n\t\t\tMaxKeys: listMax,\n\t\t\tDelimiter: \"/\",\n\t\t},\n\t\tBucket: d.Bucket,\n\t})\n\tif err != nil {\n\t\treturn nil, parseError(opath, err)\n\t}\n\n\tfiles := []string{}\n\tdirectories := []string{}\n\n\tfor {\n\t\tfor _, key := range output.Contents {\n\t\t\tfiles = append(files, strings.Replace(key.Key, d.obsPath(\"\"), prefix, 1))\n\t\t}\n\n\t\tfor _, commonPrefix := range output.CommonPrefixes {\n// commonPrefix := commonPrefix>Prefix\n\t\t\tdirectories = append(directories, strings.Replace(commonPrefix[0:len(commonPrefix)-1], d.obsPath(\"\"), prefix, 1))\n\t\t}\n\n\t\tif output.IsTruncated {\n\t\t\toutput, err = d.Client.ListObjects(&obs.ListObjectsInput{\n\t\t\t\tListObjsInput: obs.ListObjsInput{\n\t\t\t\t\tPrefix: d.obsPath(path),\n\t\t\t\t\tDelimiter: \"/\",\n\t\t\t\t\tMaxKeys: listMax,\n\t\t\t\t},\n\t\t\t\tBucket: d.Bucket,\n\t\t\t\tMarker: output.NextMarker,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif opath != \"/\" {\n\t\tif len(files) == 0 && len(directories) == 0 {\n\t\t\t// Treat empty output as missing directory, since we don't actually\n\t\t\t// have directories in obs.\n\t\t\treturn nil, storagedriver.PathNotFoundError{Path: opath}\n\t\t}\n\t}\n\treturn append(files, directories...), nil\n}", "func getObjects(storageCli *storage.Client, bucketName string) []string {\n\tvar objectList []string\n\tquery := &storage.Query{Prefix: \"\"}\n\n\tbucket := storageCli.Bucket(bucketName)\n\tobjects := bucket.Objects(context.Background(), query)\n\n\tfor {\n\t\tattrs, err := objects.Next()\n\t\tif err == iterator.Done {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tframework.FailfWithOffset(2, \"Failed to get objects from bucket %s\", err)\n\t\t}\n\n\t\tobjectList = 
append(objectList, attrs.Name)\n\t}\n\n\treturn objectList\n}", "func (o ClusterS3ImportPtrOutput) BucketPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ClusterS3Import) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.BucketPrefix\n\t}).(pulumi.StringPtrOutput)\n}", "func (b Bucket) ListObject(args ...Params) (*ListBucketResult, error) {\n\tv := new(ListBucketResult)\n\n\terr := b.Do(\"GET\", \"\", nil, v, args...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn v, nil\n}", "func (o InstanceS3ImportPtrOutput) BucketPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *InstanceS3Import) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.BucketPrefix\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *config) PrefixKeys(prefix string) []string {\n c.m.Lock()\n defer c.m.Unlock()\n\n keys := []string{}\n for k, _ := range c.conf {\n if strings.HasPrefix(k, prefix) {\n keys = append(keys, k)\n }\n }\n return keys\n}", "func (s *EtcdStorage) List(ctx context.Context, prefix string) ([]string, error) {\n\tif !strings.HasSuffix(prefix, \"/\") {\n\t\tprefix += \"/\"\n\t}\n\n\tresp, err := s.c.Get(ctx, prefix, etcdclient.WithPrefix())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tkeys := map[string]struct{}{}\n\tfor _, kv := range resp.Kvs {\n\t\tkey := string(kv.Key)\n\n\t\tif strings.HasPrefix(key, prefix) {\n\t\t\tkey = strings.TrimPrefix(key, prefix)\n\t\t\tif len(key) == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif i := strings.Index(key, \"/\"); i == -1 {\n\t\t\t\tkeys[key] = struct{}{}\n\t\t\t} else {\n\t\t\t\tkeys[key[:i+1]] = struct{}{}\n\t\t\t}\n\t\t}\n\t}\n\n\tkeysList := make([]string, len(keys))\n\ti := 0\n\tfor key := range keys {\n\t\tkeysList[i] = key\n\t\ti++\n\t}\n\n\tsort.Strings(keysList)\n\n\treturn keysList, nil\n}", "func (p *s3URLPartsExtension) searchObjectPrefixAndPatternFromS3URL() (prefix, pattern string, isWildcardSearch bool) {\n\t// If the objectKey is empty, it means the url provided is of a bucket,\n\t// then all object inside buckets needs to be included, so prefix is \"\" and pattern is set to *, isWildcardSearch false\n\tif p.ObjectKey == \"\" {\n\t\tpattern = \"*\"\n\t\treturn\n\t}\n\t// Check for wildcard\n\twildCardIndex := gCopyUtil.firstIndexOfWildCard(p.ObjectKey)\n\t// If no wildcard exits and url represents a virtual directory or a object, search everything in the virtual directory\n\t// or specifically the object.\n\tif wildCardIndex < 0 {\n\t\t// prefix is the path of virtual directory after the bucket, pattern is *, isWildcardSearch false\n\t\t// Example 1: https://<bucket-name>/vd-1/, prefix = /vd-1/\n\t\t// Example 2: https://<bucket-name>/vd-1/vd-2/, prefix = /vd-1/vd-2/\n\t\t// Example 3: https://<bucket-name>/vd-1/abc, prefix = /vd1/abc\n\t\tprefix = p.ObjectKey\n\t\tpattern = \"*\"\n\t\treturn\n\t}\n\n\t// Is wildcard search\n\tisWildcardSearch = true\n\t// wildcard exists prefix will be the content of object key till the wildcard index\n\t// Example: https://<bucket-name>/vd-1/vd-2/abc*\n\t// prefix = /vd-1/vd-2/abc, pattern = /vd-1/vd-2/abc*, isWildcardSearch true\n\tprefix = p.ObjectKey[:wildCardIndex]\n\tpattern = p.ObjectKey\n\n\treturn\n}", "func (xl xlObjects) listObjects(ctx context.Context, bucket, prefix, marker, delimiter string, maxKeys int) (loi ListObjectsInfo, e error) {\n\t// Default is recursive, if delimiter is set then list non recursive.\n\trecursive := true\n\tif delimiter == SlashSeparator {\n\t\trecursive = false\n\t}\n\n\twalkResultCh, endWalkCh := 
xl.listPool.Release(listParams{bucket, recursive, marker, prefix, false})\n\tif walkResultCh == nil {\n\t\tendWalkCh = make(chan struct{})\n\t\tlistDir := listDirFactory(ctx, xl.getLoadBalancedDisks()...)\n\t\twalkResultCh = startTreeWalk(ctx, bucket, prefix, marker, recursive, listDir, endWalkCh)\n\t}\n\n\tvar objInfos []ObjectInfo\n\tvar eof bool\n\tvar nextMarker string\n\tfor i := 0; i < maxKeys; {\n\n\t\twalkResult, ok := <-walkResultCh\n\t\tif !ok {\n\t\t\t// Closed channel.\n\t\t\teof = true\n\t\t\tbreak\n\t\t}\n\t\tentry := walkResult.entry\n\t\tvar objInfo ObjectInfo\n\t\tif hasSuffix(entry, SlashSeparator) {\n\t\t\t// Object name needs to be full path.\n\t\t\tobjInfo.Bucket = bucket\n\t\t\tobjInfo.Name = entry\n\t\t\tobjInfo.IsDir = true\n\t\t} else {\n\t\t\t// Set the Mode to a \"regular\" file.\n\t\t\tvar err error\n\t\t\tobjInfo, err = xl.getObjectInfo(ctx, bucket, entry)\n\t\t\tif err != nil {\n\t\t\t\t// Ignore errFileNotFound as the object might have got\n\t\t\t\t// deleted in the interim period of listing and getObjectInfo(),\n\t\t\t\t// ignore quorum error as it might be an entry from an outdated disk.\n\t\t\t\tif IsErrIgnored(err, []error{\n\t\t\t\t\terrFileNotFound,\n\t\t\t\t\terrXLReadQuorum,\n\t\t\t\t}...) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn loi, toObjectErr(err, bucket, prefix)\n\t\t\t}\n\t\t}\n\t\tnextMarker = objInfo.Name\n\t\tobjInfos = append(objInfos, objInfo)\n\t\ti++\n\t\tif walkResult.end {\n\t\t\teof = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tparams := listParams{bucket, recursive, nextMarker, prefix, false}\n\tif !eof {\n\t\txl.listPool.Set(params, walkResultCh, endWalkCh)\n\t}\n\n\tresult := ListObjectsInfo{}\n\tfor _, objInfo := range objInfos {\n\t\tif objInfo.IsDir && delimiter == SlashSeparator {\n\t\t\tresult.Prefixes = append(result.Prefixes, objInfo.Name)\n\t\t\tcontinue\n\t\t}\n\t\tresult.Objects = append(result.Objects, objInfo)\n\t}\n\n\tif !eof {\n\t\tresult.IsTruncated = true\n\t\tif len(objInfos) > 0 {\n\t\t\tresult.NextMarker = objInfos[len(objInfos)-1].Name\n\t\t}\n\t}\n\n\treturn result, nil\n}", "func getBucketObjects(svc *s3.S3, sess *session.Session, bucketName string) {\n\n\tquery := &s3.ListObjectsV2Input{\n\t\tBucket: aws.String(bucketName),\n\t\tPrefix: aws.String(prefix),\n\t}\n\n\t// Pagination Flag used to check if we need to go further (default is 1000 otherwise)\n\ttruncatedListing := true\n\tpageCount := 0\n\n\tfor truncatedListing && pageCount < maxPages {\n\t\tresp, err := svc.ListObjectsV2(query)\n\t\tpageCount++\n\n\t\tif err != nil {\n\t\t\tfmt.Println(err.Error())\n\t\t\treturn\n\t\t}\n\t\t// Get all objects for this page\n\t\tgetObjectsPage(resp, svc, bucketName)\n\n\t\t// Set continuation token\n\t\tquery.ContinuationToken = resp.NextContinuationToken\n\t\ttruncatedListing = *resp.IsTruncated\n\n\t\tif verbose == \"y\" {\n\t\t\tfmt.Printf(\"page Num %d, recCount %d \\n\", pageCount, numberOfRetrievedFiles)\n\t\t}\n\t}\n\n}", "func (s *s3) List(key string) ([]string, error) {\n\tif key != \"\" && !strings.HasSuffix(key, \"/\") {\n\t\tkey += \"/\"\n\t}\n\n\tresult, err := s.client.ListObjectsV2(&awss3.ListObjectsV2Input{\n\t\tPrefix: aws.String(key),\n\t\tBucket: aws.String(s.bucket),\n\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfiles := []string{}\n\tfor _, obj := range result.Contents {\n\t\t_, file := path.Split(*obj.Key)\n\t\tfiles = append(files, file)\n\t}\n\treturn files, nil\n}", "func (bucket Bucket) ListObjects(options ...Option) (ListObjectsResult, error) {\n\tvar out 
ListObjectsResult\n\n\toptions = append(options, EncodingType(\"url\"))\n\tparams, err := getRawParams(options)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\tresp, err := bucket.do(\"GET\", \"\", params, options, nil, nil)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\tdefer resp.Body.Close()\n\n\terr = xmlUnmarshal(resp.Body, &out)\n\tif err != nil {\n\t\treturn out, err\n\t}\n\n\terr = decodeListObjectsResult(&out)\n\treturn out, err\n}", "func (r *Bucket) BucketPrefix() pulumi.StringOutput {\n\treturn (pulumi.StringOutput)(r.s.State[\"bucketPrefix\"])\n}", "func (s Client) GetVersions(prefix string) ([]string, error) {\n\tdv, err := s.ListObjects(prefix, \"/\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar versions []string\n\t// split the object prefixes to get the last part which is the version\n\tfor _, p := range dv.CommonPrefixes {\n\t\ts := strings.Split(strings.Trim(*p.Prefix, \"/\"), \"/\")\n\t\tversions = append(versions, s[len(s)-1])\n\t}\n\treturn versions, nil\n}", "func (c *Client) DownloadPrefixFromS3(bucket string, prefix string, localDirPath string, shouldTrimDirPrefix bool, maxFiles *int64) error {\n\tif _, err := files.CreateDirIfMissing(localDirPath); err != nil {\n\t\treturn err\n\t}\n\tcreatedDirs := strset.New(localDirPath)\n\n\tvar trimPrefix string\n\tif shouldTrimDirPrefix {\n\t\tlastIndex := strings.LastIndex(prefix, \"/\")\n\t\tif lastIndex == -1 {\n\t\t\ttrimPrefix = \"\"\n\t\t} else {\n\t\t\ttrimPrefix = prefix[:lastIndex+1]\n\t\t}\n\t}\n\n\terr := c.S3Iterator(bucket, prefix, true, maxFiles, nil, func(object *s3.Object) (bool, error) {\n\t\tlocalRelPath := *object.Key\n\t\tif shouldTrimDirPrefix {\n\t\t\tlocalRelPath = strings.TrimPrefix(localRelPath, trimPrefix)\n\t\t}\n\n\t\tlocalPath := filepath.Join(localDirPath, localRelPath)\n\n\t\t// check for directory objects\n\t\tif strings.HasSuffix(*object.Key, \"/\") {\n\t\t\tif !createdDirs.Has(localPath) {\n\t\t\t\tif _, err := files.CreateDirIfMissing(localPath); err != nil {\n\t\t\t\t\treturn false, err\n\t\t\t\t}\n\t\t\t\tcreatedDirs.Add(localPath)\n\t\t\t}\n\t\t\treturn true, nil\n\t\t}\n\n\t\tlocalDir := filepath.Dir(localPath)\n\t\tif !createdDirs.Has(localDir) {\n\t\t\tif _, err := files.CreateDirIfMissing(localDir); err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\t\t\tcreatedDirs.Add(localDir)\n\t\t}\n\n\t\tif err := c.DownloadFileFromS3(bucket, *object.Key, localPath); err != nil {\n\t\t\treturn false, err\n\t\t}\n\n\t\treturn true, nil\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func prefixLoad(db *gorocksdb.DB, prefix string) ([]string, []string, error) {\n\tif db == nil {\n\t\treturn nil, nil, errors.New(\"Rocksdb instance is nil when do prefixLoad\")\n\t}\n\treadOpts := gorocksdb.NewDefaultReadOptions()\n\tdefer readOpts.Destroy()\n\treadOpts.SetPrefixSameAsStart(true)\n\titer := db.NewIterator(readOpts)\n\tdefer iter.Close()\n\tkeys := make([]string, 0)\n\tvalues := make([]string, 0)\n\titer.Seek([]byte(prefix))\n\tfor ; iter.Valid(); iter.Next() {\n\t\tkey := iter.Key()\n\t\tvalue := iter.Value()\n\t\tkeys = append(keys, string(key.Data()))\n\t\tkey.Free()\n\t\tvalues = append(values, string(value.Data()))\n\t\tvalue.Free()\n\t}\n\treturn keys, values, nil\n}", "func (se *StorageEndpoint) loadRangeByPrefix(prefix string, f func(k, v string)) error {\n\tnextKey := prefix\n\tendKey := clientv3.GetPrefixRangeEnd(prefix)\n\tfor {\n\t\tkeys, values, err := se.LoadRange(nextKey, endKey, MinKVRangeLimit)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor i 
:= range keys {\n\t\t\tf(strings.TrimPrefix(keys[i], prefix), values[i])\n\t\t}\n\t\tif len(keys) < MinKVRangeLimit {\n\t\t\treturn nil\n\t\t}\n\t\tnextKey = keys[len(keys)-1] + \"\\x00\"\n\t}\n}", "func (l *pydioObjects) ListObjectsV2(ctx context.Context, bucket, prefix, continuationToken, delimiter string, maxKeys int, fetchOwner bool, startAfter string) (result minio.ListObjectsV2Info, err error) {\n\n\tobjects, prefixes, err := l.ListPydioObjects(ctx, bucket, prefix, delimiter, maxKeys, false)\n\tif err != nil {\n\t\treturn result, pydioToMinioError(err, bucket, prefix)\n\t}\n\n\t// log.Printf(\"\\n[ListObjectsV2] Returning %d objects and %d prefixes (V2) for prefix %s\\n\", len(objects), len(prefixes), prefix)\n\n\treturn minio.ListObjectsV2Info{\n\t\tIsTruncated: false,\n\t\tPrefixes: prefixes,\n\t\tObjects: objects,\n\n\t\tContinuationToken: \"\",\n\t\tNextContinuationToken: \"\",\n\t}, nil\n\n}", "func (xl xlObjects) ListObjects(bucket, prefix, marker, delimiter string, maxKeys int) (ListObjectsInfo, error) {\n\treturn listObjectsCommon(xl, bucket, prefix, marker, delimiter, maxKeys)\n}", "func (o BucketLoggingOutput) LogObjectPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketLogging) *string { return v.LogObjectPrefix }).(pulumi.StringPtrOutput)\n}", "func (o BucketLoggingOutput) LogObjectPrefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketLogging) *string { return v.LogObjectPrefix }).(pulumi.StringPtrOutput)\n}", "func (m *memClient) RemoveObjects(prefix string) error {\n\tdefer m.Unlock()\n\tm.Lock()\n\tvar delKeys []string\n\tfor k := range m.store {\n\t\tif strings.HasPrefix(k, prefix) {\n\t\t\tdelKeys = append(delKeys, k)\n\t\t}\n\t}\n\tfor i := range delKeys {\n\t\tdelete(m.store, delKeys[i])\n\t}\n\treturn nil\n}", "func (c *Client) WatchPrefix(ctx context.Context, prefix string, opts ...easykv.WatchOption) (uint64, error) {\n\tvar options easykv.WatchOptions\n\tfor _, o := range opts {\n\t\to(&options)\n\t}\n\n\trespChan := make(chan watchResponse)\n\tgo func() {\n\t\topts := api.QueryOptions{\n\t\t\tWaitIndex: options.WaitIndex,\n\t\t}\n\t\t_, meta, err := c.client.List(prefix, &opts)\n\t\tif err != nil {\n\t\t\trespChan <- watchResponse{options.WaitIndex, err}\n\t\t\treturn\n\t\t}\n\t\trespChan <- watchResponse{meta.LastIndex, err}\n\t}()\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn options.WaitIndex, easykv.ErrWatchCanceled\n\t\tcase r := <-respChan:\n\t\t\treturn r.waitIndex, r.err\n\t\t}\n\t}\n}", "func (s *Storage) PrefixIterator(prefix []byte) (storage.Iterator, error) {\n\treturn s.iterator(prefix, storage.PrefixEnd(prefix))\n}", "func (db *memorydb) IterKeysWithPrefix(ctx context.Context, prefix []byte) <-chan []byte {\n\tkeys := db.KeysWithPrefix(prefix)\n\n\tout := make(chan []byte)\n\tgo func() {\n\t\tdefer close(out)\n\n\t\tfor _, k := range keys {\n\t\t\tselect {\n\t\t\tcase out <- k:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n\treturn out\n}", "func generateListObjectsV1Response(bucket, prefix, marker, delimiter, encodingType string, maxKeys int, resp ListObjectsInfo) ListObjectsResponse {\n\tvar contents []Object\n\tvar prefixes []CommonPrefix\n\tvar owner = Owner{}\n\tvar data = ListObjectsResponse{}\n\n\towner.ID = globalMinioDefaultOwnerID\n\tfor _, object := range resp.Objects {\n\t\tvar content = Object{}\n\t\tif object.Name == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tcontent.Key = s3EncodeName(object.Name, encodingType)\n\t\tcontent.LastModified = 
object.ModTime.UTC().Format(timeFormatAMZLong)\n\t\tif object.ETag != \"\" {\n\t\t\tcontent.ETag = \"\\\"\" + object.ETag + \"\\\"\"\n\t\t}\n\t\tcontent.Size = object.Size\n\t\tcontent.StorageClass = object.StorageClass\n\t\tcontent.Owner = owner\n\t\tcontents = append(contents, content)\n\t}\n\tdata.Name = bucket\n\tdata.Contents = contents\n\n\tdata.EncodingType = encodingType\n\tdata.Prefix = s3EncodeName(prefix, encodingType)\n\tdata.Marker = s3EncodeName(marker, encodingType)\n\tdata.Delimiter = s3EncodeName(delimiter, encodingType)\n\tdata.MaxKeys = maxKeys\n\n\tdata.NextMarker = s3EncodeName(resp.NextMarker, encodingType)\n\tdata.IsTruncated = resp.IsTruncated\n\tfor _, prefix := range resp.Prefixes {\n\t\tvar prefixItem = CommonPrefix{}\n\t\tprefixItem.Prefix = s3EncodeName(prefix, encodingType)\n\t\tprefixes = append(prefixes, prefixItem)\n\t}\n\tdata.CommonPrefixes = prefixes\n\treturn data\n}", "func TestMapPrefix(t *testing.T) {\n\tbx := NewTestDB()\n\tdefer bx.Close()\n\n\t// Create a new things bucket.\n\tthings, err := bx.New([]byte(\"things\"))\n\tif err != nil {\n\t\tt.Error(err.Error())\n\t}\n\n\t// Setup items to insert.\n\titems := []struct {\n\t\tKey, Value []byte\n\t}{\n\t\t{[]byte(\"A\"), []byte(\"1\")}, // `A` prefix match\n\t\t{[]byte(\"AA\"), []byte(\"2\")}, // match\n\t\t{[]byte(\"AAA\"), []byte(\"3\")}, // match\n\t\t{[]byte(\"AAB\"), []byte(\"2\")}, // match\n\t\t{[]byte(\"B\"), []byte(\"O\")},\n\t\t{[]byte(\"BA\"), []byte(\"0\")},\n\t\t{[]byte(\"BAA\"), []byte(\"0\")},\n\t}\n\n\t// Insert 'em.\n\tif err := things.Insert(items); err != nil {\n\t\tt.Error(err.Error())\n\t}\n\n\t// Now collect each item whose key starts with \"A\".\n\tprefix := []byte(\"A\")\n\n\t// Expected items for keys with prefix \"A\".\n\texpected := []struct {\n\t\tKey, Value []byte\n\t}{\n\t\t{[]byte(\"A\"), []byte(\"1\")},\n\t\t{[]byte(\"AA\"), []byte(\"2\")},\n\t\t{[]byte(\"AAA\"), []byte(\"3\")},\n\t\t{[]byte(\"AAB\"), []byte(\"2\")},\n\t}\n\n\t// Setup slice of items to collect results.\n\ttype item struct {\n\t\tKey, Value []byte\n\t}\n\tresults := []item{}\n\n\t// Anon func to map over matched keys.\n\tdo := func(k, v []byte) error {\n\t\tresults = append(results, item{k, v})\n\t\treturn nil\n\t}\n\n\tif err := things.MapPrefix(do, prefix); err != nil {\n\t\tt.Error(err.Error())\n\t}\n\n\tfor i, want := range expected {\n\t\tgot := results[i]\n\t\tif !bytes.Equal(got.Key, want.Key) {\n\t\t\tt.Errorf(\"got %v, want %v\", got.Key, want.Key)\n\t\t}\n\t\tif !bytes.Equal(got.Value, want.Value) {\n\t\t\tt.Errorf(\"got %v, want %v\", got.Value, want.Value)\n\t\t}\n\t}\n}", "func ListPrefixIfLocked(ctx context.Context, prefix string, lock KVLocker) (KeyValuePairs, error) {\n\tv, err := Client().ListPrefixIfLocked(ctx, prefix, lock)\n\tTrace(\"ListPrefixIfLocked\", err, logrus.Fields{fieldPrefix: prefix, fieldNumEntries: len(v)})\n\treturn v, err\n}", "func ListObjectV1Handler(w http.ResponseWriter, r *http.Request) {\n\t// Future Work\n\t// TODO: Support Marker\n\t// TODO: Support Paging\n\n\tv := mux.Vars(r)\n\tbucket, err := GetInterceptorBucket(v[\"bucket\"])\n\tif err != nil {\n\t\tSendNoSuchBucketError(v[\"bucket\"], w, r)\n\t\treturn\n\t}\n\n\tuquery := r.URL.Query()\n\treadBucket := bucket.GetReadBucket()\n\twriteBucket := bucket.GetWriteBucket()\n\tri := listObjectInput(readBucket, uquery)\n\twi := listObjectInput(writeBucket, uquery)\n\trequestBuckets := []*db.S3Bucket{readBucket, writeBucket}\n\n\trchan := make(chan listObjectV1ResponseResult)\n\tgo getListObjects(readBucket, 100, 
ri, rchan)\n\tgo getListObjects(writeBucket, 1, wi, rchan)\n\n\tresults := make([]listObjectV1ResponseResult, 2)\n\tfor i := range requestBuckets {\n\t\tresults[i] = <-rchan\n\t\tif results[i].Error != nil {\n\t\t\t// TODO: Implement Error handling correctly\n\t\t\tSendInternalError(\"Something Happend. Maybe your bucket settings is wrong.\", w, r)\n\t\t\treturn\n\t\t}\n\t}\n\n\tfr := mergeListObjectResponse(bucket, results)\n\tapi.SendSuccessXml(w, *fr)\n}", "func scanWithPrefix(scanner *bufio.Scanner, prefix string) []string {\n\tvar result []string\n\tfor scanner.Scan() {\n\t\tresult = append(result, prefix+scanner.Text())\n\t}\n\treturn result\n}", "func (o BucketLoggingResponseOutput) LogObjectPrefix() pulumi.StringOutput {\n\treturn o.ApplyT(func(v BucketLoggingResponse) string { return v.LogObjectPrefix }).(pulumi.StringOutput)\n}", "func (s *Store) FindPrefix(bucket, prefix []byte, next func(key, val []byte) bool) error {\n\treturn s.db.View(func(tx *bolt.Tx) error {\n\t\tc := tx.Bucket(bucket).Cursor()\n\t\tfor k, v := c.Seek(prefix); k != nil && bytes.HasPrefix(k, prefix); k, v = c.Next() {\n\t\t\tif !next(k, v) {\n\t\t\t\treturn io.EOF\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n}", "func (sys *BucketVersioningSys) PrefixEnabled(bucket, prefix string) bool {\n\tvc, err := sys.Get(bucket)\n\tif err != nil {\n\t\tlogger.CriticalIf(GlobalContext, err)\n\t}\n\treturn vc.PrefixEnabled(prefix)\n}", "func (o BucketMetricFilterOutput) Prefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BucketMetricFilter) *string { return v.Prefix }).(pulumi.StringPtrOutput)\n}", "func (l *gcsGateway) AnonListObjects(bucket string, prefix string, marker string, delimiter string, maxKeys int) (ListObjectsInfo, error) {\n\tresult, err := l.anonClient.ListObjects(bucket, prefix, marker, delimiter, maxKeys)\n\tif err != nil {\n\t\treturn ListObjectsInfo{}, s3ToObjectError(traceError(err), bucket)\n\t}\n\n\treturn fromMinioClientListBucketResult(bucket, result), nil\n}", "func (c *Client) ListBucket(bucket string, startAt string, maxKeys int) (items []*Item, err error) {\n\tif maxKeys < 0 {\n\t\treturn nil, errors.New(\"invalid negative maxKeys\")\n\t}\n\tmarker := startAt\n\tfor len(items) < maxKeys {\n\t\tfetchN := maxKeys - len(items)\n\t\tif fetchN > maxList {\n\t\t\tfetchN = maxList\n\t\t}\n\t\tvar bres listBucketResults\n\n\t\turl_ := fmt.Sprintf(\"%s?marker=%s&max-keys=%d\",\n\t\t\tc.bucketURL(bucket), url.QueryEscape(marker), fetchN)\n\n\t\t// Try the enumerate three times, since Amazon likes to close\n\t\t// https connections a lot, and Go sucks at dealing with it:\n\t\t// https://code.google.com/p/go/issues/detail?id=3514\n\t\tconst maxTries = 5\n\t\tfor try := 1; try <= maxTries; try++ {\n\t\t\ttime.Sleep(time.Duration(try-1) * 100 * time.Millisecond)\n\t\t\treq := newReq(url_)\n\t\t\tc.Auth.SignRequest(req)\n\t\t\tres, err := c.transport().RoundTrip(req)\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif res.StatusCode != http.StatusOK {\n\t\t\t\tif res.StatusCode < 500 {\n\t\t\t\t\tbody, _ := ioutil.ReadAll(io.LimitReader(res.Body, 1<<20))\n\t\t\t\t\taerr := &Error{\n\t\t\t\t\t\tOp: \"ListBucket\",\n\t\t\t\t\t\tCode: res.StatusCode,\n\t\t\t\t\t\tBody: body,\n\t\t\t\t\t\tHeader: res.Header,\n\t\t\t\t\t}\n\t\t\t\t\taerr.parseXML()\n\t\t\t\t\tres.Body.Close()\n\t\t\t\t\treturn nil, aerr\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tbres = listBucketResults{}\n\t\t\t\tvar logbuf bytes.Buffer\n\t\t\t\terr = 
xml.NewDecoder(io.TeeReader(res.Body, &logbuf)).Decode(&bres)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Error parsing s3 XML response: %v for %q\", err, logbuf.Bytes())\n\t\t\t\t} else if bres.MaxKeys != fetchN || bres.Name != bucket || bres.Marker != marker {\n\t\t\t\t\terr = fmt.Errorf(\"Unexpected parse from server: %#v from: %s\", bres, logbuf.Bytes())\n\t\t\t\t\tlog.Print(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tres.Body.Close()\n\t\t\tif err != nil {\n\t\t\t\tif try < maxTries-1 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tlog.Print(err)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tbreak\n\t\t}\n\t\tfor _, it := range bres.Contents {\n\t\t\tif it.Key == marker && it.Key != startAt {\n\t\t\t\t// Skip first dup on pages 2 and higher.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif it.Key < startAt {\n\t\t\t\treturn nil, fmt.Errorf(\"Unexpected response from Amazon: item key %q but wanted greater than %q\", it.Key, startAt)\n\t\t\t}\n\t\t\titems = append(items, it)\n\t\t\tmarker = it.Key\n\t\t}\n\t\tif !bres.IsTruncated {\n\t\t\t// log.Printf(\"Not truncated. so breaking. items = %d; len Contents = %d, url = %s\", len(items), len(bres.Contents), url_)\n\t\t\tbreak\n\t\t}\n\t}\n\treturn items, nil\n}", "func prefixListing() error {\n\tinputDir := getUserHome() + \"/sequence_lists/genbank_prefixes\"\n\tfiles, err := ioutil.ReadDir(inputDir)\n\tif err != nil {\n\t\treturn handle(\"Error in reading dir\", err)\n\t}\n\t// Gets a list of the prefixes found in the files and puts them into a\n\t// sorted list.\n\tres := []string{}\n\tfor _, f := range files {\n\t\tfname := f.Name()\n\t\tfileResult, err := prefixListForFile(inputDir+\"/\"+fname, fname)\n\t\tif err != nil {\n\t\t\treturn handle(\"Error in getting prefix list from file\", err)\n\t\t}\n\t\tres = append(res, fileResult)\n\t}\n\tsort.Sort(naturalsort.NaturalSort(res))\n\tfor _, v := range res {\n\t\tfmt.Println(v)\n\t}\n\treturn err\n}", "func (w *EtcdSafePointKV) GetWithPrefix(k string) ([]*mvccpb.KeyValue, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), time.Second*15)\n\tresp, err := w.cli.Get(ctx, k, clientv3.WithPrefix())\n\tcancel()\n\tif err != nil {\n\t\treturn nil, errors.WithStack(err)\n\t}\n\treturn resp.Kvs, nil\n}", "func (c *Client) WatchPrefix(prefix string, keys []string, waitIndex uint64, stopChan chan bool) (uint64, error) {\n\t<-stopChan\n\treturn 0, nil\n}", "func (c *Client) WatchPrefix(prefix string, keys []string, waitIndex uint64, stopChan chan bool) (uint64, error) {\n\t<-stopChan\n\treturn 0, nil\n}", "func (o InventoryDestinationBucketOutput) Prefix() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v InventoryDestinationBucket) *string { return v.Prefix }).(pulumi.StringPtrOutput)\n}", "func (kps *KubernetesPrefixSource) Prefixes() []string {\n\treturn kps.prefixes.Load()\n}", "func prefixesListing(pfx string, prefixes []string) []listingItem {\n\tout := make([]listingItem, 0, len(prefixes)+1)\n\tif pfx != \"\" {\n\t\tparent := \"\"\n\t\tif idx := strings.LastIndex(pfx, \"/\"); idx != -1 {\n\t\t\tparent = pfx[:idx]\n\t\t}\n\t\tout = append(out, listingItem{\n\t\t\tBack: true,\n\t\t\tHref: prefixPageURL(parent),\n\t\t})\n\t}\n\treturn pathListing(pfx, prefixes, out, func(p string) listingItem {\n\t\treturn listingItem{\n\t\t\tHref: prefixPageURL(p),\n\t\t}\n\t})\n}", "func FindHostsByPrefix(prefix string) []string {\n\treturn index.Find(strings.ToLower(prefix), 10)\n}", "func listFiles(prefix, delimiter, marker string, maxKeys int, b *s3.Bucket) (files []s3.Key, err error) {\n\tresp, err := b.List(prefix, 
delimiter, marker, maxKeys)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// append to files\n\tfiles = append(files, resp.Contents...)\n\n\t// recursion for the recursion god\n\tif resp.IsTruncated && resp.NextMarker != \"\" {\n\t\tf, err := listFiles(resp.Prefix, resp.Delimiter, resp.NextMarker, resp.MaxKeys, b)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// append to files\n\t\tfiles = append(files, f...)\n\t}\n\n\treturn files, nil\n}", "func (b *Bucket) Iter(ctx context.Context, dir string, f func(string) error, opt ...objstore.IterOption) error {\n\tif dir != \"\" {\n\t\tdir = strings.TrimSuffix(dir, objstore.DirDelim) + objstore.DirDelim\n\t}\n\n\tdelimiter := objstore.DirDelim\n\n\tif objstore.ApplyIterOptions(opt...).Recursive {\n\t\tdelimiter = \"\"\n\t}\n\n\tvar marker string\n\tfor {\n\t\tif err := ctx.Err(); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tobjects, err := b.client.ListObjects(b.name, &api.ListObjectsArgs{\n\t\t\tDelimiter: delimiter,\n\t\t\tMarker: marker,\n\t\t\tMaxKeys: 1000,\n\t\t\tPrefix: dir,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmarker = objects.NextMarker\n\t\tfor _, object := range objects.Contents {\n\t\t\tif err := f(object.Key); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tfor _, object := range objects.CommonPrefixes {\n\t\t\tif err := f(object.Prefix); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tif !objects.IsTruncated {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn nil\n}", "func (utils MockJobLogUtils) ListJobRunFilesWithPrefix(job string, run int, prefix string) ([]string, error) {\n\tfilesWithPrefixes, ok := utils.MockFilesWithPrefix[run]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"Run number %v not a 1st key in the mock files with prefix map\", run)\n\t}\n\tfilesWithPrefix, ok := filesWithPrefixes[prefix]\n\treturn filesWithPrefix, nil\n}", "func (c *Conn) listKeys(prefix string, keyType int) ([]string, error) {\n\tkeys := []string{}\n\n\tlister := c.bucket.List(&blob.ListOptions{\n\t\tDelimiter: \"/\",\n\t\tPrefix: prefix,\n\t})\n\tfor {\n\t\tobj, err := lister.Next(c.ctx)\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\n\t\tif err != nil {\n\t\t\tc.Log.Errorf(\"Failed to get next blob err=%v\", err)\n\t\t\treturn keys, err\n\t\t}\n\n\t\tswitch keyType {\n\t\tcase ListKeyBoth:\n\t\tcase ListKeyFile:\n\t\t\tif obj.IsDir {\n\t\t\t\tcontinue\n\t\t\t}\n\t\tcase ListKeyDir:\n\t\t\tif !obj.IsDir {\n\t\t\t\tcontinue\n\t\t\t}\n\t\tdefault:\n\t\t\tc.Log.Warningf(\"Invalid keyType=%d, Ignored\", keyType)\n\t\t\tcontinue\n\t\t}\n\n\t\tkeys = append(keys, obj.Key)\n\t}\n\treturn keys, nil\n}" ]
[ "0.74287415", "0.72871804", "0.7260987", "0.7107129", "0.70805806", "0.7073893", "0.70719457", "0.6956832", "0.69261414", "0.68991977", "0.6828743", "0.6813604", "0.68116945", "0.6754437", "0.6732319", "0.6709022", "0.66668177", "0.66326827", "0.6577119", "0.65761316", "0.642166", "0.641053", "0.63974726", "0.63731986", "0.6353421", "0.634493", "0.6339197", "0.6333239", "0.63283813", "0.6322988", "0.6302006", "0.62993246", "0.62960505", "0.62587583", "0.6256361", "0.62547857", "0.6248734", "0.624796", "0.62160224", "0.62060916", "0.6180351", "0.615784", "0.6153607", "0.61284983", "0.61153114", "0.6091595", "0.6077623", "0.60734105", "0.6054", "0.60318404", "0.60312575", "0.6008556", "0.59906274", "0.59542465", "0.5946456", "0.59417796", "0.5941111", "0.5918845", "0.59184396", "0.59169906", "0.5910104", "0.5908962", "0.58951604", "0.5875568", "0.5873498", "0.58564305", "0.58556396", "0.5847373", "0.5846024", "0.5821302", "0.58181715", "0.58178514", "0.58178514", "0.5798558", "0.5795823", "0.5780368", "0.5779259", "0.5775808", "0.577505", "0.57647353", "0.5725676", "0.5724332", "0.57190275", "0.5706235", "0.5700098", "0.5680497", "0.5672335", "0.56636375", "0.5656997", "0.56506014", "0.56464976", "0.56464976", "0.56462365", "0.5633349", "0.56326663", "0.56310296", "0.56253386", "0.56017506", "0.55919373", "0.55752975" ]
0.84079736
0
DeleteObject : Deletes the given object
func (o *ObjectStorage) DeleteObject(bucket *string, object *string) (*s3.DeleteObjectOutput, error) {
	return o.Client.DeleteObject(&s3.DeleteObjectInput{
		Bucket: bucket,
		Key:    object,
	})
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (call CommandHandler) DeleteObject(objID uint16, objType uint8) error {\n\t// https://developers.yubico.com/YubiHSM2/Commands/Delete_Object.html\n\treturn call.nullResponse(CmdDeleteObject.Build(objID, objType))\n}", "func (h *Handler) DeleteObject(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\n\tlog := logger.Log.WithField(\"requestID\", middleware.GetReqID(r.Context()))\n\n\terr := h.Store.Delete(r.Context(), id)\n\tif err != nil {\n\t\tif err == store.KeyNotFound {\n\t\t\tlog.WithField(\"objectID\", id).Debug(err)\n\t\t\tw.WriteHeader(http.StatusNotFound)\n\t\t} else {\n\t\t\tlog.Error(err)\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t}\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusNoContent)\n}", "func (b NeteaseNOSBackend) DeleteObject(path string) error {\n\tkey := pathutil.Join(b.Prefix, path)\n\n\tobjectRequest := &model.ObjectRequest{\n\t\tBucket: b.Bucket,\n\t\tObject: key,\n\t}\n\n\terr := b.Client.DeleteObject(objectRequest)\n\treturn err\n}", "func (c *Client) DeleteObject(objectPath string) error {\n\tendpoint, _ := url.Parse(c.Endpoint.String())\n\tendpoint.Path = path.Join(endpoint.Path, objectPath)\n\n\treq, err := http.NewRequest(\"DELETE\", endpoint.String(), nil)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error making the request: %v\", err)\n\t}\n\n\tres, err := c.HTTPClient.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif statusErr := errorFromCode(res.StatusCode); statusErr != nil {\n\t\treturn statusErr\n\t}\n\n\treturn nil\n}", "func (fs *Stow) DeleteObject(ctx context.Context, path string) error {\n\tlocation, err := stow.Dial(fs.kind, fs.config)\n\tif err != nil {\n\t\tlog.Errorf(\"stow.Dial fail: %v\", err)\n\t\treturn err\n\t}\n\n\tcontainer, err := location.Container(fs.bucket)\n\tif err != nil {\n\t\tlog.Errorf(\"stow.GetContainer fail: %v\", err)\n\t\treturn err\n\t}\n\n\terr = container.RemoveItem(path)\n\tif err != nil {\n\t\tlog.Errorf(\"stow.Container.RemoveItem fail: %v\", err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (avisess *AviSession) DeleteObject(uri string, options ...ApiOptionsParams) error {\n\topts, err := getOptions(options)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn avisess.restRequestInterfaceResponse(\"DELETE\", uri, opts.payload, opts.result, options...)\n}", "func (p *ExperimentalPlayground) deleteObject(object engine.Object) error {\n\tp.objectsContainersMux.Lock()\n\tdefer p.objectsContainersMux.Unlock()\n\treturn p.unsafeDeleteObject(object)\n}", "func (c *SyscallService) DeleteObject(ctx context.Context, in *pb.DeleteRequest) (*pb.DeleteResponse, error) {\n\tnctx, ok := c.ctxmgr.Context(in.GetHeader().Ctxid)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"bad ctx id:%d\", in.Header.Ctxid)\n\t}\n\n\terr := nctx.Cache.Del(nctx.ContractName, in.Key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pb.DeleteResponse{}, nil\n}", "func (objectSet *VolumeObjectSet) DeleteObject(id string) error {\r\n\treturn objectSet.Client.Delete(volumePath, id)\r\n}", "func (objectSet *UserObjectSet) DeleteObject(id string) error {\n\terr := objectSet.Client.Delete(userPath, id)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func ObjectDelete(db *sql.DB, obj interface{}) error {\n\ttable, key, id := deleteInfo(obj)\n\tif len(key) == 0 {\n\t\treturn ErrNoKeyField\n\t}\n\tquery := fmt.Sprintf(\"delete from %s where %s=?\", table, key)\n\trec, err := db.Exec(query, id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"BAD QUERY:%s ID:%v ERROR:%v\", query, id, err)\n\t}\n\tif 
updated, _ := rec.RowsAffected(); updated == 0 {\n\t\treturn fmt.Errorf(\"No record deleted for id: %v\", id)\n\t}\n\treturn nil\n}", "func (a *UserServiceApiService) DeleteObject(ctx _context.Context, username string) ApiDeleteObjectRequest {\n\treturn ApiDeleteObjectRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tusername: username,\n\t}\n}", "func (objectSet *ShelfObjectSet) DeleteObject(id string) error {\n\treturn fmt.Errorf(\"Unsupported operation 'delete' on Shelf\")\n}", "func (a *Application) DeleteObject(obj *LocalDeviceObject) error {\n\tlog.Debug().Stringer(\"obj\", obj).Msg(\"DeleteObject\")\n\n\t// extract the object name and identifier\n\tobjectName := obj.ObjectName\n\tobjectIdentifier := obj.ObjectIdentifier\n\n\t// delete it from the application\n\tdelete(a.objectName, objectName)\n\tdelete(a.objectIdentifier, objectIdentifier)\n\n\t// remove the object's identifier from the device's object list if there is one and has an object list property\n\tif a.localDevice != nil {\n\t\tfoundIndex := -1\n\t\tfor i, s := range a.localDevice.ObjectList {\n\t\t\tif s == objectIdentifier {\n\t\t\t\tfoundIndex = i\n\t\t\t}\n\t\t}\n\t\tif foundIndex >= 0 {\n\t\t\ta.localDevice.ObjectList = append(a.localDevice.ObjectList[0:foundIndex], a.localDevice.ObjectList[foundIndex+1:]...)\n\t\t}\n\t}\n\n\t// make sure the object knows it's detached from an application\n\tobj.App = nil\n\n\treturn nil\n}", "func (l *pydioObjects) DeleteObject(ctx context.Context, bucket string, object string) error {\n\n\t// log.Println(\"[DeleteObject]\", object)\n\t_, err := l.Router.DeleteNode(ctx, &tree.DeleteNodeRequest{\n\t\tNode: &tree.Node{\n\t\t\tPath: strings.TrimLeft(object, \"/\"),\n\t\t},\n\t})\n\tif err != nil {\n\t\treturn pydioToMinioError(err, bucket, object)\n\t}\n\treturn nil\n\n}", "func (r *Repository) DeleteObject(hash plumbing.Hash) error {\n\treturn r.Storer.DeleteLooseObject(hash)\n}", "func (client *Client) DeleteObject(container, object string) error {\n\t_, err := objects.Delete(client.Container, container, object, objects.DeleteOpts{}).Extract()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error deleting objects %s of container %s: %s\", object, container, ProviderErrorToString(err))\n\t}\n\treturn nil\n\n}", "func (r Virtual_Guest) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Virtual_Guest\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (bucket Bucket) DeleteObject(objectKey string) error {\n\tparams := map[string]interface{}{}\n\tresp, err := bucket.do(\"DELETE\", objectKey, params, nil, nil, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\treturn checkRespCode(resp.StatusCode, []int{http.StatusNoContent})\n}", "func (c *BaseController) DeleteSingleObject(r *web.Request) (*web.Response, error) {\n\tobjectID := r.PathParams[PathParamID]\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Deleting %s with id %s\", c.objectType, objectID)\n\n\tbyID := query.ByField(query.EqualsOperator, \"id\", objectID)\n\tctx, err := query.AddCriteria(ctx, byID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tr.Request = r.WithContext(ctx)\n\n\treturn c.DeleteObjects(r)\n}", "func (cache *SiaCacheLayer) DeleteObject(bucket string, objectName string) *SiaServiceError {\n\tcache.debugmsg(\"SiaCacheLayer.DeleteObject\")\n\n\terr := cache.dbUpdateObjectDeletedStatus(bucket, objectName, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Tell Sia daemon to delete the object\n\tvar siaObj = cache.getSiaObjectName(bucket, 
objectName)\n\n\tderr := post(cache.SiadAddress, \"/renter/delete/\"+siaObj, \"\")\n\tif derr != nil {\n\t\treturn &SiaServiceError{Code: \"SiaErrorDaemon\", Message: derr.Error()}\n\t}\n\n\treturn cache.dbDeleteObject(bucket, objectName)\n}", "func (s *Server) objectDelete(\n\tsess *pb.Session,\n\tuuids []string,\n) error {\n\treq := &pb.ObjectDeleteByUuidsRequest{\n\t\tSession: sess,\n\t\tUuids: uuids,\n\t}\n\tmc, err := s.metaClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = mc.ObjectDeleteByUuids(context.Background(), req)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func DeleteObject(ctx context.Context, rootId string) (rep string, err error) {\n\t// 1. gui lenh RemoveItself toi chinh no, khi do, cac child cua no se phai tu unsubscribe\n\trep, err = sendRemoveItselfCommand(ctx, rootId)\n\tif err != nil {\n\t\tLoggingClient.Error(err.Error())\n\t}\n\t// 2. xoa Object nay trong Procols cua cac Parent. Chi can xoa trong MetaData, khong can gui lenh\n\tfor parentId := range cacheGetParents(rootId) {\n\t\tparentObject, err := clientMetaDevice.Device(parentId, ctx)\n\t\tif err == nil {\n\t\t\tdeleteElementInProtocols(parentObject.Protocols, rootId)\n\t\t\terr = clientMetaDevice.Update(parentObject, ctx)\n\t\t\tif err != nil {\n\t\t\t\tLoggingClient.Error(err.Error())\n\t\t\t}\n\t\t}\n\t}\n\t// 3. xoa cac SubObject trong MetaData & xoa rootObject\n\t// doi voi DeviceType, Sub trung voi Root nen bo qua phan xoa SubObject\n\tif cacheGetType(rootId) != DEVICETYPE {\n\t\tlistOb, _ := clientMetaDevice.DevicesByLabel(rootId, ctx)\n\t\tfor _, sub := range listOb {\n\t\t\tfmt.Println(\"delete subObject \", sub.Id)\n\t\t\tclientMetaDevice.Delete(sub.Id, ctx)\n\t\t\tcacheDeleteMapHasName(sub.Name)\n\t\t}\n\t}\n\t// xoa RootObject\n\terr = clientMetaDevice.Delete(rootId, ctx)\n\t// 4. 
xoa Object trong MapRoot & xoa trong MapID\n\tcacheDeleteRoot(rootId)\n\treturn\n}", "func (db *DB) DeleteObject(ctx context.Context, class string, id strfmt.UUID,\n\trepl *additional.ReplicationProperties, tenant string,\n) error {\n\tidx := db.GetIndex(schema.ClassName(class))\n\tif idx == nil {\n\t\treturn fmt.Errorf(\"delete from non-existing index for %s\", class)\n\t}\n\n\terr := idx.deleteObject(ctx, id, repl, tenant)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete from index %q: %w\", idx.ID(), err)\n\t}\n\n\treturn nil\n}", "func DeleteObject(storageCli *storage.Client, bucketName, objectName string) {\n\tif err := storageCli.Bucket(bucketName).Object(objectName).Delete(context.Background()); err != nil {\n\t\tframework.FailfWithOffset(2, \"Failed to delete object %q from bucket %q: %s\",\n\t\t\tobjectName, bucketName, err)\n\t}\n}", "func (hp *hdfsProvider) DeleteObj(lom *cluster.LOM) (errCode int, err error) {\n\tfilePath := filepath.Join(lom.Bck().Props.Extra.HDFS.RefDirectory, lom.ObjName)\n\tif err := hp.c.Remove(filePath); err != nil {\n\t\terrCode, err = hdfsErrorToAISError(err)\n\t\treturn errCode, err\n\t}\n\tif verbose {\n\t\tnlog.Infof(\"[delete_object] %s\", lom)\n\t}\n\treturn 0, nil\n}", "func (objectSet *PoolObjectSet) DeleteObject(id string) error {\n\terr := objectSet.Client.Delete(poolPath, id)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (objectSet *PerformancePolicyObjectSet) DeleteObject(id string) error {\r\n\treturn objectSet.Client.Delete(performancePolicyPath, id)\r\n}", "func deleteObject(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tkey, _ := strconv.Atoi(vars[\"id\"])\n\tfound := false\n\tlocation := 0\n\n\tfor index := range listOfObjects {\n\t\tif listOfObjects[index].ID == key {\n\t\t\tfound = true\n\t\t\tlocation = index\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif found == true {\n\t\tlistOfObjects = append(listOfObjects[:location], listOfObjects[location+1:]...)\n\t\terr := json.NewEncoder(w).Encode(\"Removed\")\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tlog.Fatal(\"Error encoding JSON\")\n\t\t}\n\t} else {\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\terr := json.NewEncoder(w).Encode(\"Could not find object\")\n\t\tif err != nil {\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tlog.Fatal(\"Error encoding JSON\")\n\t\t}\n\t}\n}", "func (p *ExperimentalPlayground) DeleteObject(object engine.Object, location engine.Location) error {\n\tif !location.Empty() {\n\t\tcontainer, err := p.getContainerByObject(object)\n\t\tif err != nil {\n\t\t\treturn errDeleteObject(err.Error())\n\t\t}\n\t\tp.gameMap.MRemoveContainer(location, container)\n\t}\n\n\tif err := p.deleteObject(object); err != nil {\n\t\treturn errDeleteObject(err.Error())\n\t}\n\n\treturn nil\n}", "func (xl xlObjects) deleteObject(bucket, object string) error {\n\tmetaJSONFile := path.Join(object, \"meta.json\")\n\t// Ignore if meta.json file doesn't exist.\n\tif err := xl.storage.DeleteFile(bucket, metaJSONFile); err != nil {\n\t\tif err != errFileNotFound {\n\t\t\treturn err\n\t\t}\n\t}\n\tif err := xl.storage.DeleteFile(bucket, object); err != nil {\n\t\tif err != errFileNotFound {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (r Dns_Domain_ResourceRecord) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Domain_ResourceRecord\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (c cacheObjects) DeleteObject(ctx context.Context, bucket, object string) 
(err error) {\n\tif err = c.DeleteObjectFn(ctx, bucket, object); err != nil {\n\t\treturn\n\t}\n\tif c.isCacheExclude(bucket, object) {\n\t\treturn\n\t}\n\tif dcache, cerr := c.getCachedLoc(ctx, bucket, object); cerr == nil {\n\t\tdcache.Delete(ctx, bucket, object)\n\t}\n\treturn\n}", "func (xl xlObjects) DeleteObject(bucket, object string) error {\n\t// Verify if bucket is valid.\n\tif !IsValidBucketName(bucket) {\n\t\treturn BucketNameInvalid{Bucket: bucket}\n\t}\n\tif !IsValidObjectName(object) {\n\t\treturn ObjectNameInvalid{Bucket: bucket, Object: object}\n\t}\n\tnsMutex.Lock(bucket, object)\n\tdefer nsMutex.Unlock(bucket, object)\n\t// Verify if the object is a multipart object.\n\tif isMultipartObject(xl.storage, bucket, object) {\n\t\terr := xl.deleteMultipartObject(bucket, object)\n\t\tif err != nil {\n\t\t\treturn toObjectErr(err, bucket, object)\n\t\t}\n\t\treturn nil\n\t}\n\terr := xl.deleteObject(bucket, object)\n\tif err != nil {\n\t\treturn toObjectErr(err, bucket, object)\n\t}\n\treturn nil\n}", "func deleteObject(ctx *context, file string) error {\n\t// Setup\n\tsvc := ctx.svcS3\n\tinput := &s3.DeleteObjectInput{\n\t\tBucket: aws.String(ctx.bucket),\n\t\tKey: aws.String(file),\n\t}\n\n\toutput, err := svc.DeleteObject(input)\n\tawsOutput(output.GoString())\n\tif err != nil {\n\t\treturn handle(\"Error in deleting object.\", err)\n\t}\n\treturn err\n}", "func (r Dns_Domain) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Domain\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (p *Plugin) ObjectDeleted(obj interface{}) {\n\tp.Log.Infof(\"ObjectStore.ObjectDeleted: %s\", obj)\n\tswitch obj.(type) {\n\tcase *v1.NetworkService:\n\t\tns := obj.(*v1.NetworkService).Spec\n\t\tp.objects.networkServicesStore.Delete(meta{name: ns.Metadata.Name, namespace: ns.Metadata.Namespace})\n\tcase *v1.NetworkServiceChannel:\n\t\tnsc := obj.(*v1.NetworkServiceChannel).Spec\n\t\tp.objects.networkServiceChannelsStore.DeleteChannel(&nsc)\n\tcase *v1.NetworkServiceEndpoint:\n\t\tnse := obj.(*v1.NetworkServiceEndpoint).Spec\n\t\tp.objects.networkServiceEndpointsStore.Delete(meta{name: nse.Metadata.Name, namespace: nse.Metadata.Namespace})\n\t}\n}", "func (b *fakeBosClient) DeleteObject(bucket, object string) error {\n\tif bucket == \"error\" {\n\t\treturn fmt.Errorf(bucket + object)\n\t}\n\treturn nil\n}", "func delete_object(project *C.Project, bucket_name, object_key *C.char) C.ObjectResult { //nolint:golint\n\tif project == nil {\n\t\treturn C.ObjectResult{\n\t\t\terror: mallocError(ErrNull.New(\"project\")),\n\t\t}\n\t}\n\tif bucket_name == nil {\n\t\treturn C.ObjectResult{\n\t\t\terror: mallocError(ErrNull.New(\"bucket_name\")),\n\t\t}\n\t}\n\tif object_key == nil {\n\t\treturn C.ObjectResult{\n\t\t\terror: mallocError(ErrNull.New(\"object_key\")),\n\t\t}\n\t}\n\n\tproj, ok := universe.Get(project._handle).(*Project)\n\tif !ok {\n\t\treturn C.ObjectResult{\n\t\t\terror: mallocError(ErrInvalidHandle.New(\"project\")),\n\t\t}\n\t}\n\n\tdeleted, err := proj.DeleteObject(proj.scope.ctx, C.GoString(bucket_name), C.GoString(object_key))\n\treturn C.ObjectResult{\n\t\terror: mallocError(err),\n\t\tobject: mallocObject(deleted),\n\t}\n}", "func DeleteObject(service *awss3.S3, container, object string) error {\n\t_, err := service.DeleteObject(&awss3.DeleteObjectInput{\n\t\tBucket: aws.String(container),\n\t\tKey: aws.String(object),\n\t})\n\treturn err\n}", "func (b *Bucket) DeleteObject(path string) error {\n\t_, err := 
b.client.DeleteObject(&SDK.DeleteObjectInput{\n\t\tBucket: String(b.name),\n\t\tKey: String(path),\n\t})\n\tif err != nil {\n\t\tlog.Error(\"[S3] error on `DeleteObject` operation, bucket=\"+b.name, err.Error())\n\t}\n\treturn err\n}", "func (c *clientImpl) DeleteObject(ctx context.Context, p *DeleteObjectParams, opts ...CallOption) error {\n\tcallOpts := c.defaultCallOptions()\n\n\tfor i := range opts {\n\t\tif opts[i] != nil {\n\t\t\topts[i](callOpts)\n\t\t}\n\t}\n\n\t// create request\n\treq := new(v2object.DeleteRequest)\n\n\t// initialize request body\n\tbody := new(v2object.DeleteRequestBody)\n\treq.SetBody(body)\n\n\t// set meta header\n\tmeta := v2MetaHeaderFromOpts(callOpts)\n\n\tif err := c.attachV2SessionToken(callOpts, meta, v2SessionReqInfo{\n\t\taddr: p.addr.ToV2(),\n\t\tverb: v2session.ObjectVerbDelete,\n\t}); err != nil {\n\t\treturn fmt.Errorf(\"could not attach session token: %w\", err)\n\t}\n\n\treq.SetMetaHeader(meta)\n\n\t// fill body fields\n\tbody.SetAddress(p.addr.ToV2())\n\n\t// sign the request\n\tif err := signature.SignServiceMessage(callOpts.key, req); err != nil {\n\t\treturn fmt.Errorf(\"signing the request failed: %w\", err)\n\t}\n\n\t// send request\n\tresp, err := rpcapi.DeleteObject(c.Raw(), req, client.WithContext(ctx))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"sending the request failed: %w\", err)\n\t}\n\n\t// handle response meta info\n\tif err := c.handleResponseInfoV2(callOpts, resp); err != nil {\n\t\treturn err\n\t}\n\n\t// verify response structure\n\tif err := signature.VerifyServiceMessage(resp); err != nil {\n\t\treturn fmt.Errorf(\"response verification failed: %w\", err)\n\t}\n\n\tif p.tombTgt != nil {\n\t\tp.tombTgt.SetAddress(object.NewAddressFromV2(resp.GetBody().GetTombstone()))\n\t}\n\n\treturn nil\n}", "func (r Virtual_DedicatedHost) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Virtual_DedicatedHost\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func Delete(c client.Client, obj runtime.Object) error {\n\tif err := c.Delete(context.Background(), obj); err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}", "func (r Dns_Secondary) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Secondary\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (c *defaultGcsClient) DeleteObject(ctxIn context.Context, bucketName string, objectName string) error {\n\tctx, span := trace.StartSpan(ctxIn, \"(*defaultGcsClient).DeleteObject\")\n\tdefer span.End()\n\n\treturn c.client.Bucket(bucketName).Object(objectName).Delete(ctx)\n\n}", "func (s *Hipchat) ObjectDeleted(obj interface{}) {\n\tnotifyHipchat(s, obj, \"deleted\")\n}", "func (l *Level) DelObject(gameObject object.GameObject) {\n\tl.mainLayer.deletedIDs[gameObject] = struct{}{}\n}", "func (t *TestHandler) ObjectDeleted(obj interface{}) {\n\tdep := obj.(*apps_v1.Deployment)\n\tname := dep.GetName()\n\tif name == \"\" {\n\t\treturn\n\t}\n\tserviceName := name + \"-service\"\n\ts := &serviceResource{ns: ns,\n\t\tname: serviceName}\n\tcs := utils.GetKubeHandle()\n\texists := isServiceExists(s, cs)\n\tif !exists {\n\t\treturn\n\t}\n\terr := deleteService(s, cs)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func (db *InMemDatabase) Delete(obj contrail.IObject) error {\n\tuid := parseUID(obj.GetUuid())\n\tdata, ok := db.objectData[uid]\n\tif !ok {\n\t\treturn fmt.Errorf(\"Object %s: not in database\", obj.GetUuid())\n\t}\n\tif len(data.children) > 0 
{\n\t\treturn fmt.Errorf(\"Delete %s: has children %+v\", obj.GetUuid(), data.children)\n\t}\n\tif len(data.backRefs) > 0 {\n\t\treturn fmt.Errorf(\"Delete %s: has references %+v\", obj.GetUuid(), data.backRefs)\n\t}\n\n\tif !data.parent.IsNIL() {\n\t\tdb.deleteChild(data.parent, obj)\n\t\tif parentObj, ok := db.objByIDMap[data.parent]; ok {\n\t\t\tclearReferenceMask(parentObj)\n\t\t}\n\t}\n\tdb.deleteBackReferences(obj, data.refs)\n\n\tdelete(db.objByIDMap, uid)\n\tdelete(db.objectData, uid)\n\ttypeMap, ok := db.typeDB[obj.GetType()]\n\tif !ok {\n\t\treturn fmt.Errorf(\"No objects of type %s\", obj.GetType())\n\t}\n\tfqn := strings.Join(obj.GetFQName(), \":\")\n\tdelete(typeMap, fqn)\n\treturn nil\n}", "func (obj *SObject) Delete(id ...string) error {\n\tif obj.Type() == \"\" || obj.client() == nil {\n\t\t// Sanity check\n\t\treturn ErrFailure\n\t}\n\n\toid := obj.ID()\n\tif id != nil {\n\t\toid = id[0]\n\t}\n\tif oid == \"\" {\n\t\treturn ErrFailure\n\t}\n\n\turl := obj.client().makeURL(\"sobjects/\" + obj.Type() + \"/\" + obj.ID())\n\tlog.Println(url)\n\t_, err := obj.client().httpRequest(http.MethodDelete, url, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func DeleteObject(ctx context.Context, c Client, p *DeleteObjectParams, opts ...CallOption) (*object.Address, error) {\n\tw := new(objectAddressWriter)\n\n\terr := c.DeleteObject(ctx, p.WithTombstoneAddressTarget(w), opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn w.addr, nil\n}", "func (r Virtual_Guest_Block_Device_Template_Group) DeleteObject() (resp datatypes.Provisioning_Version1_Transaction, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Virtual_Guest_Block_Device_Template_Group\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (r Dns_Domain_ResourceRecord_MxType) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Domain_ResourceRecord_MxType\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (c *BaseController) DeleteSingleObject(r *web.Request) (*web.Response, error) {\n\tresourceID := r.PathParams[web.PathParamResourceID]\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Deleting %s with id %s\", c.objectType, resourceID)\n\n\tbyID := query.ByField(query.EqualsOperator, \"id\", resourceID)\n\tctx, err := query.AddCriteria(ctx, byID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tr.Request = r.WithContext(ctx)\n\tcriteria := query.CriteriaForContext(ctx)\n\topCtx := c.prepareOperationContextByRequest(r)\n\n\taction := func(ctx context.Context, repository storage.Repository) (types.Object, error) {\n\t\t// At this point, the resource will be already deleted if cascade operation requested.\n\t\tif c.supportsCascadeDelete && opCtx.Cascade {\n\t\t\treturn nil, nil\n\t\t}\n\t\terr := repository.Delete(ctx, c.objectType, criteria...)\n\t\treturn nil, util.HandleStorageError(err, c.objectType.String())\n\t}\n\n\tUUID, err := uuid.NewV4()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"could not generate GUID for %s: %s\", c.objectType, err)\n\t}\n\tvar cascadeRootId = \"\"\n\tif opCtx.Cascade {\n\t\tif c.supportsCascadeDelete {\n\t\t\t// Scan if requested resource really exists\n\t\t\tresources, err := c.repository.List(ctx, c.objectType, criteria...)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, util.HandleStorageError(err, c.objectType.String())\n\t\t\t}\n\t\t\tif resources.Len() == 0 {\n\t\t\t\treturn nil, &util.HTTPError{\n\t\t\t\t\tErrorType: \"NotFound\",\n\t\t\t\t\tDescription: \"Resource not 
found\",\n\t\t\t\t\tStatusCode: http.StatusNotFound,\n\t\t\t\t}\n\t\t\t}\n\t\t\tcascadeRootId = UUID.String()\n\t\t} else {\n\t\t\treturn nil, &util.HTTPError{\n\t\t\t\tErrorType: \"BadRequest\",\n\t\t\t\tDescription: \"Cascade delete is not supported for this API\",\n\t\t\t\tStatusCode: http.StatusBadRequest,\n\t\t\t}\n\t\t}\n\t}\n\tif c.supportsCascadeDelete && opCtx.Cascade {\n\t\tconcurrentOp, err := operations.FindCascadeOperationForResource(ctx, c.repository, resourceID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif concurrentOp != nil {\n\t\t\treturn util.NewLocationResponse(concurrentOp.GetID(), resourceID, c.resourceBaseURL)\n\t\t}\n\t}\n\n\tisForce := r.URL.Query().Get(web.QueryParamForce) == \"true\"\n\tlabels := types.Labels{}\n\tif isForce {\n\t\tlabels[\"force\"] = []string{\"true\"}\n\t}\n\n\toperation := &types.Operation{\n\t\tBase: types.Base{\n\t\t\tID: UUID.String(),\n\t\t\tCreatedAt: time.Now(),\n\t\t\tUpdatedAt: time.Now(),\n\t\t\tLabels: labels,\n\t\t\tReady: true,\n\t\t},\n\t\tType: types.DELETE,\n\t\tState: types.IN_PROGRESS,\n\t\tResourceID: resourceID,\n\t\tResourceType: c.objectType,\n\t\tPlatformID: types.SMPlatform,\n\t\tCorrelationID: log.CorrelationIDFromContext(ctx),\n\t\tContext: opCtx,\n\t\tCascadeRootID: cascadeRootId,\n\t}\n\tif c.supportsCascadeDelete && opCtx.Cascade {\n\t\t_, err = c.scheduler.ScheduleSyncStorageAction(ctx, operation, action)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn util.NewLocationResponse(operation.GetID(), operation.ResourceID, c.resourceBaseURL)\n\t}\n\t_, isAsync, err := c.scheduler.ScheduleStorageAction(ctx, operation, action, c.supportsAsync)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif isAsync {\n\t\treturn util.NewLocationResponse(operation.GetID(), operation.ResourceID, c.resourceBaseURL)\n\t}\n\n\treturn util.NewJSONResponse(http.StatusOK, map[string]string{})\n}", "func (o *ObjectNode) deleteObjectHandler(w http.ResponseWriter, r *http.Request) {\n\tlog.LogInfof(\"Delete object...\")\n\n\t_, _, object, vl, err := o.parseRequestParams(r)\n\tif err != nil {\n\t\tlog.LogErrorf(\"deleteObjectHandler: parse request params fail: requestID(%v) err(%v)\", RequestIDFromRequest(r), err)\n\t\t_ = NoSuchBucket.ServeResponse(w, r)\n\t\treturn\n\t}\n\n\terr = vl.DeleteFile(object)\n\tif err != nil {\n\t\tlog.LogErrorf(\"deleteObjectHandler: volume delete file fail: requestID(%v) err(%v)\", RequestIDFromRequest(r), err)\n\t\t_ = InternalError.ServeResponse(w, r)\n\t\treturn\n\t}\n\n\treturn\n}", "func (api *objectAPI) Delete(obj *objstore.Object) error {\n\tif api.ct.resolver != nil {\n\t\tapicl, err := api.ct.apiClient()\n\t\tif err != nil {\n\t\t\tapi.ct.logger.Errorf(\"Error creating API server clent. 
Err: %v\", err)\n\t\t\treturn err\n\t\t}\n\n\t\t_, err = apicl.ObjstoreV1().Object().Delete(context.Background(), &obj.ObjectMeta)\n\t\treturn err\n\t}\n\n\tapi.ct.handleObjectEvent(&kvstore.WatchEvent{Object: obj, Type: kvstore.Deleted})\n\treturn nil\n}", "func (a *Client) SafeObjectDelete(params *SafeObjectDeleteParams) (*SafeObjectDeleteOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewSafeObjectDeleteParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"safeObjectDelete\",\n\t\tMethod: \"DELETE\",\n\t\tPathPattern: \"/domainSafeObject/{object}/{type}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &SafeObjectDeleteReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*SafeObjectDeleteOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for safeObjectDelete: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (p *ExperimentalPlayground) unsafeDeleteObject(object engine.Object) error {\n\tif !p.unsafeObjectExists(object) {\n\t\treturn errors.New(\"delete object error: object to delete not found\")\n\t}\n\n\tdelete(p.objectsContainers, object)\n\treturn nil\n}", "func (t *Handler) ObjectDeleted(obj interface{}) {\n\tlog.Info(\"URRHandler.ObjectDeleted\")\n\t// Mail notification, TBD\n}", "func (a *UserServiceApiService) DeleteObject_1(ctx _context.Context, ident string, platform string, username string) ApiDeleteObject_0Request {\n\treturn ApiDeleteObject_0Request{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tident: ident,\n\t\tplatform: platform,\n\t\tusername: username,\n\t}\n}", "func (o *Object) Delete(ctx context.Context, c client.Client, namespace string) error {\n\tobj := o.Type.DeepCopyObject().(client.Object)\n\tkind := obj.GetObjectKind().GroupVersionKind().Kind\n\tkey := objectKey(namespace, o.Name)\n\tif err := c.Get(ctx, key, obj); err != nil {\n\t\tif apierrors.IsNotFound(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn errors.Wrapf(err, \"could not get %s '%s'\", kind, key.String())\n\t}\n\tif err := c.Delete(ctx, obj); err != nil {\n\t\treturn errors.Wrapf(err, \"could not delete %s '%s'\", kind, key.String())\n\t}\n\treturn nil\n}", "func (iter *DeleteObjectsIterator) DeleteObject() BatchDeleteObject {\n\tobject := iter.Objects[iter.index]\n\treturn object\n}", "func deleteObject(objectName string,\n\tbucketName string,\n\tsess *session.Session) (*s3.DeleteObjectOutput, error) {\n\tsvc := s3.New(sess)\n\tinput := &s3.DeleteObjectInput{\n\t\tBucket: aws.String(bucketName),\n\t\tKey: aws.String(objectName),\n\t}\n\tcount.Incr(\"aws-delete\")\n\toutput, err := svc.DeleteObject(input)\n\tif err != nil {\n\t\tif aerr, ok := err.(awserr.Error); ok {\n\t\t\tswitch aerr.Code() {\n\t\t\tcase s3.ErrCodeObjectNotInActiveTierError:\n\t\t\t\tfmt.Println(s3.ErrCodeObjectNotInActiveTierError, aerr.Error())\n\t\t\tdefault:\n\t\t\t\tfmt.Println(aerr.Error())\n\t\t\t}\n\t\t} else {\n\t\t\tfmt.Println(err.Error())\n\t\t}\n\t\tfmt.Println(\"Got error on delete ******** \", bucketName, 
objectName)\n\t\tcount.Incr(\"error-delete\")\n\t\tif strings.Contains(err.Error(), \"SlowDown\") {\n\t\t\tfmt.Println(\"Got slow down, sleeping for a few minutes\")\n\t\t\tcount.Incr(\"slow-down\")\n\t\t\ttime.Sleep(60 * time.Second)\n\t\t}\n\t\treturn nil, err\n\t}\n\treturn output, nil\n}", "func (h *Handler) DeleteFromObject(obj interface{}) error {\n\trc, ok := obj.(*corev1.ReplicationController)\n\tif !ok {\n\t\treturn fmt.Errorf(\"object type is not *corev1.ReplicationController\")\n\t}\n\treturn h.deleteRC(rc)\n}", "func (o *ObjectNode) deleteObjectTagging(w http.ResponseWriter, r *http.Request) {\n\t// TODO: implement handler 'DeleteObjectTagging'\n\treturn\n}", "func (sd *StateDB) DeleteStateObject(addr types.Address) error {\n\tsd.mu.Lock()\n\tdefer sd.mu.Unlock()\n\n\tdefer sd.refreshbeat(addr)\n\treturn sd.deleteStateObject(addr)\n}", "func (r Dns_Domain_ResourceRecord_SrvType) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Domain_ResourceRecord_SrvType\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (h *DaprHandler) ObjectDeleted(obj interface{}) {\n\th.deploymentsLock.Lock()\n\tdefer h.deploymentsLock.Unlock()\n\n\tdeployment := obj.(*appsv1.Deployment)\n\tannotated := h.isAnnotatedForDapr(deployment)\n\tif annotated {\n\t\tid := h.getAppID(deployment)\n\t\tif id == \"\" {\n\t\t\tlog.Warnf(\"skipping service deletion: id for deployment %s is empty\", deployment.GetName())\n\t\t\treturn\n\t\t}\n\n\t\terr := h.deleteDaprService(id, deployment)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"failed deleting service for deployment %s: %s\", deployment.GetName(), err)\n\t\t}\n\n\t\tmonitoring.RecordServiceDeletedCount(id)\n\t}\n}", "func DeletePublicObject(c echo.Context) error {\n\tpublicObjectStore, err := createPublicObjectStore(c)\n\tif err != nil {\n\t\tglog.Error(err)\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\th := NewPublicObjectHandler(publicObjectStore)\n\treturn h.Delete(c)\n}", "func (s *StateDB) deleteStateObject(stateObject *stateObject) {\n\tstateObject.deleted = true\n\taddr := stateObject.Address()\n\ts.setError(s.trie.Delete(addr[:]))\n}", "func afterObjectDelete(ctx context.Context, object *storage.ObjectHandle, err error) error {\n\treturn err\n}", "func (r *FakeClient) Delete(ctx context.Context, obj client.Object, opts ...client.DeleteOption) error {\n\tdelete(r.objects, util.NamespacedName(obj))\n\treturn nil\n}", "func (m *mockS3Client) DeleteObject(in *s3.DeleteObjectInput) (*s3.DeleteObjectOutput, error) {\n\tdelete(m.objects, *in.Key)\n\treturn &s3.DeleteObjectOutput{}, nil\n}", "func (c *BaseController) DeleteObjects(r *web.Request) (*web.Response, error) {\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Deleting %ss...\", c.objectType)\n\n\tcriteria := query.CriteriaForContext(ctx)\n\tif _, err := c.repository.Delete(ctx, c.objectType, criteria...); err != nil {\n\t\treturn nil, util.HandleStorageError(err, string(c.objectType))\n\t}\n\n\treturn util.NewJSONResponse(http.StatusOK, map[string]string{})\n}", "func (iter *DeleteListIterator) DeleteObject() BatchDeleteObject {\n\treturn BatchDeleteObject{\n\t\tObject: &s3.DeleteObjectInput{\n\t\t\tBucket: iter.Bucket,\n\t\t\tKey: iter.objects[0].Key,\n\t\t},\n\t}\n}", "func (ep *EtcdClient) DelObj(key string) error {\n\tlog.Infof(`objdb: deleting \"%s\"`, key)\n\tkeyName := \"/contiv.io/obj/\" + key\n\n\t// Remove it via etcd client\n\t_, err := ep.client.KV.Delete(context.Background(), keyName)\n\tif err != nil 
{\n\t\t// Retry few times if cluster is unavailable\n\t\tif err.Error() == client.ErrNoAvailableEndpoints.Error() {\n\t\t\tfor i := 0; i < maxEtcdRetries; i++ {\n\t\t\t\t_, err = ep.client.KV.Delete(context.Background(), keyName)\n\t\t\t\tif err == nil {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t// Retry after a delay\n\t\t\t\ttime.Sleep(time.Second)\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error removing key %s, Err: %v\", keyName, err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (r Virtual_PlacementGroup) DeleteObject() (resp bool, err error) {\n\terr = r.Session.DoRequest(\"SoftLayer_Virtual_PlacementGroup\", \"deleteObject\", nil, &r.Options, &resp)\n\treturn\n}", "func (m *InstallManager) deleteAnyExistingObject(namespacedName types.NamespacedName, obj runtime.Object) error {\n\treturn resource.DeleteAnyExistingObject(m.DynamicClient, namespacedName, obj, m.log)\n}", "func (ctrler CtrlDefReactor) OnObjectDelete(obj *Object) error {\n\tlog.Info(\"OnObjectDelete is not implemented\")\n\treturn nil\n}", "func (m *memClient) RemoveObject(path string) error {\n\tdefer m.Unlock()\n\tm.Lock()\n\tdelete(m.store, path)\n\n\treturn nil\n}", "func removeObject(t *testing.T, s3Svc *common.S3, bucket *string, key *string) {\n\t_, err := s3Svc.Client.DeleteObject(\n\t\t&s3.DeleteObjectInput{\n\t\t\tBucket: bucket,\n\t\t\tKey: key,\n\t\t},\n\t)\n\trequire.Nil(t, err)\n}", "func (ep *EtcdClient) DelObj(key string) error {\n\tkeyName := \"/contiv.io/obj/\" + key\n\n\t// Remove it via etcd client\n\t_, err := ep.kapi.Delete(context.Background(), keyName, nil)\n\tif err != nil {\n\t\t// Retry few times if cluster is unavailable\n\t\tif err.Error() == client.ErrClusterUnavailable.Error() {\n\t\t\tfor i := 0; i < maxEtcdRetries; i++ {\n\t\t\t\t_, err = ep.kapi.Delete(context.Background(), keyName, nil)\n\t\t\t\tif err == nil {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\t// Retry after a delay\n\t\t\t\ttime.Sleep(time.Second)\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error removing key %s, Err: %v\", keyName, err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *ObjectNode) deleteObjectXAttr(w http.ResponseWriter, r *http.Request) {\n\t// TODO: implement 'deleteObjectXAttr'\n}", "func (h *Handler) ObjectDeleted(obj interface{}) (bool, string) {\n\treturn true, \"\"\n}", "func (o *PaymentObject) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif o == nil {\n\t\treturn 0, errors.New(\"models: no PaymentObject provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), paymentObjectPrimaryKeyMapping)\n\tsql := \"DELETE FROM `payment_objects` WHERE `id`=?\"\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete from payment_objects\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by delete for payment_objects\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn rowsAff, nil\n}", "func (t *Handler) ObjectDeleted(obj, deleted interface{}) {\n\tlog.Info(\"TeamHandler.ObjectDeleted\")\n\tfieldDeleted := 
deleted.(fields)\n\tt.clientset.CoreV1().Namespaces().Delete(fieldDeleted.object.childNamespace, &metav1.DeleteOptions{})\n\t// If there are users who participate in the team and team is enabled\n\tif fieldDeleted.users.status && fieldDeleted.enabled {\n\t\tteamOwnerNamespace, _ := t.clientset.CoreV1().Namespaces().Get(fieldDeleted.object.ownerNamespace, metav1.GetOptions{})\n\t\tvar deletedUserList []apps_v1alpha.SliceUsers\n\t\tjson.Unmarshal([]byte(fieldDeleted.users.deleted), &deletedUserList)\n\t\tif len(deletedUserList) > 0 {\n\t\t\tfor _, deletedUser := range deletedUserList {\n\t\t\t\tt.sendEmail(deletedUser.Username, deletedUser.Authority, teamOwnerNamespace.Labels[\"authority-name\"], fieldDeleted.object.ownerNamespace, fieldDeleted.object.name, fieldDeleted.object.childNamespace, \"team-deletion\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (t *TestHandler) ObjectDeleted(obj interface{}) {\n\n\ttableName := strings.Split(obj.(string), \"/\")[1]\n\tfmt.Println(\"Object deleted\")\n\tfmt.Println(tableName)\n\t// fmt.Println(tableName)\n\tsess, err := session.NewSession(&aws.Config{\n\t\tRegion: aws.String(\"eu-west-1\"),\n\t\tCredentials: credentials.NewSharedCredentials(\"\", \"saml\"),\n\t})\n\n\t// Create DynamoDB client\n\tsvc := dynamodb.New(sess)\n\n\tinput := &dynamodb.DeleteTableInput{\n\t\tTableName: aws.String(tableName),\n\t}\n\tresult, err := svc.DeleteTable(input)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\t// if aerr, ok := err.(awserr.Error); ok {\n\t// \tswitch aerr.Code() {\n\t// \tcase dynamodb.ErrCodeResourceInUseException:\n\t// \t\tfmt.Println(dynamodb.ErrCodeResourceInUseException, aerr.Error())\n\t// \tcase dynamodb.ErrCodeResourceNotFoundException:\n\t// \t\tfmt.Println(dynamodb.ErrCodeResourceNotFoundException, aerr.Error())\n\t// \tcase dynamodb.ErrCodeLimitExceededException:\n\t// \t\tfmt.Println(dynamodb.ErrCodeLimitExceededException, aerr.Error())\n\t// \tcase dynamodb.ErrCodeInternalServerError:\n\t// \t\tfmt.Println(dynamodb.ErrCodeInternalServerError, aerr.Error())\n\t// \tdefault:\n\t// \t\tfmt.Println(aerr.Error())\n\t// \t}\n\t// } else {\n\t// \t// Print the error, cast err to awserr.Error to get the Code and\n\t// \t// Message from an error.\n\t// \tfmt.Println(err.Error())\n\t// }\n\t// return\n\t// }\n\tfmt.Println(result)\n\tfmt.Println(\"TestHandler.ObjectDeleted\")\n}", "func (r *repository) DeleteByReferenceObjectID(ctx context.Context, bundleID string, objectType model.BundleReferenceObjectType, objectID string) error {\n\tfieldName, err := r.referenceObjectFieldName(objectType)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tconditions := repo.Conditions{\n\t\trepo.NewEqualCondition(fieldName, objectID),\n\t\trepo.NewEqualCondition(bundleIDColumn, bundleID),\n\t}\n\n\treturn r.deleter.DeleteOneGlobal(ctx, conditions)\n}", "func (c *BaseController) DeleteObjects(r *web.Request) (*web.Response, error) {\n\tctx := r.Context()\n\tlog.C(ctx).Debugf(\"Deleting %ss...\", c.objectType)\n\n\tisAsync := r.URL.Query().Get(web.QueryParamAsync)\n\tif isAsync == \"true\" {\n\t\treturn nil, &util.HTTPError{\n\t\t\tErrorType: \"BadRequest\",\n\t\t\tDescription: \"Only one resource can be deleted asynchronously at a time\",\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\n\tcriteria := query.CriteriaForContext(ctx)\n\n\tlog.C(ctx).Debugf(\"Request will be executed synchronously\")\n\tif err := c.repository.Delete(ctx, c.objectType, criteria...); err != nil {\n\t\treturn nil, util.HandleStorageError(err, c.objectType.String())\n\t}\n\n\treturn 
util.NewJSONResponse(http.StatusOK, map[string]string{})\n}", "func (h *Heap) Delete(obj interface{}) error {\n\tkey, err := h.data.keyFunc(obj)\n\tif err != nil {\n\t\treturn cache.KeyError{Obj: obj, Err: err}\n\t}\n\tif item, ok := h.data.items[key]; ok {\n\t\theap.Remove(h.data, item.index)\n\t\tif h.metricRecorder != nil {\n\t\t\th.metricRecorder.Dec()\n\t\t}\n\t\treturn nil\n\t}\n\treturn fmt.Errorf(\"object not found\")\n}", "func (client *MockClient) Delete(context ctx.Context, object ctrlClient.Object, options ...ctrlClient.DeleteOption) error {\n\tkindKey, err := buildKindKey(object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclient.fillInMaps(kindKey)\n\n\tobjectKey, err := buildRuntimeObjectKey(object)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = client.checkPresence(kindKey, objectKey)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tstuckTerminating := client.stuckTerminatingObjects != nil && client.stuckTerminatingObjects[kindKey] != nil && client.stuckTerminatingObjects[kindKey][objectKey]\n\tif !stuckTerminating {\n\t\tdelete(client.data[kindKey], objectKey)\n\t}\n\n\treturn nil\n}", "func (a *ObjectsApiService) DeleteObjectItem(ctx context.Context, templateId string, objectId int32) ( *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Delete\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/objects/{template_id}/{object_id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"template_id\"+\"}\", fmt.Sprintf(\"%v\", templateId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"object_id\"+\"}\", fmt.Sprintf(\"%v\", objectId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t localVarHttpResponse, err := a.client.callAPI(r)\n\t if err != nil || localVarHttpResponse == nil {\n\t\t return localVarHttpResponse, err\n\t }\n\t defer localVarHttpResponse.Body.Close()\n\t if localVarHttpResponse.StatusCode >= 300 {\n\t\treturn localVarHttpResponse, reportError(localVarHttpResponse.Status)\n\t }\n\n\treturn localVarHttpResponse, err\n}", "func (r *ReconcilerBase) DeleteResource(obj runtime.Object) error {\n\tlogger := NewLogger(true) //log in JSON format\n\n\terr := r.client.Delete(context.TODO(), obj)\n\tif err != nil {\n\t\tif !apierrors.IsNotFound(err) {\n\t\t\tif logger.IsEnabled(LogTypeError) {\n\t\t\t\tlogger.Log(CallerName(), LogTypeError, fmt.Sprintf(\"Unable to delete object: %s, Error: %s \", obj, err), logName)\n\t\t\t}\n\t\t\treturn 
err\n\t\t}\n\t\treturn nil\n\t}\n\n\tmetaObj, ok := obj.(metav1.Object)\n\tif !ok {\n\t\terr := fmt.Errorf(\"%T is not a runtime.Object\", obj)\n\t\tif logger.IsEnabled(LogTypeError) {\n\t\t\tlogger.Log(CallerName(), LogTypeError, fmt.Sprintf(\"Failed to convert into runtime.Object, Error: %s \", err), logName)\n\t\t}\n\t\treturn err\n\t}\n\n\tvar gvk schema.GroupVersionKind\n\tgvk, err = apiutil.GVKForObject(obj, r.scheme)\n\tif err == nil {\n\t\tif logger.IsEnabled(LogTypeInfo) {\n\t\t\tlogger.Log(CallerName(), LogTypeInfo, fmt.Sprintf(\"Reconciled, Kind: %s, Name: %s, Status: deleted\", gvk.Kind, metaObj.GetName()), logName)\n\t\t}\n\t}\n\treturn nil\n}", "func (r *ReconcilerBase) DeleteResource(obj runtime.Object) error {\n\terr := r.client.Delete(context.TODO(), obj)\n\tif err != nil {\n\t\tif !apierrors.IsNotFound(err) {\n\t\t\tlog.Error(err, \"Unable to delete object \", \"object\", obj)\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\tmetaObj, ok := obj.(metav1.Object)\n\tif !ok {\n\t\terr := fmt.Errorf(\"%T is not a metav1.Object\", obj)\n\t\tlog.Error(err, \"Failed to convert into metav1.Object\")\n\t\treturn err\n\t}\n\n\tvar gvk schema.GroupVersionKind\n\tgvk, err = apiutil.GVKForObject(obj, r.scheme)\n\tif err == nil {\n\t\tlog.Info(\"Reconciled\", \"Kind\", gvk.Kind, \"Name\", metaObj.GetName(), \"Status\", \"deleted\")\n\t}\n\treturn nil\n}", "func DeleteObjectAction(w http.ResponseWriter, r *http.Request) {\n\n\tsvc := s3.New(sess)\n\n\tpageVars := PageVars{}\n\taddPageVars(r, &pageVars)\n\n\tif len(pageVars.BName) <= 0 {\n\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=Invalid bucket name\", http.StatusSeeOther)\n\t} else if len(pageVars.FName) <= 0 {\n\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=Invalid file name\", http.StatusSeeOther)\n\t} else {\n\t\tbucket := aws.String(pageVars.BName)\n\t\titem := aws.String(pageVars.FName)\n\n\t\t_, err := svc.DeleteObject(&s3.DeleteObjectInput{\n\t\t\tBucket: bucket,\n\t\t\tKey: item,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=\"+awsErr.Message(), http.StatusSeeOther)\n\t\t\t} else {\n\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=Failed to delete\", http.StatusSeeOther)\n\t\t\t}\n\t\t} else {\n\t\t\terr = svc.WaitUntilObjectNotExists(&s3.HeadObjectInput{\n\t\t\t\tBucket: bucket,\n\t\t\t\tKey: item,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=\"+awsErr.Message(), http.StatusSeeOther)\n\t\t\t\t} else {\n\t\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&errorM=Failed to delete\", http.StatusSeeOther)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\thttp.Redirect(w, r, \"/objectlist?bucketName=\"+pageVars.BName+\"&successM=Successfully deleted\", http.StatusSeeOther)\n\t\t\t}\n\t\t}\n\n\t}\n}", "func (f *Controller) delete(indexName string, objectID string) error {\n\tindex, err := f.indexes.GetIndex(indexName)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = index.DeleteObject(objectID)\n\treturn err\n}", "func (b *Base) DestroyObject(req *DestroyObjectReq) error {\n\treturn ErrFunctionNotSupported\n}", "func (o *FakeObject) Delete(key string) { delete(o.Properties, key) }", "func (r DeleteObjectRequest) Send() (*DeleteObjectOutput, error) {\n\terr := r.Request.Send()\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\n\treturn r.Request.Data.(*DeleteObjectOutput), nil\n}" ]
[ "0.8063331", "0.7941498", "0.78729457", "0.7750781", "0.7721379", "0.7607542", "0.752522", "0.7473939", "0.7472445", "0.7378365", "0.73745173", "0.7333897", "0.73176414", "0.7316052", "0.7280629", "0.7272649", "0.72621286", "0.72478503", "0.7203864", "0.7195852", "0.71928763", "0.7161067", "0.7149009", "0.71292496", "0.712896", "0.7126384", "0.70904493", "0.69709474", "0.6963006", "0.6959672", "0.6951041", "0.69494116", "0.6948141", "0.6919908", "0.6911211", "0.69097304", "0.68567276", "0.68528056", "0.6847051", "0.6834553", "0.68199605", "0.6815803", "0.6798467", "0.6795369", "0.6791382", "0.67752934", "0.67480826", "0.6747257", "0.673502", "0.67332435", "0.67097837", "0.6707197", "0.66939616", "0.6688816", "0.66639125", "0.666328", "0.6645327", "0.6631213", "0.663", "0.6627355", "0.66244185", "0.66077423", "0.6533922", "0.64955086", "0.6462554", "0.6439918", "0.6420247", "0.64091223", "0.6385293", "0.63808054", "0.6371852", "0.63598526", "0.63550436", "0.633774", "0.62828314", "0.62663496", "0.6244774", "0.6244342", "0.62010866", "0.6180991", "0.617058", "0.6164408", "0.6153519", "0.6119971", "0.60702294", "0.606532", "0.6062534", "0.6057848", "0.6038065", "0.5985097", "0.5974084", "0.5967558", "0.5962312", "0.59545505", "0.59346646", "0.5934331", "0.59300166", "0.5875075", "0.58722204", "0.58685064" ]
0.62027323
78
OpenStorageConnection : Initialize and test connection with object storage
func OpenStorageConnection() error {
	accessToken, err := Vault.GetStorageAccessToken()
	if err != nil {
		return err
	}
	secretToken, err := Vault.GetStorageSecretToken()
	if err != nil {
		return err
	}
	Storage = &ObjectStorage{}
	Storage.connect(
		os.Getenv("STORAGE_API_ENDPOINT"),
		credentials.NewStaticCredentials(accessToken, secretToken, ""),
	)
	_, err = Storage.ListBuckets()
	if err != nil {
		return errors.New("fatal: failed to list buckets from remote. configuration error?")
	}
	klog.Info("established connection with object storage")
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *MongoStorage) Open() error {\n\topts := options.Client().ApplyURI(s.config.DatabaseURL).SetAuth(options.Credential{AuthSource: s.config.DatabaseName, Username: s.config.DatabaseUser, Password: s.config.DatabasePasswd})\n\tclient, err := mongo.NewClient(opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := client.Connect(context.TODO()); err != nil {\n\t\treturn err\n\t}\n\n\tif err = client.Ping(context.TODO(), nil); err != nil {\n\t\treturn err\n\t}\n\n\ts.client = client\n\ts.Db = client.Database(s.config.DatabaseName)\n\n\treturn nil\n}", "func (in *Config) Open(logger *zap.SugaredLogger) (generic.Storage, error) {\n\tcfg := clientv3.Config{\n\t\tEndpoints: in.Endpoints,\n\t\tDialTimeout: defaultDialTimeout * time.Second,\n\t\tUsername: in.User,\n\t\tPassword: in.Password,\n\t}\n\n\tvar cfgtls *transport.TLSInfo\n\tif in.SSLOptions != nil {\n\t\ttlsinfo := transport.TLSInfo{}\n\n\t\tif in.SSLOptions.SSLCert != \"\" {\n\t\t\ttlsinfo.CertFile = in.SSLOptions.SSLCert\n\t\t\tcfgtls = &tlsinfo\n\t\t}\n\t\tif in.SSLOptions.SSLKey != \"\" {\n\t\t\ttlsinfo.KeyFile = in.SSLOptions.SSLKey\n\t\t\tcfgtls = &tlsinfo\n\t\t}\n\t\tif in.SSLOptions.CACert != \"\" {\n\t\t\ttlsinfo.CAFile = in.SSLOptions.CACert\n\t\t\tcfgtls = &tlsinfo\n\t\t}\n\t\tif in.SSLOptions.ServerName != \"\" {\n\t\t\ttlsinfo.ServerName = in.SSLOptions.ServerName\n\t\t\tcfgtls = &tlsinfo\n\t\t}\n\n\t\tif cfgtls != nil {\n\t\t\tclientTLS, err := cfgtls.ClientConfig()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tcfg.TLS = clientTLS\n\t\t}\n\t}\n\n\tcfg.DialTimeout = 3 * time.Second\n\n\tdb, err := clientv3.New(cfg)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(in.Namespace) == 0 {\n\t\tin.Namespace = []string{\"/com.redhat\", \"panther\"}\n\t}\n\tif !strings.HasPrefix(in.Namespace[0], \"/\") {\n\t\tin.Namespace[0] = \"/\" + in.Namespace[0]\n\t}\n\tprefix := filepath.Join(in.Namespace...)\n\tdb.KV = namespace.NewKV(db.KV, prefix)\n\tc := &conn{\n\t\tprefix: prefix,\n\t\tdb: db,\n\t\tlogger: logger,\n\t}\n\treturn c, nil\n}", "func (a *AzureBlobStorage) Connect() error {\n\treturn nil\n}", "func (a *azureBlobStorage) Connect(ctx context.Context) error {\n\tvar err error\n\ta.keyReader, err = newAzureTargetReader(ctx, a.conf, a.log, a.container)\n\treturn err\n}", "func StorageObjectTest(app *Server, t *testing.T) {\n\tapp.Storage.Clear()\n\tindex, err := app.Storage.Set(\"test\", \"test\")\n\trequire.NoError(t, err)\n\trequire.NotEmpty(t, index)\n\tdata, _ := app.Storage.Get(\"test\")\n\ttestObject, err := objects.DecodeRaw(data)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"test\", testObject.Data)\n\trequire.Equal(t, int64(0), testObject.Updated)\n\tindex, err = app.Storage.Set(\"test\", \"test_update\")\n\trequire.NoError(t, err)\n\trequire.NotEmpty(t, index)\n\tdata, err = app.Storage.Get(\"test\")\n\trequire.NoError(t, err)\n\ttestObject, err = objects.DecodeRaw(data)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"test_update\", testObject.Data)\n\terr = app.Storage.Del(\"test\")\n\trequire.NoError(t, err)\n\traw, _ := app.Storage.Get(\"test\")\n\tdataDel := string(raw)\n\trequire.Empty(t, dataDel)\n}", "func TestObjectStorageAPI(t *testing.T) {\n\tif testing.Short() {\n\t\tt.Skip(\"skipping test in short mode.\")\n\t}\n\n\tcontainer := os.Getenv(\"TEST_OBJECTSTORAGE_CONTAINERNAME\")\n\tif container == \"\" {\n\t\tt.Skip(\"No container specified for integration test in TEST_OBJECTSTORAGE_CONTAINERNAME env variable.\")\n\t}\n\n\tauthenticator, err := 
identity.AuthenticateFromEnvVars()\n\tif err != nil {\n\t\tt.Fatal(\"Cannot authenticate from env vars:\", err)\n\t}\n\n\ttokenID, err := authenticator.GetToken()\n\tif err != nil {\n\t\tt.Fatal(\"There was an error authenticating:\", err)\n\t}\n\n\turl, err := authenticator.GetServiceURL(\"object-store\", \"1.0\")\n\tif err != nil {\n\t\tt.Fatal(\"There was an error determining the object-store service url:\", err)\n\t}\n\n\thdr, err := objectstorage.GetAccountMeta(url, tokenID)\n\tif err != nil {\n\t\tt.Fatal(\"There was an error getting account metadata:\", err)\n\t}\n\n\t// Create a new container.\n\tif err = objectstorage.PutContainer(url+container, tokenID,\n\t\t\"X-Log-Retention\", \"true\"); err != nil {\n\t\tt.Fatal(\"PutContainer Error:\", err)\n\t}\n\n\t// Get a list of all the containers at the selected endoint.\n\tcontainersJSON, err := objectstorage.ListContainers(0, \"\", url, tokenID)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttype containerType struct {\n\t\tName string\n\t\tBytes, Count int\n\t}\n\tcontainersList := []containerType{}\n\n\tif err = json.Unmarshal(containersJSON, &containersList); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tfound := false\n\tfor i := 0; i < len(containersList); i++ {\n\t\tif containersList[i].Name == container {\n\t\t\tfound = true\n\t\t}\n\t}\n\tif !found {\n\t\tt.Fatal(\"Created container is missing from downloaded containersList\")\n\t}\n\n\t// Set and Get container metadata.\n\tif err = objectstorage.SetContainerMeta(url+container, tokenID,\n\t\t\"X-Container-Meta-fubar\", \"false\"); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\thdr, err = objectstorage.GetContainerMeta(url+container, tokenID)\n\tif err != nil {\n\t\tt.Fatal(fmt.Sprint(\"GetContainerMeta Error:\", err))\n\t}\n\tif hdr.Get(\"X-Container-Meta-fubar\") != \"false\" {\n\t\tt.Fatal(\"container meta does not match\")\n\t}\n\n\t// Create an object in a container.\n\tvar fContent []byte\n\tsrcFile := \"10-objectstore.go\"\n\tfContent, err = ioutil.ReadFile(srcFile)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tobject := container + \"/\" + srcFile\n\tif err = objectstorage.PutObject(&fContent, url+object, tokenID,\n\t\t\"X-Object-Meta-fubar\", \"false\"); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tobjectsJSON, err := objectstorage.ListObjects(0, \"\", \"\", \"\", \"\",\n\t\turl+container, tokenID)\n\n\ttype objectType struct {\n\t\tName, Hash, Content_type, Last_modified string\n\t\tBytes int\n\t}\n\tobjectsList := []objectType{}\n\n\tif err = json.Unmarshal(objectsJSON, &objectsList); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tfound = false\n\tfor i := 0; i < len(objectsList); i++ {\n\t\tif objectsList[i].Name == srcFile {\n\t\t\tfound = true\n\t\t}\n\t}\n\tif !found {\n\t\tt.Fatal(\"created object is missing from the objectsList\")\n\t}\n\n\t// Manage object metadata\n\tif err = objectstorage.SetObjectMeta(url+object, tokenID,\n\t\t\"X-Object-Meta-fubar\", \"true\"); err != nil {\n\t\tt.Fatal(\"SetObjectMeta Error:\", err)\n\t}\n\thdr, err = objectstorage.GetObjectMeta(url+object, tokenID)\n\tif err != nil {\n\t\tt.Fatal(\"GetObjectMeta Error:\", err)\n\t}\n\tif hdr.Get(\"X-Object-Meta-fubar\") != \"true\" {\n\t\tt.Fatal(\"SetObjectMeta Error:\", err)\n\t}\n\n\t// Retrieve an object and check that it is the same as what as uploaded.\n\t_, body, err := objectstorage.GetObject(url+object, tokenID)\n\tif err != nil {\n\t\tt.Fatal(\"GetObject Error:\", err)\n\t}\n\tif !bytes.Equal(fContent, body) {\n\t\tt.Fatal(\"GetObject Error:\", \"byte comparison of uploaded != 
downloaded\")\n\t}\n\n\t// Duplication (Copy) an existing object.\n\tif err = objectstorage.CopyObject(url+object, \"/\"+object+\".dup\", tokenID); err != nil {\n\t\tt.Fatal(\"CopyObject Error:\", err)\n\t}\n\n\t// Delete the objects.\n\tif err = objectstorage.DeleteObject(url+object, tokenID); err != nil {\n\t\tt.Fatal(\"DeleteObject Error:\", err)\n\t}\n\tif err = objectstorage.DeleteObject(url+object+\".dup\", tokenID); err != nil {\n\t\tt.Fatal(\"DeleteObject Error:\", err)\n\t}\n\n\t// Delete the container that was previously created.\n\tif err = objectstorage.DeleteContainer(url+container, tokenID); err != nil {\n\t\tt.Fatal(\"DeleteContainer Error:\", err)\n\t}\n}", "func InitStorage(service string, bucket string) {\n\ttransferType = service\n\tbenchName = bucket\n\tawsAccessKey, ok := os.LookupEnv(\"AWS_ACCESS_KEY\")\n\tif ok {\n\t\tAKID = awsAccessKey\n\t}\n\tawsSecretKey, ok := os.LookupEnv(\"AWS_SECRET_KEY\")\n\tif ok {\n\t\tSECRET_KEY = awsSecretKey\n\t}\n\tAWS_S3_REGION = \"us-west-1\"\n\tawsRegion, ok := os.LookupEnv(\"AWS_REGION\")\n\tif ok {\n\t\tAWS_S3_REGION = awsRegion\n\t}\n\tif transferType == S3 {\n\t\tvar err error\n\t\ts3session, err = session.NewSession(&aws.Config{\n\t\t\tRegion: aws.String(AWS_S3_REGION),\n\t\t\tCredentials: credentials.NewStaticCredentials(AKID, SECRET_KEY, TOKEN),\n\t\t})\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed establish s3 session: %s\", err)\n\t\t}\n\t} else if transferType == ELASTICACHE {\n\t\tredisClient = redis.NewClient(&redis.Options{\n\t\t\tAddr: benchName,\n\t\t\tPassword: \"\", // no password set\n\t\t\tDB: 0, // use default DB\n\t\t})\n\t}\n}", "func (s *Storage) Open() error {\n\tvar err error\n\tdbfile := s.Path\n\tconfig := &bolt.Options{Timeout: 1 * time.Second}\n\ts.DB, err = bolt.Open(dbfile, 0600, config)\n\tif err == nil {\n\t\ts.Opened = true\n\t}\n\treturn err\n}", "func Open(backend Backend) (*Storage, error) {\n\ts := new(Storage)\n\ts.backend = backend\n\terr := s.readHeader()\n\tif err != nil {\n\t\tlog.Errorf(\"error reading storage header: %s\", err)\n\t\treturn nil, err\n\t}\n\treturn s, nil\n}", "func (b *BoltDBStorage) Connect() error {\n\tdb, err := bolt.Open(b.config.DBPath, 0600, &bolt.Options{Timeout: time.Second * 1})\n\tif err != nil {\n\t\tlog.Printf(\"could not connnect to database\")\n\t\treturn fmt.Errorf(\"boldb error: %s \", err)\n\t}\n\tb.db = db\n\treturn nil\n}", "func OpenStorage(name string) (Storage, error) {\n\tif storage, ok := storages[name]; ok {\n\t\treturn storage, nil\n\t}\n\treturn createStorage(name)\n}", "func (s *Datastore) Open() error {\n\n\tvar err error\n\n\tendpoints := []string{\"http://localhost:8529\"}\n\n\tlogin := \"\"\n\tpwd := \"\"\n\n\tconn, err := arangodbhttp.NewConnection(arangodbhttp.ConnectionConfig{\n\t\tEndpoints: endpoints,\n\t\tTLSConfig: &tls.Config{InsecureSkipVerify: true},\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclient, err := arangodb.NewClient(arangodb.ClientConfig{\n\t\tConnection: conn,\n\t\tAuthentication: arangodb.BasicAuthentication(login, pwd),\n\t})\n\n\n\tif s.databases == nil {\n\t\ts.databases = make(map[string]*Db)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.client = client\n\n\treturn nil\n}", "func (tb *TestingBackend) Open(connection string) error {\n\t// Usually a backend would connect here, but we don't have anything\n\t// to connect to.\n\treturn tb.OpenSideEffect\n}", "func (c *Config) Open(_ *certs.ServiceCerts, logger *zap.Logger,\n\tpv tokens.ProjectValidator) (tokens.Storage, error) {\n\tif c.PGURL == \"\" 
{\n\t\tif c.Database == \"\" {\n\t\t\treturn nil, errors.New(\"either pg_url or database must be provided\")\n\t\t}\n\t\tvar err error\n\n\t\tc.PGURL, err = platform_config.PGURIFromEnvironment(c.Database)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"could not get pg url\")\n\t\t}\n\t}\n\tu, err := url.Parse(c.PGURL)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"parse pg_url config\")\n\t}\n\n\tdb, err := c.initPostgresDB(u)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"connect to database\")\n\t}\n\tif err := runMigrations(db, u.String(), c.MigrationsPath); err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to setup database schema\")\n\t}\n\n\treturn &adapter{db: db, logger: logger, validator: pv}, nil\n}", "func Open(conf *config.Storage) (Pool, error) {\n\tdriver, ok := drivers[conf.Driver]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\n\t\t\t\"storage: unknown driver %q (forgotten import?)\",\n\t\t\tconf.Driver,\n\t\t)\n\t}\n\tpool := driver.New(conf)\n\treturn pool, nil\n}", "func (s *Storage) Init(connectionString string) (*sql.DB, error) {\n\tdbase, err := sql.Open(\"postgres\", connectionString)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ts.Conn = dbase\n\n\t// Creates all the DAOs of this storage.\n\terr = s.createDAOs()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn dbase, s.Conn.Ping()\n}", "func (gcs *GCS) Connect() error {\n\tvar err error\n\tvar clientOption option.ClientOption\n\n\tctx := context.Background()\n\n\tif gcs.Config.CredentialsJSON != \"\" {\n\t\tclientOption = option.WithCredentialsJSON([]byte(gcs.Config.CredentialsJSON))\n\t\tgcs.client, err = storage.NewClient(ctx, clientOption)\n\t} else if gcs.Config.CredentialsFile != \"\" {\n\t\tclientOption = option.WithCredentialsFile(gcs.Config.CredentialsFile)\n\t\tgcs.client, err = storage.NewClient(ctx, clientOption)\n\t} else {\n\t\tgcs.client, err = storage.NewClient(ctx)\n\t}\n\treturn err\n}", "func (b *TestDriver) Connection() gobot.Connection { return b.connection }", "func (a *API) Initialize(storage StorageInterface) {\n\ta.Storage = storage\n}", "func Initialize() error {\n\tvar err error\n\tconn, err = buntdb.Open(\":memory:\")\n\treturn err\n}", "func ConnectStorage(url, key, secret string) *session.Session {\n\t// Configure s3 remote\n\tstorage_config := &aws.Config{\n\t\tCredentials: credentials.NewStaticCredentials(key, secret, \"\"),\n\t\tEndpoint: aws.String(url),\n\t\tRegion: aws.String(\"us-east-1\"),\n\t\tDisableSSL: aws.Bool(true),\n\t\tS3ForcePathStyle: aws.Bool(true),\n\t}\n\n\t// Return new s3 session for starting connections\n\treturn session.New(storage_config)\n}", "func (d *Driver) Open(connStr string) (driver.Conn, error) {\n\trestClient, err := NewRestClient(nil, connStr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefaultDb, ok := restClient.params[\"DEFAULTDB\"]\n\tif !ok {\n\t\tdefaultDb, _ = restClient.params[\"DB\"]\n\t}\n\treturn &Conn{restClient: restClient, defaultDb: defaultDb}, nil\n}", "func New(o *Options) *Storage {\n\ts := &Storage{}\n\tif o.GraphiteAddress != \"\" {\n\t\tc := graphite.NewClient(\n\t\t\to.GraphiteAddress, o.GraphiteTransport,\n\t\t\to.StorageTimeout, o.GraphitePrefix)\n\t\ts.queues = append(s.queues, NewStorageQueueManager(c, defaultConfig))\n\t}\n\tif o.OpentsdbURL != \"\" {\n\t\tc := opentsdb.NewClient(o.OpentsdbURL, o.StorageTimeout)\n\t\ts.queues = append(s.queues, NewStorageQueueManager(c, defaultConfig))\n\t}\n\tif o.InfluxdbURL != nil {\n\t\tconf := influx.Config{\n\t\t\tURL: 
*o.InfluxdbURL,\n\t\t\tUsername: o.InfluxdbUsername,\n\t\t\tPassword: o.InfluxdbPassword,\n\t\t\tTimeout: o.StorageTimeout,\n\t\t}\n\t\tc := influxdb.NewClient(conf, o.InfluxdbDatabase, o.InfluxdbRetentionPolicy)\n\t\tprometheus.MustRegister(c)\n\t\ts.queues = append(s.queues, NewStorageQueueManager(c, defaultConfig))\n\t}\n\tif o.GenericURL != \"\" {\n\t\theaders := http.Header{}\n\t\tif o.GenericHeaderName != \"\" {\n\t\t\theaders.Add(o.GenericHeaderName, o.GenericHeaderValue)\n\t\t}\n\t\tc := generic.NewClient(o.GenericURL, headers, o.StorageTimeout)\n\t\ts.queues = append(s.queues, NewStorageQueueManager(c, defaultConfig))\n\t}\n\tif len(s.queues) == 0 {\n\t\treturn nil\n\t}\n\treturn s\n}", "func (m *MockDriver) Open() error { return nil }", "func NewStorage() *Storage {\n\tConfig := util.NewConfig()\n\tses, err := mgo.Dial(string(Config.Mongo.Addr))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn &Storage{database: Config.Mongo.DB, table: Config.Mongo.Table, session: ses}\n}", "func newStorageObject(URL string, source interface{}, fileInfo os.FileInfo) storage.Object {\n\tabstract := storage.NewAbstractStorageObject(URL, source, fileInfo)\n\tresult := &object{\n\t\tAbstractObject: abstract,\n\t}\n\tresult.AbstractObject.Object = result\n\treturn result\n}", "func (s *SQLite3) Open(logger logrus.FieldLogger) (storage.Storage, error) {\n\tconn, err := s.open(logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conn, nil\n}", "func (s *SQLite3) Open(logger logrus.FieldLogger) (storage.Storage, error) {\n\tconn, err := s.open(logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conn, nil\n}", "func setupStorage(c *structs.Config) error {\n\tfmt.Println(\"setup storage...\")\n\t_, err := redis.Setup(c)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (db *mongoDataBase) OpenConnection() error {\n\n\tlog.Logger.Info().Msg(\"Starting MongoDB connection\")\n\tvar url = \"mongodb+srv://hola:[email protected]/test\"\n\tclientOptions := options.Client().ApplyURI(url)\n\n\tctx, cancelFunc := context.WithTimeout(context.Background(), config.Timeout)\n\tdefer cancelFunc()\n\tclient, err := mongo.Connect(ctx, clientOptions)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Error on connection to mongoDB\")\n\t}\n\terr = client.Ping(ctx, readpref.Primary())\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Error when it did the ping to mongoDB\")\n\t}\n\n\tdb.databaseConnection = client.Database(\"test\")\n\tlog.Logger.Info().Msg(\"MongoDB UP\")\n\treturn nil\n}", "func (s *Storage) Init() error {\n\n\tvar err error\n\n\tif s.client, err = mongo.NewClient(options.Client().ApplyURI(s.URI)); err != nil {\n\n\t\treturn err\n\n\t}\n\tif err = s.client.Connect(context.Background()); err != nil {\n\n\t\treturn err\n\n\t}\n\n\ts.db = s.client.Database(s.Database)\n\n\ts.visited = s.db.Collection(\"colly_visited\")\n\n\ts.cookies = s.db.Collection(\"colly_cookies\")\n\n\treturn nil\n\n}", "func (s Store) Open(path string, cfg *config.Config) (driver.IDB, error) {\n\tmockDB := MockDB{mock: s.Mock}\n\treturn &mockDB, nil\n}", "func (h *Handler) InitObjectStoreConnection(endpoint, accessKeyID, secretAccessKey string) error {\n\tklog.Info(\"Preparing S3 settings\")\n\n\tcfg, err := external.LoadDefaultAWSConfig()\n\n\tif err != nil {\n\t\tklog.Error(\"Failed to load aws config. 
error: \", err)\n\t\treturn err\n\t}\n\t// aws client report error without minio\n\tcfg.Region = \"minio\"\n\n\tdefaultResolver := endpoints.NewDefaultResolver()\n\ts3CustResolverFn := func(service, region string) (aws.Endpoint, error) {\n\t\tif service == \"s3\" {\n\t\t\treturn aws.Endpoint{\n\t\t\t\tURL: endpoint,\n\t\t\t}, nil\n\t\t}\n\n\t\treturn defaultResolver.ResolveEndpoint(service, region)\n\t}\n\n\tcfg.EndpointResolver = aws.EndpointResolverFunc(s3CustResolverFn)\n\tcfg.Credentials = &credentialProvider{\n\t\tAccessKeyID: accessKeyID,\n\t\tSecretAccessKey: secretAccessKey,\n\t}\n\n\th.Client = s3.New(cfg)\n\tif h.Client == nil {\n\t\tklog.Error(\"Failed to connect to s3 service\")\n\t\treturn err\n\t}\n\n\th.Client.ForcePathStyle = true\n\n\tklog.V(2).Info(\"S3 configured \")\n\n\treturn nil\n}", "func Open(path string, opts ...Option) (*Storage, error) {\n\tabs, err := filepath.Abs(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdir := filepath.Dir(abs)\n\tif err := os.MkdirAll(dir, 0755); err != nil {\n\t\treturn nil, err\n\t}\n\n\ts := &Storage{\n\t\tdir: dir,\n\t\tpath: path,\n\t\tdbopt: gorocksdb.NewDefaultOptions(),\n\t}\n\n\ts.dbopt.SetCompression(gorocksdb.NoCompression)\n\ts.dbopt.SetCreateIfMissing(true)\n\ts.dbopt.SetPurgeRedundantKvsWhileFlush(true)\n\n\ts.db, err = gorocksdb.OpenDb(s.dbopt, s.path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn s, nil\n}", "func InitStorageModel(model store.ClusterManagerModel) {\n\tlock.Lock()\n\tdefer lock.Unlock()\n\tstorage = model\n}", "func TestStorage(ctx common.Context) (func(*Options), error) {\n\tdb, err := stash.OpenTransient(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"Error opening transient storage\")\n\t}\n\treturn func(o *Options) { o.WithStorage(db) }, nil\n}", "func (as *AppStorage) TestDatabaseConnection() error {\n\t_, err := as.db.C.Scan(&dynamodb.ScanInput{\n\t\tTableName: aws.String(appsTableName),\n\t\tLimit: aws.Int64(1),\n\t})\n\treturn err\n}", "func (c *CloudStorage) Open(ctx context.Context, url string) (io.ReadCloser, error) {\n\tc.once.Do(func() {\n\t\tc.cl, c.initErr = c.newClient(ctx)\n\t})\n\tif c.initErr != nil {\n\t\treturn nil, c.initErr\n\t}\n\tbuildArtifactPrefix := \"build-artifact:///\"\n\tif strings.HasPrefix(url, buildArtifactPrefix) {\n\t\turl = c.buildArtifactsURL + strings.TrimPrefix(url, buildArtifactPrefix)\n\t}\n\treturn c.cl.Open(ctx, url)\n}", "func Connect(dbType StorageType, params Params) (Repository, error) {\n\tswitch dbType {\n\tcase StorageTypeMongo:\n\t\treturn newMongoDBConnection(params.URL, params.DB, params.Collection, params.Username, params.Password)\n\tdefault:\n\t\treturn nil, errors.Errorf(\"not supported database type [%s]\", dbType.String())\n\t}\n}", "func init() {\n\tcloudprovider.RegisterCloudProvider(providerName, newCloudConnection)\n}", "func (s *Store) Open() error {\n\tclient, err := mongo.NewClient(options.Client().ApplyURI(s.DatabaseURL))\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = client.Connect(dbctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err = client.Ping(dbctx, nil); err != nil {\n\t\treturn err\n\t}\n\n\ts.db = client\n\n\treturn nil\n}", "func TestConnection_Smoke(t *testing.T) {\n\tvar _ Connection = &connectionMock{}\n}", "func init() {\n\tos.RemoveAll(DataPath)\n\n\tdc := DatabaseConfig{\n\t\tDataPath: DataPath,\n\t\tIndexDepth: 4,\n\t\tPayloadSize: 16,\n\t\tBucketDuration: 3600000000000,\n\t\tResolution: 60000000000,\n\t\tSegmentSize: 100000,\n\t}\n\n\tcfg := &ServerConfig{\n\t\tVerboseLogs: 
true,\n\t\tRemoteDebug: true,\n\t\tListenAddress: Address,\n\t\tDatabases: map[string]DatabaseConfig{\n\t\t\tDatabase: dc,\n\t\t},\n\t}\n\n\tdbs := map[string]kdb.Database{}\n\tdb, err := dbase.New(dbase.Options{\n\t\tDatabaseName: Database,\n\t\tDataPath: dc.DataPath,\n\t\tIndexDepth: dc.IndexDepth,\n\t\tPayloadSize: dc.PayloadSize,\n\t\tBucketDuration: dc.BucketDuration,\n\t\tResolution: dc.Resolution,\n\t\tSegmentSize: dc.SegmentSize,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdbs[\"test\"] = db\n\td = db\n\to = dc\n\n\ts = NewServer(dbs, cfg)\n\tgo s.Listen()\n\n\t// wait for the server to start\n\ttime.Sleep(time.Second * 2)\n\n\tc = NewClient(Address)\n\tif err := c.Connect(); err != nil {\n\t\tpanic(err)\n\t}\n}", "func InitStorage() *storage.BucketHandle {\n\tctx := context.Background()\n\tclient, err := storage.NewClient(ctx)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create Storage client: %v\\n\", err)\n\t}\n\tbucket := client.Bucket(os.Getenv(envVarNames[evBucket]))\n\tattrs, err := bucket.Attrs(context.Background())\n\tif attrs == nil {\n\t\tlog.Fatalf(\"Bucket has not attributes...\\n\")\n\t}\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not get Bucket information: %v\\n\", err)\n\t}\n\treturn bucket\n}", "func TestOpenReturnsValidConnection(t *testing.T) {\n\tdriver := &drv{}\n\n\tdsn := os.Getenv(\"ORACLE_DSN\")\n\tif dsn == \"\" {\n\t\tt.Fatal(\"To run tests, set the ORACLE_DSN environment variable.\")\n\t}\n\n\tdb, err := driver.Open(dsn)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif db == nil {\n\t\tt.Fatal(\"db undefined\")\n\t}\n}", "func Open(path, tablePrefix string) (Database, error) {\n\tvar err error\n\tif strings.HasPrefix(path, storage.MySQLPrefix) {\n\t\tname := path[len(storage.MySQLPrefix):]\n\t\t// probe isolation variable name\n\t\tisolationVarName, err := storage.ProbeMySQLIsolationVariableName(name)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\t// append parameters\n\t\tif name, err = storage.AppendMySQLParams(name, map[string]string{\n\t\t\t\"sql_mode\": \"'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'\",\n\t\t\tisolationVarName: \"'READ-UNCOMMITTED'\",\n\t\t\t\"parseTime\": \"true\",\n\t\t}); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\t// connect to database\n\t\tdatabase := new(SQLDatabase)\n\t\tdatabase.driver = MySQL\n\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\tif database.client, err = otelsql.Open(\"mysql\", name,\n\t\t\totelsql.WithAttributes(semconv.DBSystemMySQL),\n\t\t\totelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tdatabase.gormDB, err = gorm.Open(mysql.New(mysql.Config{Conn: database.client}), storage.NewGORMConfig(tablePrefix))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\treturn database, nil\n\t} else if strings.HasPrefix(path, storage.PostgresPrefix) || strings.HasPrefix(path, storage.PostgreSQLPrefix) {\n\t\tdatabase := new(SQLDatabase)\n\t\tdatabase.driver = Postgres\n\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\tif database.client, err = otelsql.Open(\"postgres\", path,\n\t\t\totelsql.WithAttributes(semconv.DBSystemPostgreSQL),\n\t\t\totelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tdatabase.gormDB, err = gorm.Open(postgres.New(postgres.Config{Conn: database.client}), 
storage.NewGORMConfig(tablePrefix))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\treturn database, nil\n\t} else if strings.HasPrefix(path, storage.ClickhousePrefix) || strings.HasPrefix(path, storage.CHHTTPPrefix) || strings.HasPrefix(path, storage.CHHTTPSPrefix) {\n\t\t// replace schema\n\t\tparsed, err := url.Parse(path)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tif strings.HasPrefix(path, storage.CHHTTPSPrefix) {\n\t\t\tparsed.Scheme = \"https\"\n\t\t} else {\n\t\t\tparsed.Scheme = \"http\"\n\t\t}\n\t\turi := parsed.String()\n\t\tdatabase := new(SQLDatabase)\n\t\tdatabase.driver = ClickHouse\n\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\tif database.client, err = otelsql.Open(\"chhttp\", uri,\n\t\t\totelsql.WithAttributes(semconv.DBSystemKey.String(\"clickhouse\")),\n\t\t\totelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tdatabase.gormDB, err = gorm.Open(clickhouse.New(clickhouse.Config{Conn: database.client}), storage.NewGORMConfig(tablePrefix))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\treturn database, nil\n\t} else if strings.HasPrefix(path, storage.MongoPrefix) || strings.HasPrefix(path, storage.MongoSrvPrefix) {\n\t\t// connect to database\n\t\tdatabase := new(MongoDB)\n\t\topts := options.Client()\n\t\topts.Monitor = otelmongo.NewMonitor()\n\t\topts.ApplyURI(path)\n\t\tif database.client, err = mongo.Connect(context.Background(), opts); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\t// parse DSN and extract database name\n\t\tif cs, err := connstring.ParseAndValidate(path); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t} else {\n\t\t\tdatabase.dbName = cs.Database\n\t\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\t}\n\t\treturn database, nil\n\t} else if strings.HasPrefix(path, storage.SQLitePrefix) {\n\t\t// append parameters\n\t\tif path, err = storage.AppendURLParams(path, []lo.Tuple2[string, string]{\n\t\t\t{\"_pragma\", \"busy_timeout(10000)\"},\n\t\t\t{\"_pragma\", \"journal_mode(wal)\"},\n\t\t}); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\t// connect to database\n\t\tname := path[len(storage.SQLitePrefix):]\n\t\tdatabase := new(SQLDatabase)\n\t\tdatabase.driver = SQLite\n\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\tif database.client, err = otelsql.Open(\"sqlite\", name,\n\t\t\totelsql.WithAttributes(semconv.DBSystemSqlite),\n\t\t\totelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tgormConfig := storage.NewGORMConfig(tablePrefix)\n\t\tgormConfig.Logger = &zapgorm2.Logger{\n\t\t\tZapLogger: log.Logger(),\n\t\t\tLogLevel: logger.Warn,\n\t\t\tSlowThreshold: 10 * time.Second,\n\t\t\tSkipCallerLookup: false,\n\t\t\tIgnoreRecordNotFoundError: false,\n\t\t}\n\t\tdatabase.gormDB, err = gorm.Open(sqlite.Dialector{Conn: database.client}, gormConfig)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\treturn database, nil\n\t} else if strings.HasPrefix(path, storage.RedisPrefix) {\n\t\taddr := path[len(storage.RedisPrefix):]\n\t\tdatabase := new(Redis)\n\t\tdatabase.client = redis.NewClient(&redis.Options{Addr: addr})\n\t\tif tablePrefix != \"\" {\n\t\t\tpanic(\"table prefix is not supported for redis\")\n\t\t}\n\t\tlog.Logger().Warn(\"redis is used for testing only\")\n\t\treturn database, nil\n\t} else if 
strings.HasPrefix(path, storage.OraclePrefix) {\n\t\tdatabase := new(SQLDatabase)\n\t\tdatabase.driver = Oracle\n\t\tdatabase.TablePrefix = storage.TablePrefix(tablePrefix)\n\t\tif database.client, err = otelsql.Open(\"oracle\", path,\n\t\t\totelsql.WithAttributes(semconv.DBSystemOracle),\n\t\t\totelsql.WithSpanOptions(otelsql.SpanOptions{DisableErrSkip: true}),\n\t\t); err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\tdatabase.gormDB, err = gorm.Open(oracle.New(oracle.Config{Conn: database.client}), storage.NewGORMConfig(tablePrefix))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Trace(err)\n\t\t}\n\t\treturn database, nil\n\t}\n\treturn nil, errors.Errorf(\"Unknown database: %s\", path)\n}", "func (xmem *XmemNozzle) initializeConnection() (err error) {\n\txmem.Logger().Debugf(\"xmem.config= %v\", xmem.config.connectStr)\n\txmem.Logger().Debugf(\"poolName=%v\", xmem.getPoolName(xmem.config.connectStr))\n\tpool, err := base.ConnPoolMgr().GetOrCreatePool(xmem.getPoolName(xmem.config.connectStr), xmem.config.connectStr, xmem.config.bucketName, xmem.config.password, base.DefaultConnectionSize)\n\tif err == nil {\n\t\txmem.memClient, err = pool.Get()\n\t}\n\treturn err\n}", "func (p *Postgres) Open(logger logrus.FieldLogger) (storage.Storage, error) {\n\tconn, err := p.open(logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conn, nil\n}", "func (p *Postgres) Open(logger logrus.FieldLogger) (storage.Storage, error) {\n\tconn, err := p.open(logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conn, nil\n}", "func (pgs *PGStorage) Init() error {\n\tdb, err := sqlx.Open(\"pgx\", pgs.URI)\n\tif err != nil {\n\t\treturn err\n\t}\n\tpgs.DB = db\n\treturn nil\n}", "func connect() {\n\tconn, err := http.NewConnection(http.ConnectionConfig{\n\t\tEndpoints: []string{hlp.Conf.DB.URL},\n\t})\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create HTTP connection: %v\", err)\n\t}\n\n\tclient, err := driver.NewClient(driver.ClientConfig{\n\t\tConnection: conn,\n\t\tAuthentication: driver.BasicAuthentication(\n\t\t\thlp.Conf.DB.User,\n\t\t\thlp.Conf.DB.Pass,\n\t\t),\n\t})\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create new client: %v\", err)\n\t}\n\n\tctx := context.Background()\n\tdb, err := client.Database(ctx, \"cardo_dev\")\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to open database: %v\", err)\n\t}\n\n\tDatabase = db\n}", "func InitStorage(engineType storage.EngineType) (storage.Engine, error) {\n\n\tconfig, err := config.FromEnv()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tswitch engineType {\n\tcase storage.BoltEngine:\n\n\t\tboltStorageEngine, err := bolt.Init(config.Database.Bolt)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn &boltStorageEngine, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"storage backend %q not implemented\", engineType)\n\t}\n}", "func init() {\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Write presentation\"})\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Host meetup\"})\n}", "func TestCreateConnection(t *testing.T) {\n\tfmt.Println(\"Connection creation test.\")\n\n\tcc := getClientConnection()\n\n\t// when done the connection will be close.\n\tdefer cc.Close()\n\n\t// Create a new client service...\n\tc := persistencepb.NewPersistenceServiceClient(cc)\n\n\trqst := &persistencepb.CreateConnectionRqst{\n\t\tConnection: &persistencepb.Connection{\n\t\t\tId: \"mongo_db_test_connection\",\n\t\t\tName: \"TestMongoDB\",\n\t\t\tUser: \"\",\n\t\t\tPassword: \"\",\n\t\t\tPort: 27017,\n\t\t\tHost: 
\"localhost\",\n\t\t\tStore: persistencepb.StoreType_MONGO,\n\t\t\tTimeout: 10,\n\t\t},\n\t}\n\n\trsp, err := c.CreateConnection(context.Background(), rqst)\n\tif err != nil {\n\t\tlog.Fatalf(\"error while CreateConnection: %v\", err)\n\t}\n\n\tlog.Println(\"Response form CreateConnection:\", rsp.Result)\n}", "func OpenTestConnection() (db *gorm.DB, err error) {\n\tswitch os.Getenv(\"GORM_DIALECT\") {\n\tcase \"mysql\":\n\t\t// CREATE USER 'gorm'@'localhost' IDENTIFIED BY 'gorm';\n\t\t// CREATE DATABASE gorm;\n\t\t// GRANT ALL ON gorm.* TO 'gorm'@'localhost';\n\t\tfmt.Println(\"testing mysql...\")\n\t\tdbhost := os.Getenv(\"GORM_DBADDRESS\")\n\t\tif dbhost != \"\" {\n\t\t\tdbhost = fmt.Sprintf(\"tcp(%v)\", dbhost)\n\t\t}\n\t\tdb, err = gorm.Open(\"mysql\", fmt.Sprintf(\"gorm:gorm@%v/gorm?charset=utf8&parseTime=True\", dbhost))\n\tcase \"postgres\":\n\t\tfmt.Println(\"testing postgres...\")\n\t\tdbhost := os.Getenv(\"GORM_DBHOST\")\n\t\tif dbhost != \"\" {\n\t\t\tdbhost = fmt.Sprintf(\"host=%v \", dbhost)\n\t\t}\n\t\tdb, err = gorm.Open(\"postgres\", fmt.Sprintf(\"%v user=gorm password=gorm DB.name=gorm sslmode=disable\", dbhost))\n\tcase \"foundation\":\n\t\tfmt.Println(\"testing foundation...\")\n\t\tdb, err = gorm.Open(\"foundation\", \"dbname=gorm port=15432 sslmode=disable\")\n\tcase \"mssql\":\n\t\t// CREATE LOGIN gorm WITH PASSWORD = 'LoremIpsum86';\n\t\t// CREATE DATABASE gorm;\n\t\t// USE gorm;\n\t\t// CREATE USER gorm FROM LOGIN gorm;\n\t\t// sp_changedbowner 'gorm';\n\t\tfmt.Println(\"testing mssql...\")\n\t\tdb, err = gorm.Open(\"mssql\", \"sqlserver://gorm:LoremIpsum86@localhost:1433?database=gorm\")\n\tdefault:\n\t\tfmt.Println(\"testing sqlite3...\")\n\t\tdb, err = gorm.Open(\"sqlite3\", filepath.Join(os.TempDir(), \"gorm.db\"))\n\t}\n\n\t// db.SetLogger(Logger{log.New(os.Stdout, \"\\r\\n\", 0)})\n\t// db.SetLogger(log.New(os.Stdout, \"\\r\\n\", 0))\n\tif os.Getenv(\"DEBUG\") == \"true\" {\n\t\tdb.LogMode(true)\n\t}\n\n\tif err == nil {\n\t\tdb.DB().SetMaxIdleConns(10)\n\t}\n\n\treturn\n}", "func NewTestConnection() *TestConnection {\n return &TestConnection {\n ToRead: make([]string, 0),\n Written: make([]string, 0),\n WriteCount: -1,\n Closed: false,\n TimesReadCalled: 0,\n TimesWriteCalled: 0,\n ThrowWriteErrorAfter: 0,\n ThrowReadErrorAfter: 0,\n }\n}", "func (s *AzureBlobStorage) Open(ctx context.Context, name string) (ExternalFileReader, error) {\n\tclient := s.containerClient.NewBlockBlobClient(s.withPrefix(name))\n\tresp, err := client.GetProperties(ctx, nil)\n\tif err != nil {\n\t\treturn nil, errors.Annotate(err, \"Failed to get properties from the azure blob\")\n\t}\n\n\treturn &azblobObjectReader{\n\t\tblobClient: client,\n\n\t\tpos: 0,\n\t\ttotalSize: *resp.ContentLength,\n\n\t\tctx: ctx,\n\n\t\tcpkInfo: s.cpkInfo,\n\t}, nil\n}", "func TestStorage(t *testing.T) {\n\tvar result Storage\n\terr := json.NewDecoder(strings.NewReader(storageBody)).Decode(&result)\n\n\tif err != nil {\n\t\tt.Errorf(\"Error decoding JSON: %s\", err)\n\t}\n\n\tif result.ID != \"Storage-1\" {\n\t\tt.Errorf(\"Received invalid ID: %s\", result.ID)\n\t}\n\n\tif result.Name != \"StorageOne\" {\n\t\tt.Errorf(\"Received invalid name: %s\", result.Name)\n\t}\n\n\tif len(result.drives) != 6 {\n\t\tt.Errorf(\"Unexpected number of drives: %d\", len(result.drives))\n\t}\n\n\tif result.StorageControllers[0].CacheSummary.PersistentCacheSizeMiB != 1024 {\n\t\tt.Errorf(\"Invalid PersistenCacheSize: %d\",\n\t\t\tresult.StorageControllers[0].CacheSummary.PersistentCacheSizeMiB)\n\t}\n\n\tif 
result.StorageControllers[0].PCIeInterface.MaxPCIeType != Gen4PCIeTypes {\n\t\tt.Errorf(\"Invalid MaxPCIeType: %s\", result.StorageControllers[0].PCIeInterface.MaxPCIeType)\n\t}\n\n\tif result.setEncryptionKeyTarget != \"/redfish/v1/Storage/Actions/Storage.SetEncryptionKey\" {\n\t\tt.Errorf(\"Invalid SetEncryptionKey target: %s\", result.setEncryptionKeyTarget)\n\t}\n}", "func NewStorage() *Storage {\n\tst := new(Storage)\n\tdsn := fmt.Sprintf(\"host=%s port=%s user=%s dbname=%s sslmode=disable password=%s\",\n\t\tos.Getenv(\"STORAGE_HOST\"),\n\t\tos.Getenv(\"STORAGE_PORT\"),\n\t\tos.Getenv(\"STORAGE_USER\"),\n\t\tos.Getenv(\"STORAGE_DB\"),\n\t\tos.Getenv(\"STORAGE_PASSWORD\"))\n\t// connStr := \"host=database user=postgres dbname=postgres sslmode=disable\"\n\tdatabase, err := sql.Open(\"postgres\", dsn)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tst.database = database\n\treturn st\n}", "func Connect() *DB {\n\n\tif db2 == nil {\n\n\t\t// Singleton the bank has to close every call,\n\t\t// save, update, get etc ..\n\t\t//\n\t\t// Testing and verifying if there is a directory\n\t\t// and file of our bucket, if it does not exist\n\t\t// it creates the directory and the file so that\n\t\t// we can manipulate all our bucket.\n\t\t//\n\t\t// Remember that boltdb with the open function also creates.\n\t\tif err := DataBaseTest(PathDb); err != nil {\n\n\t\t\tlog.Fatal(\"Error Test database\", err)\n\t\t}\n\n\t\t// Here is the object responsible for\n\t\t// allowing calls to the methods, such as Get, Save, etc.\n\t\tdbbolt, err = bolt.Open(PathDb, 0600, &bolt.Options{Timeout: 1 * time.Second})\n\n\t\tif err != nil {\n\n\t\t\tlog.Fatal(\"connect error: \", err)\n\t\t}\n\n\t\t// We create a new reference\n\t\t// just to facilitate\n\t\t// understanding and syntax\n\t\tdb2 = &DB{dbbolt}\n\t}\n\n\treturn db2\n}", "func (gc gcsClient) Open(ctx context.Context, path Path) (io.ReadCloser, *storage.ReaderObjectAttrs, error) {\n\tclient := gc.clientFromPath(path)\n\treturn client.Open(ctx, path)\n}", "func Open(conf *config.DataStore) (Conn, error) {\n\tdriver, ok := drivers[conf.Driver]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\n\t\t\t\"backend: unknown driver %q (forgotten import?)\",\n\t\t\tconf.Driver,\n\t\t)\n\t}\n\tpool := driver.New(conf)\n\treturn pool, nil\n}", "func newStorage(\n\tmachineName,\n\ttablename,\n\tdatabase,\n\tretentionPolicy,\n\tusername,\n\tpassword,\n\tinfluxdbHost string,\n\tisSecure bool,\n\tbufferDuration time.Duration,\n) (*influxdbStorage, error) {\n\turl := &url.URL{\n\t\tScheme: \"http\",\n\t\tHost: influxdbHost,\n\t}\n\tif isSecure {\n\t\turl.Scheme = \"https\"\n\t}\n\n\tconfig := &influxdb.Config{\n\t\tURL: *url,\n\t\tUsername: username,\n\t\tPassword: password,\n\t\tUserAgent: fmt.Sprintf(\"%v/%v\", \"cAdvisor\", version.Info[\"version\"]),\n\t}\n\tclient, err := influxdb.NewClient(*config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tret := &influxdbStorage{\n\t\tclient: client,\n\t\tmachineName: machineName,\n\t\tdatabase: database,\n\t\tretentionPolicy: retentionPolicy,\n\t\tbufferDuration: bufferDuration,\n\t\tlastWrite: time.Now(),\n\t\tpoints: make([]*influxdb.Point, 0),\n\t}\n\tret.readyToFlush = ret.defaultReadyToFlush\n\treturn ret, nil\n}", "func (o *Operator) onStartStorage(stop <-chan struct{}) {\n\tfor {\n\t\tif err := o.waitForCRD(false, false, true, false); err == nil {\n\t\t\tbreak\n\t\t} else {\n\t\t\tlog.Error().Err(err).Msg(\"Resource initialization failed\")\n\t\t\tlog.Info().Msgf(\"Retrying in %s...\", 
initRetryWaitTime)\n\t\t\ttime.Sleep(initRetryWaitTime)\n\t\t}\n\t}\n\to.runLocalStorages(stop)\n}", "func (i *API) openDB() error {\n\tvar err error\n\tif len(i.primaryDBPath) == 0 || len(i.backupDBPath) == 0 {\n\t\tlog.Error(errors.Wrap(types.ErrMissingStorePaths, \"Infra API: \"))\n\t\treturn errors.Wrap(types.ErrMissingStorePaths, \"Infra API: \")\n\t}\n\n\tif _, err := os.Stat(i.primaryDBPath); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(path.Dir(i.primaryDBPath), 600); err != nil {\n\t\t\tlog.Error(errors.Wrapf(types.ErrDBPathCreate, \"Infra API: Path: %s | Err: %v\", i.primaryDBPath, err))\n\t\t\treturn errors.Wrapf(types.ErrDBPathCreate, \"Infra API: Path: %s | Err: %v\", i.primaryDBPath, err)\n\t\t}\n\t}\n\n\tif _, err := os.Stat(i.backupDBPath); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(path.Dir(i.backupDBPath), 600); err != nil {\n\t\t\tlog.Error(errors.Wrapf(types.ErrDBPathCreate, \"Infra API: Path: %s | Err: %v\", i.backupDBPath, err))\n\t\t\treturn errors.Wrapf(types.ErrDBPathCreate, \"Infra API: Path: %s | Err: %v\", i.backupDBPath, err)\n\t\t}\n\t}\n\n\ti.primaryStore, err = emstore.NewEmstore(emstore.BoltDBType, i.primaryDBPath)\n\tif err != nil {\n\t\tlog.Error(errors.Wrapf(types.ErrPrimaryStoreCreate, \"Infra API: Err: %v\", err))\n\t\treturn errors.Wrapf(types.ErrPrimaryStoreCreate, \"Infra API: Err: %v\", err)\n\t}\n\n\ti.backupStore, err = emstore.NewEmstore(emstore.BoltDBType, i.backupDBPath)\n\tif err != nil {\n\t\tlog.Error(errors.Wrapf(types.ErrBackupStoreCreate, \"Infra API: Err: %v\", err))\n\t\treturn errors.Wrapf(types.ErrBackupStoreCreate, \"Infra API: Err: %v\", err)\n\t}\n\treturn nil\n}", "func (c *Dg) Initialize(con util.XapiClient) {\n c.con = con\n}", "func (ds *gcdatastore) connect() (err error) {\n\tctx := context.Background()\n\n\tds.Client, err = datastore.NewClient(ctx, ds.gcloudProjectID, ds.gcloudClientOpts...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func StorageInit() {\n\tm := new(v2.MetaV2)\n\t// get the most recent record as the init resource version\n\t_, err := dbm.DBAccess.QueryTable(v2.NewMetaTableName).OrderBy(\"-\" + v2.RV).Limit(1).All(m)\n\tutilruntime.Must(err)\n\tDefaultV2Client.SetRevision(m.ResourceVersion)\n}", "func (c *client) Open() error {\n\n\tsslmode := \"require\"\n\tif c.config.Environment == \"DEV\" {\n\t\tsslmode = \"disable\"\n\t}\n\tconnectionStr := fmt.Sprintf(\"host=%s port=%s user=%s \"+\n\t\t\"password=%s dbname=%s search_path=%s sslmode=%s\",\n\t\tc.config.Database.Host, c.config.Database.Port, c.config.Database.User,\n\t\tc.config.Database.Password, c.config.Database.DB, c.config.Database.Schema, sslmode,\n\t)\n\n\t// pq.Driver\n\tdriverName := c.config.Database.Dialect + \"WithHooks\"\n\tsql.Register(driverName, sqlhooks.Wrap(&pq.Driver{}, &Hooks{}))\n\n\t// driverName := c.config.Database.Dialect\n\tsqlDatabase, err := sql.Open(driverName, connectionStr)\n\n\tdatabase := sqlx.NewDb(sqlDatabase, c.config.Database.Dialect)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.db = database\n\n\tif err = database.Ping(); err != nil {\n\t\tlog.Fatal(err)\n\t\tos.Exit(1)\n\t}\n\n\treturn nil\n}", "func initConnection(key string, drvName string, connection string) {\n\n\tpool.connections[key] = newDbContainer(drvName, connection)\n\tpool.connections[key].initConnection()\n\n\tpool.ok[key] <- true\n\tclose(pool.ok[key])\n}", "func (d Driver) Open(path string) (kv.Storage, error) {\n\tmc.mu.Lock()\n\tdefer mc.mu.Unlock()\n\n\tu, err := url.Parse(path)\n\tif err != nil {\n\t\treturn nil, 
errors.Trace(err)\n\t}\n\n\tengineSchema := filepath.Join(u.Host, u.Path)\n\tif store, ok := mc.cache[engineSchema]; ok {\n\t\t// TODO: check the cache store has the same engine with this Driver.\n\t\tlog.Info(\"[kv] cache store\", engineSchema)\n\t\treturn store, nil\n\t}\n\n\tdb, err := d.Driver.Open(engineSchema)\n\tif err != nil {\n\t\treturn nil, errors.Trace(err)\n\t}\n\n\tlog.Info(\"[kv] New store\", engineSchema)\n\ts := &dbStore{\n\t\ttxns: make(map[uint64]*dbTxn),\n\t\tkeysLocked: make(map[string]uint64),\n\t\tuuid: uuid.NewV4().String(),\n\t\tpath: engineSchema,\n\t\tdb: db,\n\t\tcompactor: newLocalCompactor(localCompactDefaultPolicy, db),\n\t\tclosed: false,\n\t}\n\ts.recentUpdates, err = segmentmap.NewSegmentMap(100)\n\tif err != nil {\n\t\treturn nil, errors.Trace(err)\n\t}\n\tregionServers := buildLocalRegionServers(s)\n\tvar infos []*regionInfo\n\tfor _, rs := range regionServers {\n\t\tri := &regionInfo{startKey: rs.startKey, endKey: rs.endKey, rs: rs}\n\t\tinfos = append(infos, ri)\n\t}\n\ts.pd.SetRegionInfo(infos)\n\tmc.cache[engineSchema] = s\n\ts.compactor.Start()\n\treturn s, nil\n}", "func Open(cfg Config) (backend.Backend, error) {\n\tdebug.Log(\"s3.Open\", \"open, config %#v\", cfg)\n\n\tclient, err := minio.New(cfg.Endpoint, cfg.KeyID, cfg.Secret, cfg.UseHTTP)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbe := &s3{client: client, bucketname: cfg.Bucket, prefix: cfg.Prefix}\n\tbe.createConnections()\n\n\tif err := client.BucketExists(cfg.Bucket); err != nil {\n\t\tdebug.Log(\"s3.Open\", \"BucketExists(%v) returned err %v, trying to create the bucket\", cfg.Bucket, err)\n\n\t\t// create new bucket with default ACL in default region\n\t\terr = client.MakeBucket(cfg.Bucket, \"\")\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn be, nil\n}", "func (s *MySQL) Open(logger logrus.FieldLogger) (storage.Storage, error) {\n\tconn, err := s.open(logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conn, nil\n}", "func testGatewayStorage(gwStorage storage.GatewayStorage, userID model.UserID, t *testing.T) {\n\t// Retrieve the empty list\n\tgwChan, err := gwStorage.GetList(userID)\n\tif err != nil {\n\t\tt.Fatal(\"Got error retrieving empty list: \", err)\n\t}\n\tcount := 0\n\tfor range gwChan {\n\t\tcount++\n\t}\n\tif count > 0 {\n\t\tt.Fatalf(\"Expected 0 elements but got %d\", count)\n\t}\n\n\t// Create a new gateway\n\tgw1EUI, _ := protocol.EUIFromString(\"00-01-02-03-04-05-06-07\")\n\tgateway1 := model.Gateway{\n\t\tGatewayEUI: gw1EUI,\n\t\tIP: net.ParseIP(\"127.0.0.1\"),\n\t\tStrictIP: false,\n\t\tLatitude: 63.0,\n\t\tLongitude: 10.0,\n\t\tAltitude: 50.0,\n\t\tTags: model.NewTags(),\n\t}\n\n\tif err := gwStorage.Put(gateway1, userID); err != nil {\n\t\tt.Fatal(\"Got error storing gateway: \", err)\n\t}\n\n\tif err := gwStorage.Put(gateway1, userID); err != storage.ErrAlreadyExists {\n\t\tt.Fatal(\"Should get ErrAlreadyExists when storing same gateway twice\")\n\t}\n\t// ...and another one\n\tgw2EUI, _ := protocol.EUIFromString(\"aa-01-02-03-04-05-06-07\")\n\tgateway2 := model.Gateway{\n\t\tGatewayEUI: gw2EUI,\n\t\tIP: net.ParseIP(\"127.0.0.2\"),\n\t\tStrictIP: true,\n\t\tLatitude: -63.0,\n\t\tLongitude: -10.0,\n\t\tAltitude: 0.0,\n\t\tTags: model.NewTags(),\n\t}\n\n\tgateway2.Tags.SetTag(\"Name\", \"Value\")\n\tgateway2.Tags.SetTag(\"Key\", \"Value\")\n\tif err := gwStorage.Put(gateway2, userID); err != nil {\n\t\tt.Fatal(\"Got error storing gateway: \", err)\n\t}\n\n\t// Retrieve the list\n\tgwChan, err = 
gwStorage.GetList(userID)\n\tif err != nil {\n\t\tt.Fatal(\"Got error retrieving list: \", err)\n\t}\n\n\tvar foundOne, foundTwo bool\n\tfor val := range gwChan {\n\t\tif gateway1.Equals(val) {\n\t\t\tfoundOne = true\n\t\t}\n\t\tif gateway2.Equals(val) {\n\t\t\tfoundTwo = true\n\t\t}\n\t}\n\n\tif !foundOne || !foundTwo {\n\t\tt.Fatal(\"One or both gateways missing from list\")\n\t}\n\n\t// Try adding the same gateway twice. Should yield error\n\tif err := gwStorage.Put(gateway1, userID); err == nil {\n\t\tt.Fatal(\"Expected error when adding gateway twice\")\n\t}\n\n\t// Retrieve the gateway as another user. It should return errnotfound\n\tif _, err := gwStorage.Get(gateway1.GatewayEUI, model.UserID(\"foo\")); err != storage.ErrNotFound {\n\t\tt.Fatal(\"Another user should not be able to access gateway\")\n\t}\n\t// Retrieve just the first gateway. It should - of course - be the same.\n\tfirst, err := gwStorage.Get(gateway1.GatewayEUI, userID)\n\tif err != nil {\n\t\tt.Fatal(\"Did not expect an error\")\n\t}\n\tif !gateway1.Equals(first) {\n\t\tt.Fatalf(\"Gateway did not match %v != %v\", gateway1, first)\n\t}\n\n\t// Retrieving gateway that doesn't exist should yield error\n\tnonEUI, _ := protocol.EUIFromString(\"00-00-00-00-00-00-00-00\")\n\t_, err = gwStorage.Get(nonEUI, userID)\n\tif err == nil {\n\t\tt.Fatal(\"Expected error when retrieving gw that doesn't exist\")\n\t}\n\n\tif err := gwStorage.Update(gateway1, userID); err != nil {\n\t\tt.Fatalf(\"Got error storing tags on gateway: %v\", err)\n\t}\n\n\t// Update fields\n\tgateway1.Altitude = 111\n\tgateway1.Latitude = 222\n\tgateway1.Longitude = 333\n\tgateway1.IP = net.ParseIP(\"10.10.10.10\")\n\tgateway1.StrictIP = true\n\tif err := gwStorage.Update(gateway1, userID); err != nil {\n\t\tt.Fatalf(\"Got error updating gateway: %v\", err)\n\t}\n\tupdatedGw, _ := gwStorage.Get(gateway1.GatewayEUI, userID)\n\tif updatedGw.Altitude != gateway1.Altitude || updatedGw.Longitude != gateway1.Longitude || updatedGw.IP.String() != gateway1.IP.String() || updatedGw.StrictIP != gateway1.StrictIP {\n\t\tt.Fatalf(\"Gateways doesn't match! 
%v != %v\", updatedGw, gateway1)\n\t}\n\t// Remove both\n\tif err := gwStorage.Delete(gateway1.GatewayEUI, userID); err != nil {\n\t\tt.Fatalf(\"Got error removing gateway #1: %v\", err)\n\t}\n\tif err := gwStorage.Delete(gateway2.GatewayEUI, userID); err != nil {\n\t\tt.Fatalf(\"Got error removing gateway #2: %v\", err)\n\t}\n\t// Remove one that isn't supposed to exist in the list\n\tif err := gwStorage.Delete(gateway1.GatewayEUI, userID); err == nil {\n\t\tt.Fatal(\"Expected error when deleting gateway a second time\")\n\t}\n\n\t// Ensure list is empty again\n\t// Retrieve the empty list\n\tgwChan, err = gwStorage.GetList(userID)\n\tif err != nil {\n\t\tt.Fatal(\"Got error retrieving empty list: \", err)\n\t}\n\tcount = 0\n\tfor range gwChan {\n\t\tcount++\n\t}\n\tif count > 0 {\n\t\tt.Fatalf(\"Got more than 0 elements (got %d)\", count)\n\t}\n\n\ttestAllGateways(gwStorage, t)\n\n}", "func (s *StandAloneStorage) Start() error {\n\tvar err error\n\ts.db, err = badger.Open(s.opt)\n\treturn err\n}", "func Setup(c config.Config) error {\n\n\tlog.Info(\"storage: setting up storage module\")\n\n\tlog.Info(\"storage: setting up Redis connection pool\")\n\n\tredisPool = &redis.Pool{\n\t\tMaxIdle: c.Redis.MaxIdle,\n\t\tMaxActive: c.Redis.MaxActive,\n\t\tIdleTimeout: c.Redis.IdleTimeout,\n\t\tWait: true,\n\t\tDial: func() (redis.Conn, error) {\n\t\t\tc, err := redis.DialURL(c.Redis.URL,\n\t\t\t\tredis.DialReadTimeout(time.Minute),\n\t\t\t\tredis.DialWriteTimeout(time.Minute),\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"redis connection error: %s\", err)\n\t\t\t}\n\t\t\treturn c, err\n\t\t},\n\t\tTestOnBorrow: func(c redis.Conn, t time.Time) error {\n\t\t\tif time.Now().Sub(t) < time.Minute {\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t\t_, err := c.Do(\"PING\")\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"ping redis error: %s\", err)\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t}\n\n\tlog.Info(\"storage: connecting to database\")\n\td, err := gorm.Open(\"mysql\", c.MySQL.DSN)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"storage: mariadb connectionerror\")\n\t}\n\td.DB().SetMaxOpenConns(c.MySQL.MaxOpenConnections)\n\td.DB().SetMaxIdleConns(c.MySQL.MaxIdleConnections)\n\tfor {\n\t\tif err := d.DB().Ping(); err != nil {\n\t\t\tlog.WithError(err).Warning(\"storage: ping mariadb error, will retry in 2s\")\n\t\t\ttime.Sleep(2 * time.Second)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tdb = d\n\n\treturn nil\n}", "func setupStorage(t *testing.T) caddytls.Storage {\n\ttruncateDs(t)\n\n\tcaurl, _ := url.Parse(TestCaUrl)\n\tcs, err := tlsclouddatastore.NewCloudDatastoreStorage(caurl)\n\n\tif err != nil {\n\t\tt.Fatalf(\"Error creating Consul storage: %v\", err)\n\t}\n\n\treturn cs\n}", "func InitKVStorage() *kv.LibKVBackend {\n\n\tswitch storagebackend := Get(\"EnvDefaultKVBackend\"); storagebackend {\n\tcase structs.StorageBoltDB:\n\t\treturn generateDefaultStorageBackend()\n\tcase structs.StorageConsul:\n\t\tconsul.Register()\n\t\tdb := Get(\"EnvDefaultConsulAddr\")\n\t\tkv, err := kv.NewLibKVBackend(structs.StorageConsul, \"default\", []string{db})\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"path\": db,\n\t\t\t\t\"error\": err,\n\t\t\t}).Fatal(\"failed to create new consult connection\")\n\t\t}\n\t\treturn kv\n\tdefault:\n\t\treturn generateDefaultStorageBackend()\n\t}\n}", "func open(s string, gopts GlobalOptions, opts options.Options) (restic.Backend, error) {\n\tdebug.Log(\"parsing location %v\", s)\n\tloc, err := location.Parse(s)\n\tif err != nil 
{\n\t\treturn nil, errors.Fatalf(\"parsing repository location failed: %v\", err)\n\t}\n\n\tvar be restic.Backend\n\n\tcfg, err := parseConfig(loc, opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttropts := backend.TransportOptions{\n\t\tRootCertFilenames: globalOptions.CACerts,\n\t\tTLSClientCertKeyFilename: globalOptions.TLSClientCert,\n\t}\n\trt, err := backend.Transport(tropts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// wrap the transport so that the throughput via HTTP is limited\n\tlim := limiter.NewStaticLimiter(gopts.LimitUploadKb, gopts.LimitDownloadKb)\n\trt = lim.Transport(rt)\n\n\tswitch loc.Scheme {\n\tcase \"local\":\n\t\tbe, err = local.Open(cfg.(local.Config))\n\t\t// wrap the backend in a LimitBackend so that the throughput is limited\n\t\tbe = limiter.LimitBackend(be, lim)\n\tcase \"sftp\":\n\t\tbe, err = sftp.Open(cfg.(sftp.Config))\n\t\t// wrap the backend in a LimitBackend so that the throughput is limited\n\t\tbe = limiter.LimitBackend(be, lim)\n\tcase \"s3\":\n\t\tbe, err = s3.Open(cfg.(s3.Config), rt)\n\tcase \"gs\":\n\t\tbe, err = gs.Open(cfg.(gs.Config), rt)\n\tcase \"azure\":\n\t\tbe, err = azure.Open(cfg.(azure.Config), rt)\n\tcase \"swift\":\n\t\tbe, err = swift.Open(cfg.(swift.Config), rt)\n\tcase \"b2\":\n\t\tbe, err = b2.Open(globalOptions.ctx, cfg.(b2.Config), rt)\n\tcase \"rest\":\n\t\tbe, err = rest.Open(cfg.(rest.Config), rt)\n\tcase \"rclone\":\n\t\tbe, err = rclone.Open(cfg.(rclone.Config), lim)\n\n\tdefault:\n\t\treturn nil, errors.Fatalf(\"invalid backend: %q\", loc.Scheme)\n\t}\n\n\tif err != nil {\n\t\treturn nil, errors.Fatalf(\"unable to open repo at %v: %v\", s, err)\n\t}\n\n\t// check if config is there\n\tfi, err := be.Stat(globalOptions.ctx, restic.Handle{Type: restic.ConfigFile})\n\tif err != nil {\n\t\treturn nil, errors.Fatalf(\"unable to open config file: %v\\nIs there a repository at the following location?\\n%v\", err, s)\n\t}\n\n\tif fi.Size == 0 {\n\t\treturn nil, errors.New(\"config file has zero size, invalid repository?\")\n\t}\n\n\treturn be, nil\n}", "func init() {\n\tif STORMPATH_API_KEY_ID == \"\" {\n\t\tlog.Fatal(\"STORMPATH_API_KEY_ID not set in the environment.\")\n\t} else if STORMPATH_API_KEY_SECRET == \"\" {\n\t\tlog.Fatal(\"STORMPATH_API_KEY_SECRET not set in the environment.\")\n\t}\n\n\t// Generate a globally unique UUID to be used as a prefix throughout our\n\t// testing.\n\tuuid, err := uuid.NewV4()\n\tif err != nil {\n\t\tlog.Fatal(\"UUID generation failed.\")\n\t}\n\n\t// Store our test prefix.\n\tTEST_PREFIX = uuid.String() + \"-\"\n\n\t// Generate a Stormpath client we'll use for all our tests.\n\tclient, err := NewClient(&ApiKeyPair{\n\t\tId: STORMPATH_API_KEY_ID,\n\t\tSecret: STORMPATH_API_KEY_SECRET,\n\t})\n\tif err != nil {\n\t\tlog.Fatal(\"Couldn't create a Stormpath client.\")\n\t}\n\tCLIENT = client\n}", "func newStorage(\n\tmachineName,\n\ttablename,\n\tdatabase,\n\tusername,\n\tpassword,\n\tinfluxdbHost string,\n\tisSecure bool,\n\tbufferDuration time.Duration,\n) (*influxdbStorage, error) {\n\tconfig := &influxdb.ClientConfig{\n\t\tHost: influxdbHost,\n\t\tUsername: username,\n\t\tPassword: password,\n\t\tDatabase: database,\n\t\tIsSecure: isSecure,\n\t}\n\tclient, err := influxdb.NewClient(config)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// TODO(monnand): With go 1.3, we cannot compress data now.\n\tclient.DisableCompression()\n\n\tret := &influxdbStorage{\n\t\tclient: client,\n\t\tmachineName: machineName,\n\t\ttableName: tablename,\n\t\tbufferDuration: 
bufferDuration,\n\t\tlastWrite: time.Now(),\n\t\tseries: make([]*influxdb.Series, 0),\n\t}\n\tret.readyToFlush = ret.defaultReadyToFlush\n\treturn ret, nil\n}", "func (d *Driver) Open(uri string) (driver.Conn, error) {\n proto, addr, dbname, user, passwd, params, err := parseDSN(uri)\n\tif err != nil {\n\t return nil, err\n }\n\td.proto = proto\n d.raddr = addr\n d.user = user\n\td.passwd = passwd\n d.db = dbname\n\n\t// Establish the connection\n\tc := conn{mysql.New(d.proto, d.laddr, d.raddr, d.user, d.passwd, d.db)}\n\n if v, ok := params[\"charset\"]; ok {\n Register(\"SET NAMES \" + v)\n }\n if v, ok := params[\"keepalive\"]; ok {\n t, err := strconv.Atoi(v)\n if err != nil {\n return nil, ErrMaxIdle\n }\n RegisterFunc(func(my mysql.Conn){\n go func() {\n for my.IsConnected() {\n time.Sleep(time.Duration(t) * time.Second)\n if err := my.Ping(); err != nil {\n break\n }\n }\n }()\n })\n }\n\tfor _, q := range d.initCmds {\n\t\tc.my.Register(q) // Register initialisation commands\n\t}\n for _, f := range d.initFuncs {\n c.my.RegisterFunc(f)\n }\n\tif err := c.my.Connect(); err != nil {\n\t\treturn nil, errFilter(err)\n\t}\n\treturn &c, nil\n}", "func (storage *B2Storage) EnableTestMode() {\n for _, client := range storage.clients {\n client.TestMode = true\n }\n}", "func TestCheckStorage(t *testing.T) {\n\tctx := pctx.TestContext(t)\n\tt.Parallel()\n\tclient := newClient(ctx, t)\n\tres, err := client.CheckStorage(ctx, &pfs.CheckStorageRequest{\n\t\tReadChunkData: false,\n\t})\n\trequire.NoError(t, err)\n\trequire.NotNil(t, res)\n}", "func (config Service) Open(log gopi.Logger) (gopi.Driver, error) {\n\tlog.Debug(\"<grpc.service.sensordb.Open>{ server=%v database=%v }\", config.Server, config.Database)\n\n\t// Check for bad input parameters\n\tif config.Server == nil || config.Database == nil {\n\t\treturn nil, gopi.ErrBadParameter\n\t}\n\n\tthis := new(service)\n\tthis.log = log\n\tthis.database = config.Database\n\n\t// Register service with GRPC server\n\tpb.RegisterSensorDBServer(config.Server.(grpc.GRPCServer).GRPCServer(), this)\n\n\t// Success\n\treturn this, nil\n}", "func init() {\n\tsubId = flag.String(\"subid\", \"\", \"Azure SubscriptionId (Required)\")\n\tappId = flag.String(\"appid\", \"\", \"App Registration Id (Required)\")\n\ttenant = flag.String(\"tenantid\", \"\", \"Tenant Id (Required)\")\n\tstore = flag.String(\"store\", \"\", \"Storage Acct for Upload (Required)\")\n\tcontainer = flag.String(\"container\", \"\", \"Container for Upload (Required)\")\n\tflag.Parse()\n\n\tif *subId == \"\" {\n\t\tflag.PrintDefaults()\n\t\tos.Exit(1)\n\t}\n\tif *appId == \"\" {\n\t\tflag.PrintDefaults()\n\t\tos.Exit(1)\n\t}\n\tif *tenant == \"\" {\n\t\tflag.PrintDefaults()\n\t\tos.Exit(1)\n\t}\n\tif *store == \"\" {\n\t\tflag.PrintDefaults()\n\t\tos.Exit(1)\n\t}\n\tif *container == \"\" {\n\t\tflag.PrintDefaults()\n\t\tos.Exit(1)\n\t}\n\t// Setup a new Device Token configuration\n\tdfc := auth.NewDeviceFlowConfig(*appId, *tenant)\n\t\n\t// Set the resource for the storage endpoint\n\tdfc.Resource = \"https://storage.azure.com/\"\n\t\n\t// Prompt user for authentication\n\tstorageToken, err := dfc.ServicePrincipalToken()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Setup a storage Token credential to use for upload\n\tcredential = azblob.NewTokenCredential(storageToken.OAuthToken(), nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to get OAuth config: %v\", err)\n\t}\n}", "func (pg *PostgresqlDb)Open() *domain.ErrHandler {\n connErr:= pg.config() \n if connErr != nil {\n return 
connErr\n }\n var err error\n psqlInfo := fmt.Sprintf(\"host=%s port=%d user=%s password=%s dbname=%s sslmode=disable\", pg.host, pg.port, pg.user, pg.pass, pg.dbname)\n pg.conn, err = sql.Open(\"postgres\", psqlInfo)\n if err != nil {\n return &domain.ErrHandler{1, \"func (pg PostgresqlDb)\", \"Open\", err.Error()}\n }\n err = pg.conn.Ping()\n if err != nil {\n return &domain.ErrHandler{1, \"func (pg PostgresqlDb)\", \"Open\", err.Error()}\n }\n connErr = pg.initDb()\n if connErr != nil {\n return connErr\n }\n return nil\n}", "func (s *orm) Connect(ctx context.Context, config skelego.Config) {\n\t//databaseType := config.Get(\"storage.engine\").(string)\n\tlogger := skelego.Logger()\n\thost := config.Get(\"storage.host\")\n\tport := config.Get(\"storage.port\")\n\tdatabase := config.Get(\"storage.name\")\n\tusername := config.Get(\"storage.username\")\n\tpassword := config.Get(\"storage.password\")\n\n\tlogger.LogEvent(\"Connecting to %s\", s.dbURI(host, port, username, database, password))\n\n\tdb, err := gorm.Open(\"postgres\", s.dbURI(host, port, username, database, password))\n\tif err != nil {\n\t\tprint(err.Error())\n\t}\n\tlogger.LogEvent(\"Successfully connected!\", db)\n\ts.client = db\n\tif err := db.DB().Ping(); err != nil {\n\t\tlogger.LogFatal(\"Error in database connection; restart app; error: %s\", err.Error())\n\t}\n}", "func NewStorage(mongoURI string, dbname string) (*Storage, error) {\r\n\tclientOptions := options.Client().ApplyURI(mongoURI)\r\n\tclient, err := mongo.Connect(context.Background(), clientOptions)\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\tdb := client.Database(dbname)\r\n\ts := &Storage{Client: client, DB: db}\r\n\treturn s, nil\r\n}", "func (a *AzureBlobStorage) Init(metadata bindings.Metadata) error {\n\tm, err := a.parseMetadata(metadata)\n\tif err != nil {\n\t\treturn err\n\t}\n\ta.metadata = m\n\n\tcredential, env, err := azauth.GetAzureStorageBlobCredentials(a.logger, m.AccountName, metadata.Properties)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"invalid credentials with error: %s\", err.Error())\n\t}\n\n\tuserAgent := \"dapr-\" + logger.DaprVersion\n\toptions := azblob.PipelineOptions{\n\t\tTelemetry: azblob.TelemetryOptions{Value: userAgent},\n\t}\n\tp := azblob.NewPipeline(credential, options)\n\n\tvar containerURL azblob.ContainerURL\n\tcustomEndpoint, ok := mdutils.GetMetadataProperty(metadata.Properties, azauth.StorageEndpointKeys...)\n\tif ok && customEndpoint != \"\" {\n\t\tURL, parseErr := url.Parse(fmt.Sprintf(\"%s/%s/%s\", customEndpoint, m.AccountName, m.Container))\n\t\tif parseErr != nil {\n\t\t\treturn parseErr\n\t\t}\n\t\tcontainerURL = azblob.NewContainerURL(*URL, p)\n\t} else {\n\t\tURL, _ := url.Parse(fmt.Sprintf(\"https://%s.blob.%s/%s\", m.AccountName, env.StorageEndpointSuffix, m.Container))\n\t\tcontainerURL = azblob.NewContainerURL(*URL, p)\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)\n\t_, err = containerURL.Create(ctx, azblob.Metadata{}, m.PublicAccessLevel)\n\tcancel()\n\t// Don't return error, container might already exist\n\ta.logger.Debugf(\"error creating container: %w\", err)\n\ta.containerURL = containerURL\n\n\treturn nil\n}", "func (sessionRepo *mockSessionRepo) Initialize(ctx context.Context, db *sql.DB) {}", "func initStorage() storage.Engine {\n\t// Name of the storage engine.\n\tname := viper.GetString(\"storage\")\n\tpath := viper.GetString(\"path\")\n\n\t// Directory of the config file. 
Ensure the storage engine\n\t// path is resolved relative to the config file.\n\tdir := filepath.Dir(viper.ConfigFileUsed())\n\tpath = filepath.Join(dir, path)\n\n\t// Supported options.\n\topts := storage.Options{\n\t\t\"path\": path,\n\t}\n\n\t// Initialize the storage engine.\n\tengine, err := origins.Init(name, &opts)\n\n\tif err != nil {\n\t\tlogrus.Fatal(\"storage:\", err)\n\t}\n\n\treturn engine\n}", "func init() {\n\tcs = &testConnSource{}\n}", "func TestReplicaStorage(t *testing.T) {\n\tsuite.Run(t, NewReplicaSuite())\n}", "func (bc *BoltClient) OpenDB() {\n\tvar err error\n\tbc.boltDB, err = bolt.Open(bc.FileName, 0600, nil)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tpanic(err.Error())\n\t}\n}", "func (c *tensorboardRESTClient) Connection() *grpc.ClientConn {\n\treturn nil\n}", "func (s *DataStore) Open() (err error) {\n\t// set foreign key pragma to true in connection: https://github.com/mattn/go-sqlite3#connection-string\n\ts.DB, err = sql.Open(\"sqlite3\", s.Path+\"?_fk=true\")\n\tif err != nil {\n\t\tlog.Error(err)\n\t}\n\treturn\n}", "func (al *AccessLayer) Open() error {\n\tc := mysql.Config{\n\t\tUser: al.user,\n\t\tPasswd: al.pass,\n\t\tDBName: al.db,\n\t\tAddr: al.host,\n\t}\n\tif al.host != \"\" {\n\t\tc.Net = \"tcp\"\n\t}\n\tc.Params = map[string]string{\"allowNativePasswords\": \"true\"}\n\tfmt.Println(c.FormatDSN())\n\tdb, err := sql.Open(\"mysql\", c.FormatDSN())\n\tif err != nil {\n\t\treturn err\n\t}\n\tdb.SetConnMaxLifetime(5 * time.Minute)\n\tif err := db.Ping(); err != nil {\n\t\treturn err\n\t}\n\n\tal.AL = db\n\n\terr = al.CreateTables(false)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = al.CreateIndices(false)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n\n}", "func (d *Driver) Connection() gobot.Connection { return d.connection }", "func NewStorage(cfg *api.Config, rootPath string, syncFrequency time.Duration) (storage.Interface, error) {\n\tcfg.WaitTime = syncFrequency\n\n\t// Get a new client\n\tclient, err := api.NewClient(cfg)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"creating consul client\")\n\t}\n\n\treturn &Client{\n\t\tv1: &v1client{\n\t\t\tupstreams: &upstreamsClient{\n\t\t\t\tbase: base.NewConsulStorageClient(rootPath+\"/upstreams\", client),\n\t\t\t},\n\t\t\tvirtualHosts: &virtualHostsClient{\n\t\t\t\tbase: base.NewConsulStorageClient(rootPath+\"/virtualhosts\", client),\n\t\t\t},\n\t\t},\n\t}, nil\n}" ]
[ "0.65038735", "0.64576876", "0.6236563", "0.614791", "0.60238194", "0.6002559", "0.59863055", "0.59810174", "0.5964064", "0.59425503", "0.5887152", "0.58508617", "0.5841261", "0.58155966", "0.5799284", "0.57392347", "0.5700831", "0.56958634", "0.5690558", "0.56326973", "0.56047887", "0.55859286", "0.55818313", "0.5579471", "0.55607235", "0.5545584", "0.5516038", "0.5516038", "0.5496707", "0.5490289", "0.54888713", "0.5455411", "0.5447899", "0.5441492", "0.5436471", "0.54348004", "0.54254127", "0.54244703", "0.5421882", "0.5407952", "0.54063386", "0.5400203", "0.5383598", "0.5377804", "0.5361866", "0.5361025", "0.5359253", "0.5333447", "0.5333447", "0.5332182", "0.5306516", "0.5305871", "0.5298676", "0.5295224", "0.52883637", "0.5276982", "0.52739716", "0.525896", "0.52433544", "0.5242458", "0.5240936", "0.5239054", "0.5237278", "0.5236707", "0.52140456", "0.52029485", "0.51964974", "0.5193569", "0.5188713", "0.51853806", "0.51804686", "0.5175739", "0.5172124", "0.5171024", "0.5166946", "0.51538354", "0.5146467", "0.51367253", "0.5129992", "0.512615", "0.5099979", "0.5096852", "0.5073053", "0.5066698", "0.5065692", "0.50601816", "0.50558454", "0.5053404", "0.5047628", "0.50399643", "0.5038787", "0.50330126", "0.50303", "0.50250375", "0.5024485", "0.5018883", "0.501742", "0.5015658", "0.5012806", "0.500486" ]
0.76576877
0
Deprecated: This has been replaced by github.com/oapicodegen/runtime/typesMarshalJSON
func (d Date) MarshalJSON() ([]byte, error) { return json.Marshal(d.Time.Format(DateFormat)) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Marshal(v interface{}) ([]byte, error) {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn []byte{}, err\n\t}\n\n\treturn replaceUnicodeConversion(b), err\n}", "func Marshal(v interface{}) ([]byte, error) {\n\treturn json.Marshal(v)\n}", "func Marshal(v interface{}) ([]byte, error) {\n\tif ImplementsPreJSONMarshaler(v) {\n\t\terr := v.(PreJSONMarshaler).PreMarshalJSON()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn json.Marshal(v)\n}", "func JsonMarshal(t interface{}) ([]byte, error) {\n\tbuffer := &bytes.Buffer{}\n\tenc := json.NewEncoder(buffer)\n\tenc.SetEscapeHTML(false)\n\terr := enc.Encode(t)\n\treturn buffer.Bytes(), err\n}", "func Marshal(v interface{}) ([]byte, error) {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn canonicaljson.Transform(b)\n}", "func Marshal(in interface{}) ([]byte, error) {\n\tres, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(in)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"marshaling error: %w\", err)\n\t}\n\treturn res, nil\n}", "func jsonify(v interface{}) string { return string(mustMarshalJSON(v)) }", "func jsonMarshal(t interface{}) ([]byte, error) {\n\tbuffer := &bytes.Buffer{}\n\tencoder := json.NewEncoder(buffer)\n\tencoder.SetEscapeHTML(false)\n\tencoder.SetIndent(\"\", \" \")\n\terr := encoder.Encode(t)\n\treturn buffer.Bytes(), err\n}", "func marshal() {\n\tfmt.Println(\"=== json.marshal ===\")\n\tryan := &Person{\"Ryan\", 25}\n\twire, err := json.Marshal(ryan)\n\tcheck(err)\n\tfmt.Println(string(wire))\n}", "func My_json(demo interface{}) *bytes.Buffer {\r\n\tif bs, err := json.Marshal(demo); err == nil {\r\n\t\treq := bytes.NewBuffer([]byte(bs))\r\n\t\treturn req\r\n\t} else {\r\n\t\tpanic(err)\r\n\t}\r\n}", "func Marshal(p Payload) ([]byte, error) {\n\treturn json.Marshal(p)\n}", "func JSONMarshal(v interface{}) ([]byte, error) {\n\tb, err := json.MarshalIndent(v, \"\", \" \")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn b, err\n\t}\n\tb = bytes.Replace(b, []byte(\"\\\\u003c\"), []byte(\"<\"), -1)\n\tb = bytes.Replace(b, []byte(\"\\\\u003e\"), []byte(\">\"), -1)\n\tb = bytes.Replace(b, []byte(\"\\\\u0026\"), []byte(\"&\"), -1)\n\treturn b, err\n}", "func jsonMarshal(t interface{}) ([]byte, error) {\n\tvar buffer bytes.Buffer\n\tencoder := json.NewEncoder(&buffer)\n\tencoder.SetEscapeHTML(false)\n\tif err := encoder.Encode(t); err != nil {\n\t\treturn nil, err\n\t}\n\t// Prettify\n\tvar out bytes.Buffer\n\tif err := json.Indent(&out, buffer.Bytes(), \"\", \"\\t\"); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn out.Bytes(), nil\n}", "func starlarkJSON(out *bytes.Buffer, v starlark.Value) error {\n\tswitch v := v.(type) {\n\tcase starlark.NoneType:\n\t\tout.WriteString(\"null\")\n\tcase starlark.Bool:\n\t\tfmt.Fprintf(out, \"%t\", v)\n\tcase starlark.Int:\n\t\tdata, err := json.Marshal(v.BigInt())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tout.Write(data)\n\tcase starlark.Float:\n\t\tdata, err := json.Marshal(float64(v))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tout.Write(data)\n\tcase starlark.String:\n\t\t// we have to use a json Encoder to disable noisy html\n\t\t// escaping. 
But the encoder appends a final \\n so we\n\t\t// also should remove it.\n\t\tdata := &bytes.Buffer{}\n\t\te := json.NewEncoder(data)\n\t\te.SetEscapeHTML(false)\n\t\tif err := e.Encode(string(v)); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// remove final \\n introduced by the encoder\n\t\tout.Write(bytes.TrimSuffix(data.Bytes(), []byte(\"\\n\")))\n\tcase starlark.Indexable: // Tuple, List\n\t\tout.WriteByte('[')\n\t\tfor i, n := 0, starlark.Len(v); i < n; i++ {\n\t\t\tif i > 0 {\n\t\t\t\tout.WriteString(\", \")\n\t\t\t}\n\t\t\tif err := starlarkJSON(out, v.Index(i)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tout.WriteByte(']')\n\tcase *starlark.Dict:\n\t\tout.WriteByte('{')\n\t\tfor i, item := range v.Items() {\n\t\t\tif i > 0 {\n\t\t\t\tout.WriteString(\", \")\n\t\t\t}\n\t\t\tif _, ok := item[0].(starlark.String); !ok {\n\t\t\t\treturn fmt.Errorf(\"cannot convert non-string dict key to JSON\")\n\t\t\t}\n\t\t\tif err := starlarkJSON(out, item[0]); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tout.WriteString(\": \")\n\t\t\tif err := starlarkJSON(out, item[1]); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tout.WriteByte('}')\n\n\tdefault:\n\t\treturn fmt.Errorf(\"cannot convert starlark type %q to JSON\", v.Type())\n\t}\n\treturn nil\n}", "func (c *JSONCodec) Marshal(v interface{}) ([]byte, error) {\n\treturn json.Marshal(v)\n}", "func JSONMarshal(data interface{}) ([]byte, error) {\n\tvar b []byte\n\tvar err error\n\n\tb, err = json.MarshalIndent(data, \"\", \" \")\n\n\treturn b, err\n}", "func jsonType(v interface{}) string {\n\tswitch v.(type) {\n\tcase nil:\n\t\treturn \"null\"\n\tcase bool:\n\t\treturn \"boolean\"\n\tcase json.Number, float64, int, int32, int64:\n\t\treturn \"number\"\n\tcase string:\n\t\treturn \"string\"\n\tcase []interface{}:\n\t\treturn \"array\"\n\tcase map[string]interface{}:\n\t\treturn \"object\"\n\t}\n\tpanic(InvalidJSONTypeError(fmt.Sprintf(\"%T\", v)))\n}", "func marshal(v interface{}) string {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn string(b)\n}", "func Marshal(object interface{}) (data string, err error) {\n\tif t, err := json.Marshal(object); err != nil {\n\t\tdata = \"\"\n\t} else {\n\t\tdata = string(t)\n\t}\n\treturn\n}", "func Marshal(v Marshaler) ([]byte, error) {\n\tw := jwriter.Writer{}\n\tv.MarshalEasyJSON(&w)\n\treturn w.BuildBytes()\n}", "func (j *JsonlMarshaler) Marshal(v interface{}) ([]byte, error) {\n\treturn json.Marshal(v)\n}", "func (c *Codec) Marshal(v interface{}) ([]byte, error) {\n\tresult, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn nil, xerrors.New(err.Error())\n\t}\n\n\treturn result, nil\n}", "func marshal(v interface{}) []byte {\n\tb, err := json.Marshal(v)\n\tassert(err == nil, \"marshal error: %s\", err)\n\treturn b\n}", "func (j *JSON) Marshal(target interface{}) (output interface{}, err error) {\n\treturn jsonEncoding.Marshal(target)\n}", "func EncodeJson(v interface{}) ([]byte, error) {\n\treturn json.ConfigCompatibleWithStandardLibrary.Marshal(v)\n}", "func Marshal(v Marshaler) ([]byte, error) {\n\tif isNilInterface(v) {\n\t\treturn nullBytes, nil\n\t}\n\n\tw := jwriter.Writer{}\n\tv.MarshalTinyJSON(&w)\n\treturn w.BuildBytes()\n}", "func JsonMarshal(val any) ([]byte, error) {\n\tbuf := &bytes.Buffer{}\n\tencoder := json.NewEncoder(buf)\n\tencoder.SetEscapeHTML(false)\n\tif err := encoder.Encode(val); err != nil {\n\t\treturn nil, err\n\t}\n\t// Return without a trailing line feed.\n\tlineTerminatedJson := buf.Bytes()\n\treturn 
bytes.TrimSuffix(lineTerminatedJson, []byte(\"\\n\")), nil\n}", "func (j *JsonMarshaler) Marshal(v interface{}) ([]byte, error) {\n\tswitch v.(type) {\n\tcase *distribute.GetResponse:\n\t\tvalue, err := protobuf.MarshalAny(v.(*distribute.GetResponse).Fields)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn json.Marshal(\n\t\t\tmap[string]interface{}{\n\t\t\t\t\"fields\": value,\n\t\t\t},\n\t\t)\n\tcase *distribute.SearchResponse:\n\t\tvalue, err := protobuf.MarshalAny(v.(*distribute.SearchResponse).SearchResult)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn json.Marshal(\n\t\t\tmap[string]interface{}{\n\t\t\t\t\"search_result\": value,\n\t\t\t},\n\t\t)\n\tdefault:\n\t\treturn json.Marshal(v)\n\t}\n}", "func (c *JsonCodec) Marshal(object interface{}, options map[string]interface{}) ([]byte, error) {\n\treturn jsonEncoding.Marshal(object)\n}", "func JSONEncoder() Encoder { return jsonEncoder }", "func JsonEncode(data []byte, v interface{}) error {\n\n\treturn json.Unmarshal(data, v)\n}", "func (s *Serializer) Marshal(v interface{}) ([]byte, error) {\n\treturn jsoniter.Marshal(v)\n}", "func (m *Marshaler) JSON(v interface{}) ([]byte, error) {\n\tif _, ok := v.(proto.Message); ok {\n\t\tvar buf bytes.Buffer\n\t\tjm := &jsonpb.Marshaler{}\n\t\tjm.OrigName = true\n\t\tif err := jm.Marshal(&buf, v.(proto.Message)); err != nil {\n\t\t\treturn []byte{}, err\n\t\t}\n\n\t\tif m.FilterProtoJson {\n\t\t\treturn m.FilterJsonWithStruct(buf.Bytes(), v)\n\t\t}\n\t\treturn buf.Bytes(), nil\n\t}\n\treturn json.Marshal(v)\n}", "func marshalJSON(namingStrategy string, that interface{}) ([]byte, error) {\n\tout := map[string]interface{}{}\n\tt := reflect.TypeOf(that)\n\tv := reflect.ValueOf(that)\n\n\tfnctn := v.MethodByName(namingStrategy)\n\tfname := func(params ...interface{}) string {\n\t\tin := make([]reflect.Value, len(params))\n\t\tfor k, param := range params {\n\t\t\tin[k] = reflect.ValueOf(param)\n\t\t}\n\t\treturn fnctn.Call(in)[0].String()\n\t}\n\toutName := \"\"\n\tfor i := 0; i < t.NumField(); i++ {\n\t\tf := t.Field(i)\n\t\tswitch n := f.Tag.Get(\"json\"); n {\n\t\tcase \"\":\n\t\t\toutName = f.Name\n\t\tcase \"-\":\n\t\t\toutName = \"\"\n\t\tdefault:\n\t\t\toutName = fname(n)\n\t\t}\n\t\tif outName != \"\" {\n\t\t\tout[outName] = v.Field(i).Interface()\n\t\t}\n\t}\n\treturn json.Marshal(out)\n}", "func (j *jsonNative) MarshalJSONBuf(buf fflib.EncodingBuffer) error {\n\tif j == nil {\n\t\tbuf.WriteString(\"null\")\n\t\treturn nil\n\t}\n\tvar err error\n\tvar obj []byte\n\t_ = obj\n\t_ = err\n\tbuf.WriteString(`{ \"request\":`)\n\tfflib.WriteJsonString(buf, string(j.Request))\n\tbuf.WriteByte(',')\n\tif len(j.Ver) != 0 {\n\t\tbuf.WriteString(`\"ver\":`)\n\t\tfflib.WriteJsonString(buf, string(j.Ver))\n\t\tbuf.WriteByte(',')\n\t}\n\tif len(j.API) != 0 {\n\t\tbuf.WriteString(`\"api\":`)\n\t\tif j.API != nil {\n\t\t\tbuf.WriteString(`[`)\n\t\t\tfor i, v := range j.API {\n\t\t\t\tif i != 0 {\n\t\t\t\t\tbuf.WriteString(`,`)\n\t\t\t\t}\n\t\t\t\tfflib.FormatBits2(buf, uint64(v), 10, v < 0)\n\t\t\t}\n\t\t\tbuf.WriteString(`]`)\n\t\t} else {\n\t\t\tbuf.WriteString(`null`)\n\t\t}\n\t\tbuf.WriteByte(',')\n\t}\n\tif len(j.BAttr) != 0 {\n\t\tbuf.WriteString(`\"battr\":`)\n\t\tif j.BAttr != nil {\n\t\t\tbuf.WriteString(`[`)\n\t\t\tfor i, v := range j.BAttr {\n\t\t\t\tif i != 0 {\n\t\t\t\t\tbuf.WriteString(`,`)\n\t\t\t\t}\n\t\t\t\tfflib.FormatBits2(buf, uint64(v), 10, v < 0)\n\t\t\t}\n\t\t\tbuf.WriteString(`]`)\n\t\t} else 
{\n\t\t\tbuf.WriteString(`null`)\n\t\t}\n\t\tbuf.WriteByte(',')\n\t}\n\tif j.Ext != nil {\n\t\tif true {\n\t\t\tbuf.WriteString(`\"ext\":`)\n\n\t\t\t{\n\n\t\t\t\tobj, err = j.Ext.MarshalJSON()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tbuf.Write(obj)\n\n\t\t\t}\n\t\t\tbuf.WriteByte(',')\n\t\t}\n\t}\n\tif j.ParsedRequest != nil {\n\t\tif true {\n\t\t\tbuf.WriteString(`\"_parsed_request\":`)\n\n\t\t\t{\n\n\t\t\t\terr = j.ParsedRequest.MarshalJSONBuf(buf)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t}\n\t\t\tbuf.WriteByte(',')\n\t\t}\n\t}\n\tbuf.Rewind(1)\n\tbuf.WriteByte('}')\n\treturn nil\n}", "func mustMarshalJSON(v interface{}) []byte {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\tpanic(\"marshal json: \" + err.Error())\n\t}\n\treturn b\n}", "func EncodedJson(v interface{}) []byte {\n\tif p, err := json.Marshal(v); err != nil {\n\t\treturn []byte{}\n\t} else {\n\t\treturn p\n\t}\n}", "func Marshal(v interface{}) ([]byte, error) {\n\trv := reflect.ValueOf(v)\n\tif rv.Kind() != reflect.Slice {\n\t\treturn nil, &InvalidMarshalError{rv.Kind()}\n\t}\n\n\tvar buf bytes.Buffer\n\tencoder := json.NewEncoder(&buf)\n\tfor i := 0; i < rv.Len(); i++ {\n\t\tif err := encoder.Encode(rv.Index(i).Interface()); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn buf.Bytes(), nil\n}", "func JSON(val interface{}) interface {\n\tdriver.Valuer\n\tsql.Scanner\n} {\n\treturn jsontype{val: val}\n}", "func (j *TextMarshaler) Marshal(v interface{}) ([]byte, error) {\n\treturn json.Marshal(v)\n}", "func marshal(v interface{}) (*string, error) {\n\tvar s *string\n\tif v != nil {\n\t\tdata, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tc := string(data)\n\t\ts = &c\n\t}\n\treturn s, nil\n}", "func (i Identity)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(i.Type != \"\") {\n objectMap[\"type\"] = i.Type\n }\n return json.Marshal(objectMap)\n }", "func JSONEncode(data interface{}) string {\n\tbt, _ := json.Marshal(data)\n\treturn string(bt)\n}", "func (doc T) MarshalJSON() ([]byte, error) {\n\tm := make(map[string]interface{}, 15+len(doc.Extensions))\n\tfor k, v := range doc.Extensions {\n\t\tm[k] = v\n\t}\n\tm[\"swagger\"] = doc.Swagger\n\tm[\"info\"] = doc.Info\n\tif x := doc.ExternalDocs; x != nil {\n\t\tm[\"externalDocs\"] = x\n\t}\n\tif x := doc.Schemes; len(x) != 0 {\n\t\tm[\"schemes\"] = x\n\t}\n\tif x := doc.Consumes; len(x) != 0 {\n\t\tm[\"consumes\"] = x\n\t}\n\tif x := doc.Produces; len(x) != 0 {\n\t\tm[\"produces\"] = x\n\t}\n\tif x := doc.Host; x != \"\" {\n\t\tm[\"host\"] = x\n\t}\n\tif x := doc.BasePath; x != \"\" {\n\t\tm[\"basePath\"] = x\n\t}\n\tif x := doc.Paths; len(x) != 0 {\n\t\tm[\"paths\"] = x\n\t}\n\tif x := doc.Definitions; len(x) != 0 {\n\t\tm[\"definitions\"] = x\n\t}\n\tif x := doc.Parameters; len(x) != 0 {\n\t\tm[\"parameters\"] = x\n\t}\n\tif x := doc.Responses; len(x) != 0 {\n\t\tm[\"responses\"] = x\n\t}\n\tif x := doc.SecurityDefinitions; len(x) != 0 {\n\t\tm[\"securityDefinitions\"] = x\n\t}\n\tif x := doc.Security; len(x) != 0 {\n\t\tm[\"security\"] = x\n\t}\n\tif x := doc.Tags; len(x) != 0 {\n\t\tm[\"tags\"] = x\n\t}\n\treturn json.Marshal(m)\n}", "func JSONMarshal(content interface{}, escape bool) ([]byte, error) {\n\tvar buf bytes.Buffer\n\tenc := json.NewEncoder(&buf)\n\tenc.SetEscapeHTML(escape)\n\tif err := enc.Encode(content); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn buf.Bytes(), nil\n}", "func (t JSONTime)MarshalJSON(b []byte) (err error) 
{\n s := string(b)\n logging.Trace(currentCtx,\"JSONTime(String)=%s\",s)\n if s == \"null\" || s == \"\" {\n t.Time = time.Time{}\n return\n }\n tt, err := time.Parse(jsonTimeLayout, s)\n t = JSONTime{tt}\n return\n}", "func testJSONMarshal(t *testing.T, v interface{}, want string) {\n\tj, err := json.Marshal(v)\n\tif err != nil {\n\t\tt.Errorf(\"Unable to marshal JSON for %v\", v)\n\t}\n\n\tw := new(bytes.Buffer)\n\terr = json.Compact(w, []byte(want))\n\tif err != nil {\n\t\tt.Errorf(\"String is not valid json: %s\", want)\n\t}\n\n\tif w.String() != string(j) {\n\t\tt.Errorf(\"json.Marshal(%q) returned %s, want %s\", v, j, w)\n\t}\n\n\t// now go the other direction and make sure things unmarshal as expected\n\tu := reflect.ValueOf(v).Interface()\n\tif err := json.Unmarshal([]byte(want), u); err != nil {\n\t\tt.Errorf(\"Unable to unmarshal JSON for %v: %v\", want, err)\n\t}\n\n\tif !reflect.DeepEqual(v, u) {\n\t\tt.Errorf(\"json.Unmarshal(%q) returned %s, want %s\", want, u, v)\n\t}\n}", "func (j AvroTypeWrapper) MarshalJSON() ([]byte, error) {\n\tswitch {\n\tcase j.Type != \"\":\n\t\treturn []byte(fmt.Sprintf(`\"%s\"`, j.Type)), nil\n\tcase j.TypeObj != nil:\n\t\treturn json.Marshal(*j.TypeObj)\n\tcase len(j.TypeList) > 0:\n\t\treturn json.Marshal(j.TypeList)\n\tdefault:\n\t\treturn []byte(\"null\"), nil\n\t}\n}", "func Marshal(data interface{}) ([]byte, error) {\n\tdocument, err := MarshalToStruct(data, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn json.Marshal(document)\n}", "func (info Info) Marshal() ([]byte, error) {\n\tdata, err := json.Marshal(info)\n\tif err != nil {\n\t\treturn nil, VerError.Wrap(err)\n\t}\n\treturn data, nil\n}", "func testJSONMarshal(t *testing.T, v interface{}, want string) {\n\tj, err := json.Marshal(v)\n\tif err != nil {\n\t\tt.Errorf(\"Unable to marshal JSON for %v\", v)\n\t}\n\n\tw := new(bytes.Buffer)\n\terr = json.Compact(w, []byte(want))\n\tif err != nil {\n\t\tt.Errorf(\"String is not valid json: %s\", want)\n\t}\n\n\tif w.String() != string(j) {\n\t\tt.Errorf(\"json.Marshal(%q) returned %s, want %s\", v, j, w)\n\t}\n\n\t// now go the other direction and make sure things unmarshal as expected\n\tu := reflect.ValueOf(v).Interface()\n\tif err := json.Unmarshal([]byte(want), u); err != nil {\n\t\tt.Errorf(\"Unable to unmarshal JSON for %v\", want)\n\t}\n\n\tif !reflect.DeepEqual(v, u) {\n\t\tt.Errorf(\"json.Unmarshal(%q) returned %s, want %s\", want, u, v)\n\t}\n}", "func (s *ServiceSecrets) MarshalJson() ([]byte, error) {\n\treturn json.Marshal(s)\n}", "func (s Signature) MarshalJSON() ([]byte, error) {\n return json.Marshal(s[:])\n}", "func (v Node) MarshalEasyJSON(w *jwriter.Writer) {\n\teasyjson6601e8cdEncodeGithubComSkydiveProjectSkydiveGraffitiApiTypes1(w, v)\n}", "func (j *Type) MarshalJSON() ([]byte, error) {\n\tvar buf fflib.Buffer\n\tif j == nil {\n\t\tbuf.WriteString(\"null\")\n\t\treturn buf.Bytes(), nil\n\t}\n\terr := j.MarshalJSONBuf(&buf)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func marshalJSON(c *gin.Context, products interface{}) ([]byte, error) {\n\tswitch c.MustGet(\"version\").(string) {\n\tcase versionV1:\n\t\treturn json.Marshal(products)\n\tcase versionV2:\n\t\treturn json.Marshal(hydrateProductsToV2(c, products))\n\t}\n\n\treturn nil, errors.New(\"invalid API version\")\n}", "func DefaultMarshalJSON(obj interface{}) ([]byte, error) {\n\treturn json.MarshalIndent(obj, \"\", \" \")\n}", "func jsonifyType(typeName string) string {\n\tswitch typeName {\n\tcase 
stringType:\n\t\treturn stringJSONType\n\tcase boolType:\n\t\treturn booleanJSONType\n\tcase intType, int32Type, int64Type:\n\t\treturn integerJSONType\n\tcase float32Type, float64Type:\n\t\treturn floatJSONType\n\tcase byteType:\n\t\treturn stringJSONType\n\t}\n\tfmt.Println(\"jsonifyType called with a complex type \", typeName)\n\tpanic(\"jsonifyType called with a complex type\")\n}", "func (uut UserUserType)MarshalJSON() ([]byte, error){\n objectMap := make(map[string]interface{})\n if(uut.Attributes != nil) {\n objectMap[\"attributes\"] = uut.Attributes\n }\n if(uut.CreatedOn != nil) {\n objectMap[\"created_on\"] = uut.CreatedOn\n }\n if(uut.ID != nil) {\n objectMap[\"id\"] = uut.ID\n }\n if(uut.LastTouch != nil) {\n objectMap[\"last_touch\"] = uut.LastTouch\n }\n if(uut.ModifiedOn != nil) {\n objectMap[\"modified_on\"] = uut.ModifiedOn\n }\n if(uut.Status != nil) {\n objectMap[\"status\"] = uut.Status\n }\n if(uut.TenantID != nil) {\n objectMap[\"tenant_id\"] = uut.TenantID\n }\n if(uut.Username != nil) {\n objectMap[\"username\"] = uut.Username\n }\n return json.Marshal(objectMap)\n }", "func jsonEnc(in T) ([]byte, error) {\n\treturn jsonx.Marshal(in)\n}", "func (u *TSignatureType) MarshalJSON() ([]byte, error) {\n\t// Declare temporary struct without functions to avoid recursive function call\n\ttype Alias TSignatureType\n\n\t// Encode innerXML to base64\n\tu.XsdGoPkgCDATA = base64.StdEncoding.EncodeToString([]byte(u.XsdGoPkgCDATA))\n\n\treturn json.Marshal(&struct{\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(u),\n\t})\n}", "func serialize(toMarshal interface{}) *bytes.Buffer {\n\tjsonStr, _ := json.Marshal(toMarshal)\n\treturn bytes.NewBuffer(jsonStr)\n}", "func Bytify(i interface{}) []byte {\n\tb, err := json.Marshal(i)\n\tif err != nil {\n\t\treturn nil\n\t}\n\treturn b\n}", "func testMarshalJSON(t *testing.T, cmd interface{}) {\n\tjsonCmd, err := json.Marshal(cmd)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\tfmt.Println(string(jsonCmd))\n}", "func (v SimpleType) MarshalJSON() ([]byte, error) {\n\tswitch int32(v) {\n\tcase 1:\n\t\treturn ([]byte)(\"\\\"BOOL\\\"\"), nil\n\tcase 2:\n\t\treturn ([]byte)(\"\\\"BYTE\\\"\"), nil\n\tcase 3:\n\t\treturn ([]byte)(\"\\\"INT8\\\"\"), nil\n\tcase 4:\n\t\treturn ([]byte)(\"\\\"INT16\\\"\"), nil\n\tcase 5:\n\t\treturn ([]byte)(\"\\\"INT32\\\"\"), nil\n\tcase 6:\n\t\treturn ([]byte)(\"\\\"INT64\\\"\"), nil\n\tcase 7:\n\t\treturn ([]byte)(\"\\\"FLOAT64\\\"\"), nil\n\tcase 8:\n\t\treturn ([]byte)(\"\\\"STRING\\\"\"), nil\n\tcase 9:\n\t\treturn ([]byte)(\"\\\"STRUCT_EMPTY\\\"\"), nil\n\t}\n\treturn ([]byte)(strconv.FormatInt(int64(v), 10)), nil\n}", "func (v Version) EncodeJSON(b []byte) []byte {\n\tb = append(b, \"{\"...)\n\tif len(v.Metadata) > 0 {\n\t\tb = append(b, `\"metadata\":`...)\n\t\tb = append(b, v.Metadata...)\n\t\tb = append(b, \",\"...)\n\t}\n\tif v.MiddlewareVersion.Set {\n\t\tb = append(b, '\"', 'm', 'i', 'd', 'd', 'l', 'e', 'w', 'a', 'r', 'e', '_', 'v', 'e', 'r', 's', 'i', 'o', 'n', '\"', ':')\n\t\tb = json.AppendString(b, v.MiddlewareVersion.Value)\n\t\tb = append(b, \",\"...)\n\t}\n\tb = append(b, `\"node_version\":`...)\n\tb = json.AppendString(b, v.NodeVersion)\n\tb = append(b, ',', '\"', 'r', 'o', 's', 'e', 't', 't', 'a', '_', 'v', 'e', 'r', 's', 'i', 'o', 'n', '\"', ':')\n\tb = json.AppendString(b, v.RosettaVersion)\n\treturn append(b, \"}\"...)\n}", "func (v Features) MarshalEasyJSON(w *jwriter.Writer) {\n\teasyjson42239ddeEncodeGithubComKhliengDispatchServer25(w, v)\n}", "func toJSON(a interface{}) ([]byte, error) {\n\tbs, err 
:= json.Marshal(a)\n\n\tif err != nil {\n\t\treturn []byte{}, fmt.Errorf(\"Error caught by Onur Gurel\")\n\t}\n\n\treturn bs, nil\n}", "func (b *SampleFJSONBuilder) Marshal(orig *SampleF) ([]byte, error) {\n\tret, err := b.Convert(orig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn json.Marshal(ret)\n}", "func marshalJSON(i *big.Int) ([]byte, error) {\n\ttext, err := i.MarshalText()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn json.Marshal(string(text))\n}", "func (r Api) MarshalJSON() ([]byte, error) {\n\ttype Properties Api\n\treturn json.Marshal(&struct {\n\t\tType string\n\t\tProperties Properties\n\t\tDependsOn []string `json:\"DependsOn,omitempty\"`\n\t\tMetadata map[string]interface{} `json:\"Metadata,omitempty\"`\n\t\tDeletionPolicy policies.DeletionPolicy `json:\"DeletionPolicy,omitempty\"`\n\t\tUpdateReplacePolicy policies.UpdateReplacePolicy `json:\"UpdateReplacePolicy,omitempty\"`\n\t\tCondition string `json:\"Condition,omitempty\"`\n\t}{\n\t\tType: r.AWSCloudFormationType(),\n\t\tProperties: (Properties)(r),\n\t\tDependsOn: r.AWSCloudFormationDependsOn,\n\t\tMetadata: r.AWSCloudFormationMetadata,\n\t\tDeletionPolicy: r.AWSCloudFormationDeletionPolicy,\n\t\tUpdateReplacePolicy: r.AWSCloudFormationUpdateReplacePolicy,\n\t\tCondition: r.AWSCloudFormationCondition,\n\t})\n}", "func (v Allow) EncodeJSON(b []byte) []byte {\n\tb = append(b, '{', '\"', 'b', 'a', 'l', 'a', 'n', 'c', 'e', '_', 'e', 'x', 'e', 'm', 'p', 't', 'i', 'o', 'n', 's', '\"', ':', '[')\n\tfor i, elem := range v.BalanceExemptions {\n\t\tif i != 0 {\n\t\t\tb = append(b, \",\"...)\n\t\t}\n\t\tb = elem.EncodeJSON(b)\n\t}\n\tb = append(b, ']', ',', '\"', 'c', 'a', 'l', 'l', '_', 'm', 'e', 't', 'h', 'o', 'd', 's', '\"', ':', '[')\n\tfor i, elem := range v.CallMethods {\n\t\tif i != 0 {\n\t\t\tb = append(b, \",\"...)\n\t\t}\n\t\tb = json.AppendString(b, elem)\n\t}\n\tb = append(b, `],\"errors\":[`...)\n\tfor i, elem := range v.Errors {\n\t\tif i != 0 {\n\t\t\tb = append(b, \",\"...)\n\t\t}\n\t\tb = elem.EncodeJSON(b)\n\t}\n\tb = append(b, ']', ',', '\"', 'h', 'i', 's', 't', 'o', 'r', 'i', 'c', 'a', 'l', '_', 'b', 'a', 'l', 'a', 'n', 'c', 'e', '_', 'l', 'o', 'o', 'k', 'u', 'p', '\"', ':')\n\tb = json.AppendBool(b, v.HistoricalBalanceLookup)\n\tb = append(b, ',', '\"', 'm', 'e', 'm', 'p', 'o', 'o', 'l', '_', 'c', 'o', 'i', 'n', 's', '\"', ':')\n\tb = json.AppendBool(b, v.MempoolCoins)\n\tb = append(b, ',', '\"', 'o', 'p', 'e', 'r', 'a', 't', 'i', 'o', 'n', '_', 's', 't', 'a', 't', 'u', 's', 'e', 's', '\"', ':', '[')\n\tfor i, elem := range v.OperationStatuses {\n\t\tif i != 0 {\n\t\t\tb = append(b, \",\"...)\n\t\t}\n\t\tb = elem.EncodeJSON(b)\n\t}\n\tb = append(b, ']', ',', '\"', 'o', 'p', 'e', 'r', 'a', 't', 'i', 'o', 'n', '_', 't', 'y', 'p', 'e', 's', '\"', ':', '[')\n\tfor i, elem := range v.OperationTypes {\n\t\tif i != 0 {\n\t\t\tb = append(b, \",\"...)\n\t\t}\n\t\tb = json.AppendString(b, elem)\n\t}\n\tb = append(b, \"],\"...)\n\tif v.TimestampStartIndex.Set {\n\t\tb = append(b, '\"', 't', 'i', 'm', 'e', 's', 't', 'a', 'm', 'p', '_', 's', 't', 'a', 'r', 't', '_', 'i', 'n', 'd', 'e', 'x', '\"', ':')\n\t\tb = json.AppendInt(b, v.TimestampStartIndex.Value)\n\t\tb = append(b, \",\"...)\n\t}\n\tb[len(b)-1] = '}'\n\treturn b\n}", "func (j *JSON) Marshal(obj interface{}) error {\n\tres, err := json.Marshal(obj)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Call our implementation of\n\t// JSON UnmarshalJSON through json.Unmarshal\n\t// to set the result to the JSON object\n\treturn json.Unmarshal(res, 
j)\n}", "func MarshalJSON(v interface{}) []byte {\n\tdata, err := json.Marshal(v)\n\tAbortIf(err)\n\treturn data\n}", "func convertToJSONTypes(value interface{}) (interface{}, error) {\n\tjsonBytes, err := json.Marshal(value)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar jsonValue interface{}\n\n\terr = json.Unmarshal(jsonBytes, &jsonValue)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn jsonValue, nil\n}", "func encode(ins interface{}) ([]byte, error) {\n\treturn json.Marshal(ins)\n}", "func MarshalJSON(a interface{}) (b []byte, err error) {\n\tif m, ok := a.(proto.Message); ok {\n\t\tmarshaller := &jsonpb.Marshaler{}\n\t\tvar buf bytes.Buffer\n\t\terr = marshaller.Marshal(&buf, m)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t\tb = buf.Bytes()\n\t} else {\n\t\tb, err = json.Marshal(a)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func JSON(v interface{}) (*Payload, error) {\n\tdata, err := json.Marshal(v)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Payload{false, nil, []byte(data)}, nil\n}", "func (opts *CreateDocumentOpts) Marshal() ([]byte, error) {\n\treturn json.Marshal(opts)\n}", "func (f *Formatter) Marshal(v interface{}) ([]byte, error) {\n\tdata, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn f.Format(data)\n}", "func JSONType(conn redis.Conn, key string, path string) (res interface{}, err error) {\n\tname, args, _ := CommandBuilder(\"JSON.TYPE\", key, path)\n\treturn conn.Do(name, args...)\n}", "func JsonEncode(i interface{}) string {\n\tb, err := json.Marshal(i)\n\n\tif err != nil {\n\t\tfmt.Println(\"util.getJsonStr.error\", err)\n\t\treturn \"\"\n\t}\n\n\treturn string(b)\n}", "func (re RawExtension) MarshalJSON() ([]byte, error) {\n\tif re.Raw == nil {\n\t\t// TODO: this is to support legacy behavior of JSONPrinter and YAMLPrinter, which\n\t\t// expect to call json.Marshal on arbitrary versioned objects (even those not in\n\t\t// the scheme). 
pkg/kubectl/resource#AsVersionedObjects and its interaction with\n\t\t// kubectl get on objects not in the scheme needs to be updated to ensure that the\n\t\t// objects that are not part of the scheme are correctly put into the right form.\n\t\tif re.Object != nil {\n\t\t\treturn json.Marshal(re.Object)\n\t\t}\n\t\treturn []byte(\"null\"), nil\n\t}\n\t// TODO: Check whether ContentType is actually JSON before returning it.\n\treturn re.Raw, nil\n}", "func (ppbo PutPropertyBatchOperation) MarshalJSON() ([]byte, error) {\n\tppbo.Kind = KindPut\n\tobjectMap := make(map[string]interface{})\n\tobjectMap[\"Value\"] = ppbo.Value\n\tif ppbo.CustomTypeID != nil {\n\t\tobjectMap[\"CustomTypeId\"] = ppbo.CustomTypeID\n\t}\n\tif ppbo.PropertyName != nil {\n\t\tobjectMap[\"PropertyName\"] = ppbo.PropertyName\n\t}\n\tif ppbo.Kind != \"\" {\n\t\tobjectMap[\"Kind\"] = ppbo.Kind\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (r *StoredInfoType) marshal(c *Client) ([]byte, error) {\n\tm, err := expandStoredInfoType(c, r)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error marshalling StoredInfoType: %w\", err)\n\t}\n\tm = encodeStoredInfoTypeCreateRequest(m)\n\n\treturn json.Marshal(m)\n}", "func (v Event) MarshalEasyJSON(w *jwriter.Writer) {\n\teasyjson6601e8cdEncodeGithubComGoParkMailRu2018242GameServerTypes12(w, v)\n}", "func (u UpdateTrustedIDProviderParameters) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", u.Properties)\n\treturn json.Marshal(objectMap)\n}", "func (ssru SQLServerRegistrationUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif ssru.Tags != nil {\n\t\tobjectMap[\"tags\"] = ssru.Tags\n\t}\n\treturn json.Marshal(objectMap)\n}", "func MarshalIndent(v interface{}, errMsg string) string {\n\tjsonBytes, err := json.MarshalIndent(v, \"\", JSON_INDENT)\n\tif err != nil {\n\t\tFatal(JSON_PARSING_ERROR, i18n.GetMessagePrinter().Sprintf(\"failed to marshal data type from %s: %v\", errMsg, err))\n\t}\n\treturn string(jsonBytes)\n}", "func Marshal(o interface{}) ([]byte, error) {\n\tj, err := json.Marshal(o)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error marshaling into JSON: %v\", err)\n\t}\n\n\ty, err := JSONToYAML(j)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error converting JSON to YAML: %v\", err)\n\t}\n\n\treturn y, nil\n}", "func (v Signature) EncodeJSON(b []byte) []byte {\n\tb = append(b, `{\"hex_bytes\":`...)\n\tb = json.AppendHexBytes(b, v.Bytes)\n\tb = append(b, `,\"public_key\":`...)\n\tb = v.PublicKey.EncodeJSON(b)\n\tb = append(b, ',', '\"', 's', 'i', 'g', 'n', 'a', 't', 'u', 'r', 'e', '_', 't', 'y', 'p', 'e', '\"', ':')\n\tb = json.AppendString(b, string(v.SignatureType))\n\tb = append(b, ',', '\"', 's', 'i', 'g', 'n', 'i', 'n', 'g', '_', 'p', 'a', 'y', 'l', 'o', 'a', 'd', '\"', ':')\n\tb = v.SigningPayload.EncodeJSON(b)\n\treturn append(b, \"}\"...)\n}", "func MarshalAny(s *jsonplugin.MarshalState, v *anypb.Any, legacyFieldmask bool) {\n\tif v == nil {\n\t\ts.WriteNil()\n\t\treturn\n\t}\n\n\t// We first need to get the wrapped message out of the Any.\n\tmsg, err := v.UnmarshalNew()\n\tif err != nil {\n\t\ts.SetErrorf(\"failed to unmarshal wrapped message from Any: %w\", err)\n\t}\n\n\tswitch marshaler := msg.(type) {\n\tdefault:\n\t\t// If v doesn't implement jsonplugin.Marshaler, delegate to protojson.\n\t\tMarshalMessage(s, v)\n\tcase jsonplugin.Marshaler:\n\t\t// Instantiate a sub-marshaler with the same configuration and marshal the wrapped message to 
that.\n\t\tsub := s.Sub()\n\t\tmarshaler.MarshalProtoJSON(sub)\n\t\tdata, err := sub.Bytes()\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\n\t\t// We need to prepend the @type field to that object, so we read the { character.\n\t\tbuf := bytes.NewBuffer(data)\n\t\tobjectStart, err := buf.ReadByte()\n\t\tif err != nil {\n\t\t\ts.SetError(err)\n\t\t\treturn\n\t\t}\n\t\tif objectStart != '{' {\n\t\t\ts.SetErrorf(\"marshaled Any is not an object\")\n\t\t\treturn\n\t\t}\n\n\t\t// We take a look at the next token, because if it's a \", we'll need a comma after we write the @type field.\n\t\tnextToken, err := buf.ReadByte()\n\t\tif err != nil {\n\t\t\ts.SetError(err)\n\t\t\treturn\n\t\t}\n\t\tbuf.UnreadByte()\n\n\t\t// Write the opening { and the type field to the main marshaler.\n\t\ts.WriteObjectStart()\n\t\ts.WriteObjectField(\"@type\")\n\t\ts.WriteString(v.GetTypeUrl())\n\n\t\t// If the next token is a \", we have more fields, so we need to write a comma.\n\t\t// Otherwise, it's a } and we don't need a comma.\n\t\tif nextToken == '\"' {\n\t\t\ts.WriteMore()\n\t\t}\n\n\t\t// Write the rest of the buffer (the sub-object without the { character).\n\t\ts.Write(buf.Bytes())\n\tcase *durationpb.Duration,\n\t\t*fieldmaskpb.FieldMask,\n\t\t*structpb.Struct,\n\t\t*structpb.Value,\n\t\t*structpb.ListValue,\n\t\t*timestamppb.Timestamp:\n\n\t\t// Write the opening { and the type field to the main marshaler.\n\t\ts.WriteObjectStart()\n\t\ts.WriteObjectField(\"@type\")\n\t\ts.WriteString(v.GetTypeUrl())\n\n\t\t// Write the comma, and the next field, which is always \"value\" for these types.\n\t\ts.WriteMore()\n\t\ts.WriteObjectField(\"value\")\n\n\t\t// Write the value.\n\t\tswitch msg := msg.(type) {\n\t\tcase *durationpb.Duration:\n\t\t\tMarshalDuration(s, msg)\n\t\tcase *fieldmaskpb.FieldMask:\n\t\t\tif legacyFieldmask {\n\t\t\t\tMarshalLegacyFieldMask(s, msg)\n\t\t\t} else {\n\t\t\t\tMarshalFieldMask(s, msg)\n\t\t\t}\n\t\tcase *structpb.Struct:\n\t\t\tMarshalStruct(s, msg)\n\t\tcase *structpb.Value:\n\t\t\tMarshalValue(s, msg)\n\t\tcase *structpb.ListValue:\n\t\t\tMarshalListValue(s, msg)\n\t\tcase *timestamppb.Timestamp:\n\t\t\tMarshalTimestamp(s, msg)\n\t\t}\n\n\t\t// Write the closing }.\n\t\ts.WriteObjectEnd()\n\t}\n}", "func Marshal(data interface{}, typ DataFormat) []byte {\n\tswitch typ {\n\tcase GOB:\n\t\tvar buf bytes.Buffer\n\t\tgob.NewEncoder(&buf).Encode(data)\n\t\treturn buf.Bytes()\n\n\tcase JSON:\n\t\tbuf, err := json.Marshal(data)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn buf\n\n\tdefault:\n\t\tpanic(fmt.Errorf(\"Unrecognized data type\"))\n\t}\n}", "func marshalArg(arg any) any {\n\tif buf, err := json.Marshal(arg); err == nil {\n\t\targ = string(buf)\n\t}\n\treturn arg\n}", "func (t Type) MarshalJSON() ([]byte, error) {\n\treturn json.Marshal(t.String())\n}", "func MarshalJSON(v interface{}, extensions map[string]interface{}) ([]byte, error) {\n\tmarshaled, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(extensions) == 0 {\n\t\treturn marshaled, nil\n\t}\n\tvar unmarshaled interface{}\n\tif err := json.Unmarshal(marshaled, &unmarshaled); err != nil {\n\t\treturn nil, err\n\t}\n\tasserted := unmarshaled.(map[string]interface{})\n\tfor k, v := range extensions {\n\t\tasserted[k] = v\n\t}\n\tmerged, err := json.Marshal(asserted)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn merged, nil\n}", "func marshalStructOrOrderedList(s any, enc gnmipb.Encoding, cfg *RFC7951JSONConfig) (*gnmipb.TypedValue, error) {\n\tif 
reflect.ValueOf(s).IsNil() {\n\t\treturn nil, nil\n\t}\n\n\tvar (\n\t\tj any\n\t\terr error\n\t\tencfn func(s string) *gnmipb.TypedValue\n\t)\n\n\tswitch enc {\n\tcase gnmipb.Encoding_JSON:\n\t\tj, err = jsonValue(reflect.ValueOf(s), \"\", jsonOutputConfig{jType: Internal})\n\t\tencfn = func(s string) *gnmipb.TypedValue {\n\t\t\treturn &gnmipb.TypedValue{Value: &gnmipb.TypedValue_JsonVal{JsonVal: []byte(s)}}\n\t\t}\n\tcase gnmipb.Encoding_JSON_IETF:\n\t\t// We always prepend the module name when marshalling within a Notification.\n\t\tcfg.AppendModuleName = true\n\t\tj, err = jsonValue(reflect.ValueOf(s), \"\", jsonOutputConfig{jType: RFC7951, rfc7951Config: cfg})\n\t\tencfn = func(s string) *gnmipb.TypedValue {\n\t\t\treturn &gnmipb.TypedValue{Value: &gnmipb.TypedValue_JsonIetfVal{JsonIetfVal: []byte(s)}}\n\t\t}\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"invalid encoding %v\", gnmipb.Encoding_name[int32(enc)])\n\t}\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tjs, err := json.MarshalIndent(j, \"\", \" \")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"cannot encode JSON, %v\", err)\n\t}\n\n\treturn encfn(string(js)), nil\n}", "func Marshal(val interface{}) ([]byte, error) {}", "func (obj TypeKeyReference) MarshalJSON() ([]byte, error) {\n\ttype Alias TypeKeyReference\n\treturn json.Marshal(struct {\n\t\tAction string `json:\"typeId\"`\n\t\t*Alias\n\t}{Action: \"type\", Alias: (*Alias)(&obj)})\n}", "func (e Enum) JSONMarshal(v any) ([]byte, error) {\n\tkey := e.GetKey(v)\n\tdata, err := json.Marshal(&key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn data, nil\n}", "func (sl Slice) MarshalJSON() ([]byte, error) {\n\tnk := len(sl)\n\tb := make([]byte, 0, nk*100+20)\n\tif nk == 0 {\n\t\tb = append(b, []byte(\"null\")...)\n\t\treturn b, nil\n\t}\n\tnstr := fmt.Sprintf(\"[{\\\"n\\\":%d,\", nk)\n\tb = append(b, []byte(nstr)...)\n\tfor i, kid := range sl {\n\t\t// fmt.Printf(\"json out of %v\\n\", kid.PathUnique())\n\t\tknm := kit.Types.TypeName(reflect.TypeOf(kid).Elem())\n\t\ttstr := fmt.Sprintf(\"\\\"type\\\":\\\"%v\\\", \\\"name\\\": \\\"%v\\\"\", knm, kid.UniqueName()) // todo: escape names!\n\t\tb = append(b, []byte(tstr)...)\n\t\tif i < nk-1 {\n\t\t\tb = append(b, []byte(\",\")...)\n\t\t}\n\t}\n\tb = append(b, []byte(\"},\")...)\n\tfor i, kid := range sl {\n\t\tvar err error\n\t\tvar kb []byte\n\t\tkb, err = json.Marshal(kid)\n\t\tif err == nil {\n\t\t\tb = append(b, []byte(\"{\")...)\n\t\t\tb = append(b, kb[1:len(kb)-1]...)\n\t\t\tb = append(b, []byte(\"}\")...)\n\t\t\tif i < nk-1 {\n\t\t\t\tb = append(b, []byte(\",\")...)\n\t\t\t}\n\t\t} else {\n\t\t\tfmt.Printf(\"error doing json.Marshall from kid: %v\\n\", kid.PathUnique())\n\t\t\tlog.Println(err)\n\t\t\tfmt.Printf(\"output to point of error: %v\\n\", string(b))\n\t\t}\n\t}\n\tb = append(b, []byte(\"]\")...)\n\t// fmt.Printf(\"json out: %v\\n\", string(b))\n\treturn b, nil\n}" ]
[ "0.66125995", "0.660554", "0.65499395", "0.6423218", "0.63709205", "0.6357944", "0.6298685", "0.62594444", "0.6257283", "0.6255777", "0.6218893", "0.6200771", "0.6188309", "0.61856353", "0.6164007", "0.61423665", "0.61347765", "0.61151165", "0.6094529", "0.6088757", "0.6045098", "0.60424817", "0.60412693", "0.60351145", "0.6033484", "0.59998673", "0.5995388", "0.59817696", "0.5957911", "0.5955789", "0.59350926", "0.5926118", "0.59070635", "0.58695734", "0.5861235", "0.58612084", "0.5852372", "0.5837987", "0.5833598", "0.58329004", "0.58261764", "0.58179384", "0.58167696", "0.5808081", "0.5801243", "0.57997394", "0.5798957", "0.5797299", "0.57933414", "0.57919884", "0.5781104", "0.5773036", "0.57695466", "0.57567155", "0.5740344", "0.57387286", "0.5738669", "0.57315207", "0.5730725", "0.5726718", "0.5720236", "0.57166183", "0.57110196", "0.5708552", "0.5707783", "0.5694897", "0.5693031", "0.5685344", "0.56727815", "0.5654002", "0.564501", "0.56427014", "0.564258", "0.56381005", "0.5634662", "0.5634549", "0.56317157", "0.5631672", "0.5627799", "0.5620804", "0.5620079", "0.56196934", "0.5609376", "0.56089526", "0.5601041", "0.5599365", "0.55989206", "0.55972517", "0.5595734", "0.5589864", "0.55898273", "0.55897176", "0.5588712", "0.5586698", "0.55736196", "0.5571329", "0.5570682", "0.55704314", "0.5565398", "0.556387", "0.5562376" ]
0.0
-1
Deprecated: This has been replaced by github.com/oapicodegen/runtime/typesUnmarshalJSON
func (d *Date) UnmarshalJSON(data []byte) error {
	var dateStr string
	err := json.Unmarshal(data, &dateStr)
	if err != nil {
		return err
	}
	parsed, err := time.Parse(DateFormat, dateStr)
	if err != nil {
		return err
	}
	d.Time = parsed
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (j *Type) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (x *MetadataUpdateType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = MetadataUpdateType(num)\n\treturn nil\n}", "func (t *Type) UnmarshalJSON(b []byte) error {\n\tvar text string\n\tif err := json.Unmarshal(b, &text); err != nil {\n\t\treturn err\n\t}\n\n\treturn t.UnmarshalText([]byte(text))\n}", "func (this *Simple) UnmarshalJSON(b []byte) error {\n\treturn TypesUnmarshaler.Unmarshal(bytes.NewReader(b), this)\n}", "func (x *UpdateType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = UpdateType(num)\n\treturn nil\n}", "func (t *Type) UnmarshalJSON(data []byte) error {\n\t// First unmarshal only the unambiguous fields.\n\tvar x struct {\n\t\tKind string `json:\"kind\"`\n\t\tItems []*Type `json:\"items\"`\n\t\tElement *Type `json:\"element\"`\n\t\tName string `json:\"name\"`\n\t\tKey *Type `json:\"key\"`\n\t\tValue any `json:\"value\"`\n\t\tLine int `json:\"line\"`\n\t}\n\tif err := json.Unmarshal(data, &x); err != nil {\n\t\treturn err\n\t}\n\t*t = Type{\n\t\tKind: x.Kind,\n\t\tItems: x.Items,\n\t\tElement: x.Element,\n\t\tName: x.Name,\n\t\tValue: x.Value,\n\t\tLine: x.Line,\n\t}\n\n\t// Then unmarshal the 'value' field based on the kind.\n\t// This depends on Unmarshal ignoring fields it doesn't know about.\n\tswitch x.Kind {\n\tcase \"map\":\n\t\tvar x struct {\n\t\t\tKey *Type `json:\"key\"`\n\t\t\tValue *Type `json:\"value\"`\n\t\t}\n\t\tif err := json.Unmarshal(data, &x); err != nil {\n\t\t\treturn fmt.Errorf(\"Type.kind=map: %v\", err)\n\t\t}\n\t\tt.Key = x.Key\n\t\tt.Value = x.Value\n\n\tcase \"literal\":\n\t\tvar z struct {\n\t\t\tValue ParseLiteral `json:\"value\"`\n\t\t}\n\n\t\tif err := json.Unmarshal(data, &z); err != nil {\n\t\t\treturn fmt.Errorf(\"Type.kind=literal: %v\", err)\n\t\t}\n\t\tt.Value = z.Value\n\n\tcase \"base\", \"reference\", \"array\", \"and\", \"or\", \"tuple\",\n\t\t\"stringLiteral\":\n\t\t// nop. never seen integerLiteral or booleanLiteral.\n\n\tdefault:\n\t\treturn fmt.Errorf(\"cannot decode Type.kind %q: %s\", x.Kind, data)\n\t}\n\treturn nil\n}", "func Unmarshal(data []byte, typ DataFormat, target interface{}) {\n\tswitch typ {\n\tcase GOB:\n\t\tbuf := bytes.NewReader(data)\n\t\tgob.NewDecoder(buf).Decode(target)\n\n\tdefault:\n\t\tif err := json.Unmarshal(data, target); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n}", "func JSONToUnstructured(stub, namespace string, mapping *meta.RESTMapping, dynamicClient dynamic.Interface) (dynamic.ResourceInterface, *unstructured.Unstructured, error) {\n\ttypeMetaAdder := map[string]interface{}{}\n\tif err := json.Unmarshal([]byte(stub), &typeMetaAdder); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// we don't require GVK on the data we provide, so we fill it in here. 
We could, but that seems extraneous.\n\ttypeMetaAdder[\"apiVersion\"] = mapping.GroupVersionKind.GroupVersion().String()\n\ttypeMetaAdder[\"kind\"] = mapping.GroupVersionKind.Kind\n\n\tif mapping.Scope == meta.RESTScopeRoot {\n\t\tnamespace = \"\"\n\t}\n\n\treturn dynamicClient.Resource(mapping.Resource).Namespace(namespace), &unstructured.Unstructured{Object: typeMetaAdder}, nil\n}", "func (this *ImportedReference) UnmarshalJSON(b []byte) error {\n\treturn TypesUnmarshaler.Unmarshal(bytes.NewReader(b), this)\n}", "func (u *TSignatureType) UnmarshalJSON(b []byte) error {\n\t// Declare temporary struct without functions to avoid recursive function call\n\ttype Alias TSignatureType\n\n\t// Copy values into temporary struct\n\ttemp := &struct {\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(u),\n\t}\n\n\t// Unmarshal JSON\n\tif err := json.Unmarshal(b, &temp); err != nil {\n\t\treturn err\n\t}\n\n\t// Decode base64 value\n\tbyteValue, err := base64.StdEncoding.DecodeString(u.XsdGoPkgCDATA)\n\tif err != nil {\n\t\treturn err\n\t}\n\tu.XsdGoPkgCDATA = string(byteValue)\n\n\treturn nil\n}", "func (receiver *Type) UnmarshalJSON(src []byte) error {\n\tif nil == receiver {\n\t\treturn errNilReceiver\n\t}\n\n\tvar s string\n\tif err := json.Unmarshal(src, &s); nil != err {\n\t\treturn err\n\t}\n\n\tif err := receiver.Scan(s); nil != err {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (pl PLUtil) Unmarshal(data []byte, v interface{}) error {\n\tcmd := pl.execCommand(\n\t\t\"plutil\",\n\t\t\"-convert\", \"json\",\n\t\t// Read from stdin.\n\t\t\"-\",\n\t\t// Output to stdout.\n\t\t\"-o\", \"-\")\n\tcmd.Stdin = bytes.NewReader(data)\n\tstdout, err := cmd.Output()\n\tif exitErr, ok := err.(*exec.ExitError); ok {\n\t\treturn fmt.Errorf(\"`%s` failed (%w) with stderr: %s\", cmd, err, exitErr.Stderr)\n\t}\n\tif err != nil {\n\t\treturn fmt.Errorf(\"`%s` failed (%w)\", cmd, err)\n\t}\n\tif err := json.Unmarshal(stdout, v); err != nil {\n\t\treturn fmt.Errorf(\"failed to parse json: %w\", err)\n\t}\n\treturn nil\n}", "func unmarshalJSON(j extv1.JSON, output *any) error {\n\tif len(j.Raw) == 0 {\n\t\treturn nil\n\t}\n\treturn json.Unmarshal(j.Raw, output)\n}", "func (j *jsonNative) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (x *NotifyType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = NotifyType(num)\n\treturn nil\n}", "func (v *Node) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson6601e8cdDecodeGithubComSkydiveProjectSkydiveGraffitiApiTypes1(&r, v)\n\treturn r.Error()\n}", "func (j *ThirdParty) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func deJSONify(i interface{}) (interface{}, error) {\n\tvar data []byte\n\tswitch t := i.(type) {\n\tcase string:\n\t\tdata = []byte(t)\n\tcase []byte:\n\t\tdata = t\n\tcase json.RawMessage:\n\t\tdata = []byte(t)\n\tdefault:\n\t\treturn i, nil\n\t}\n\tvar x interface{}\n\tif err := json.Unmarshal(data, &x); err != nil {\n\t\treturn nil, &kivik.Error{HTTPStatus: http.StatusBadRequest, Err: err}\n\t}\n\treturn x, nil\n}", "func Unmarshal(data []byte) (interface{}, error) {\n\tvar value marble\n\terr := json.Unmarshal(data, &value)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &value, nil\n}", "func UnmarshalFromJSON(data []byte, target interface{}) error 
{\n\tvar ctx map[string]interface{}\n\terr := json.Unmarshal(data, &ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn Unmarshal(ctx, target)\n}", "func Unmarshal(b []byte, v interface{}) error {\n\treturn json.Unmarshal(b, v)\n}", "func UnmarshalJSON(b []byte) (dgo.Value, error) {\n\tdec := json.NewDecoder(bytes.NewReader(b))\n\tdec.UseNumber()\n\treturn jsonDecodeValue(dec)\n}", "func (x *Event_Type) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = Event_Type(num)\n\treturn nil\n}", "func (j *JSON) Unmarshal(input, target interface{}) error {\n\t// take the input and convert it to target\n\treturn jsonEncoding.Unmarshal(input.([]byte), target)\n}", "func jsonDec(t reflect.Type, in []byte) (T, error) {\n\tval := reflect.New(t)\n\tif err := jsonx.Unmarshal(val.Interface(), in); err != nil {\n\t\treturn nil, err\n\t}\n\treturn val.Elem().Interface(), nil\n}", "func (doc *T) UnmarshalJSON(data []byte) error {\n\ttype TBis T\n\tvar x TBis\n\tif err := json.Unmarshal(data, &x); err != nil {\n\t\treturn err\n\t}\n\t_ = json.Unmarshal(data, &x.Extensions)\n\tdelete(x.Extensions, \"swagger\")\n\tdelete(x.Extensions, \"info\")\n\tdelete(x.Extensions, \"externalDocs\")\n\tdelete(x.Extensions, \"schemes\")\n\tdelete(x.Extensions, \"consumes\")\n\tdelete(x.Extensions, \"produces\")\n\tdelete(x.Extensions, \"host\")\n\tdelete(x.Extensions, \"basePath\")\n\tdelete(x.Extensions, \"paths\")\n\tdelete(x.Extensions, \"definitions\")\n\tdelete(x.Extensions, \"parameters\")\n\tdelete(x.Extensions, \"responses\")\n\tdelete(x.Extensions, \"securityDefinitions\")\n\tdelete(x.Extensions, \"security\")\n\tdelete(x.Extensions, \"tags\")\n\t*doc = T(x)\n\treturn nil\n}", "func Unmarshal(jsoned []byte, out interface{}) error {\n\terr := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(jsoned, out)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unmarshaling error: %w\", jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(jsoned, out))\n\t}\n\treturn nil\n}", "func (x *FieldInfo_Type) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = FieldInfo_Type(num)\n\treturn nil\n}", "func (v *Event) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson6601e8cdDecodeGithubComGoParkMailRu2018242GameServerTypes12(&r, v)\n\treturn r.Error()\n}", "func (u *TDigestMethodType) UnmarshalJSON(b []byte) error {\n\t// Declare temporary struct without functions to avoid recursive function call\n\ttype Alias TDigestMethodType\n\n\t// Copy values into temporary struct\n\ttemp := &struct {\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(u),\n\t}\n\n\t// Unmarshal JSON\n\tif err := json.Unmarshal(b, &temp); err != nil {\n\t\treturn err\n\t}\n\n\t// Decode base64 value\n\tbyteValue, err := base64.StdEncoding.DecodeString(u.XsdGoPkgCDATA)\n\tif err != nil {\n\t\treturn err\n\t}\n\tu.XsdGoPkgCDATA = string(byteValue)\n\n\treturn nil\n}", "func (w *Entry) UnmarshalJSON(bb []byte) error {\n\t<<!!YOUR_CODE!!>>\n}", "func (TagsRemoved) Unmarshal(v []byte) (interface{}, error) {\n\te := TagsRemoved{}\n\terr := json.Unmarshal(v, &e)\n\treturn e, err\n}", "func (j *AvroTypeWrapper) UnmarshalJSON(data []byte) error {\n\terr := json.Unmarshal(data, &j.Type)\n\tif err == nil {\n\t\treturn nil\n\t}\n\n\terr = json.Unmarshal(data, &j.TypeList)\n\tif err == nil {\n\t\treturn nil\n\t}\n\n\terr = json.Unmarshal(data, &j.TypeObj)\n\tif err == nil 
{\n\t\treturn nil\n\t}\n\n\treturn errors.Wrap(err, \"could not unmarshal data - unknown type\")\n}", "func (TagsAdded) Unmarshal(v []byte) (interface{}, error) {\n\te := TagsAdded{}\n\terr := json.Unmarshal(v, &e)\n\treturn e, err\n}", "func (v *OneLike) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\tdecodeOneLike(&r, v)\n\treturn r.Error()\n}", "func (a *ActivityReportVulnerabilityTypesUpdated) UnmarshalJSON(b []byte) error {\n\tvar helper activityReportVulnerabilityTypesUpdatedUnmarshalHelper\n\tif err := json.Unmarshal(b, &helper); err != nil {\n\t\treturn err\n\t}\n\ta.OldVulnerabilityTypes = helper.Relationships.OldVulnerabilityTypes.Data\n\ta.NewVulnerabilityTypes = helper.Relationships.NewVulnerabilityTypes.Data\n\treturn nil\n}", "func (a *AvailablePrivateEndpointTypesResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &a.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &a.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (v *Raw) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson42239ddeDecodeGithubComKhliengDispatchServer10(&r, v)\n\treturn r.Error()\n}", "func unmarshal(s *string, v interface{}) error {\n\tif s != nil && len(*s) > 0 {\n\t\terr := json.Unmarshal([]byte(*s), v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func Unmarshal(weatherJson []byte) WeatherData {\n\tvar data WeatherData //[]map[string]interface{}\n\n\terr := json.Unmarshal(weatherJson, &data)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\treturn data\n}", "func (v *OneUpdateLike) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\tdecodeOneUpdateLike(&r, v)\n\treturn r.Error()\n}", "func Unmarshal(data []byte, v any) error {\n\tif pb, ok := v.(proto.Message); ok {\n\t\topts := protojson.UnmarshalOptions{DiscardUnknown: true}\n\t\treturn annotate(data, opts.Unmarshal(data, pb))\n\t}\n\treturn annotate(data, json.Unmarshal(data, v))\n}", "func (j *UnInstallPacket) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (t *TherapistType) UnmarshalJSON(d []byte) error {\n\tvar s string\n\tif err := json.Unmarshal(d, &s); err != nil {\n\t\treturn errors.Wrap(err, \"can't unmarshal therapist type\")\n\t}\n\treturn t.FromString(s)\n}", "func (j *Event) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func Unmarshal(v starlark.Value) (interface{}, error) {\n\tswitch v.Type() {\n\tcase \"NoneType\":\n\t\treturn nil, nil\n\tcase \"bool\":\n\t\treturn v.Truth() == starlark.True, nil\n\tcase \"int\":\n\t\treturn starlark.AsInt32(v)\n\tcase \"float\":\n\t\tif float, ok := starlark.AsFloat(v); ok {\n\t\t\treturn float, nil\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"couldn't parse float\")\n\t\t}\n\tcase \"string\":\n\t\treturn strconv.Unquote(v.String())\n\tcase \"dict\":\n\t\tif dict, ok := v.(*starlark.Dict); ok {\n\t\t\tvar values = map[string]interface{}{}\n\t\t\tfor _, key := range dict.Keys() 
{\n\t\t\t\tvalue, _, err := dict.Get(key)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\ttemp, err := Unmarshal(value)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tvalues[AsString(key)] = temp\n\t\t\t}\n\t\t\treturn values, nil\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"error parsing dict. invalid type: %v\", v)\n\t\t}\n\tcase \"list\":\n\t\tif list, ok := v.(*starlark.List); ok {\n\t\t\tvar element starlark.Value\n\t\t\tvar iterator = list.Iterate()\n\t\t\tvar value = make([]interface{}, 0)\n\t\t\tfor iterator.Next(&element) {\n\t\t\t\ttemp, err := Unmarshal(element)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tvalue = append(value, temp)\n\t\t\t}\n\t\t\titerator.Done()\n\t\t\treturn value, nil\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"error parsing list. invalid type: %v\", v)\n\t\t}\n\tcase \"tuple\":\n\t\tif tuple, ok := v.(starlark.Tuple); ok {\n\t\t\tvar element starlark.Value\n\t\t\tvar iterator = tuple.Iterate()\n\t\t\tvar value = make([]interface{}, 0)\n\t\t\tfor iterator.Next(&element) {\n\t\t\t\ttemp, err := Unmarshal(element)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tvalue = append(value, temp)\n\t\t\t}\n\t\t\titerator.Done()\n\t\t\treturn value, nil\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"error parsing dict. invalid type: %v\", v)\n\t\t}\n\tcase \"set\":\n\t\treturn nil, fmt.Errorf(\"sets aren't yet supported\")\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unrecognized starlark type: %s\", v.Type())\n\t}\n}", "func (j *LuaFunction) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (sti *ServiceTypeInfo) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"ServiceTypeDescription\":\n\t\t\tif v != nil {\n\t\t\t\tserviceTypeDescription, err := unmarshalBasicServiceTypeDescription(*v)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tsti.ServiceTypeDescription = serviceTypeDescription\n\t\t\t}\n\t\tcase \"ServiceManifestName\":\n\t\t\tif v != nil {\n\t\t\t\tvar serviceManifestName string\n\t\t\t\terr = json.Unmarshal(*v, &serviceManifestName)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tsti.ServiceManifestName = &serviceManifestName\n\t\t\t}\n\t\tcase \"ServiceManifestVersion\":\n\t\t\tif v != nil {\n\t\t\t\tvar serviceManifestVersion string\n\t\t\t\terr = json.Unmarshal(*v, &serviceManifestVersion)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tsti.ServiceManifestVersion = &serviceManifestVersion\n\t\t\t}\n\t\tcase \"IsServiceGroup\":\n\t\t\tif v != nil {\n\t\t\t\tvar isServiceGroup bool\n\t\t\t\terr = json.Unmarshal(*v, &isServiceGroup)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tsti.IsServiceGroup = &isServiceGroup\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (x *CodeGeneratorResponse_Feature) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = CodeGeneratorResponse_Feature(num)\n\treturn nil\n}", "func (x *TransportType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = TransportType(num)\n\treturn nil\n}", "func (x *ProbeConf_Type) UnmarshalJSON(b []byte) 
error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = ProbeConf_Type(num)\n\treturn nil\n}", "func unmarshal() {\n\tfmt.Println(\"=== json.unmarshal ===\")\n\tvar jsonBlob = []byte(`[\n\t\t{\"name\": \"Bill\", \"age\": 109},\n\t\t{\"name\": \"Bob\", \"age\": 5}\n\t]`)\n\n\tvar persons []Person\n\terr := json.Unmarshal(jsonBlob, &persons)\n\tcheck(err)\n\n\tfmt.Printf(\"%+v\\n\", persons)\n}", "func Unmarshal(data []byte, v interface{}) error {\n\terr := json.Unmarshal(data, v)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif ImplementsPostJSONUnmarshaler(v) {\n\t\terr := v.(PostJSONUnmarshaler).PostUnmarshalJSON()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func unmarshal(data []byte, v interface{}) {\n\terr := json.Unmarshal(data, v)\n\tassert(err == nil, \"unmarshal error: %s\", err)\n}", "func Unmarshal(data []byte, v Unmarshaler) error {\n\tl := jlexer.Lexer{Data: data}\n\tv.UnmarshalTinyJSON(&l)\n\treturn l.Error()\n}", "func UnmarshalJSON(data []byte, v interface{}) {\n\terr := json.Unmarshal(data, v)\n\tAbortIf(err)\n}", "func (response Response) Unmarshal(value interface{}) error {\n\tvalues := map[string]interface{}{}\n\terr := json.Unmarshal(response.Raw.Json, &values)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdecoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{\n\t\tDecodeHook: func(from reflect.Value, to reflect.Value) (interface{}, error) {\n\t\t\tif _, ok := to.Interface().(time.Time); ok {\n\t\t\t\treturn time.Parse(time.RFC3339, from.String())\n\t\t\t}\n\t\t\treturn from.Interface(), nil\n\t\t},\n\t\tResult: value,\n\t\tTagName: \"json\",\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif response.dataKeyPath != \"\" {\n\t\treturn decoder.Decode(values[response.dataKeyPath])\n\t}\n\n\treturn decoder.Decode(values)\n}", "func (ft *FieldType) UnmarshalJSON(data []byte) error {\n\tvar r jsonFieldType\n\terr := json.Unmarshal(data, &r)\n\tif err == nil {\n\t\tft.tp = r.Tp\n\t\tft.flag = r.Flag\n\t\tft.flen = r.Flen\n\t\tft.decimal = r.Decimal\n\t\tft.charset = r.Charset\n\t\tft.collate = r.Collate\n\t\tft.elems = r.Elems\n\t\tft.elemsIsBinaryLit = r.ElemsIsBinaryLit\n\t\tft.array = r.Array\n\t}\n\treturn err\n}", "func unmarshal(text []byte, fields *map[string]interface{}) error {\n\tdec := json.NewDecoder(bytes.NewReader(text))\n\tdec.UseNumber()\n\terr := dec.Decode(fields)\n\tif err != nil {\n\t\treturn err\n\t}\n\tjsontransform.TransformNumbers(*fields)\n\treturn nil\n}", "func Unmarshal(s *any.Any) interface{} {\n\treturn arch.DecodeType(s.TypeUrl, s.Value)\n}", "func (t *SDPType) UnmarshalJSON(b []byte) error {\n\tvar s string\n\tif err := json.Unmarshal(b, &s); err != nil {\n\t\treturn err\n\t}\n\tswitch strings.ToLower(s) {\n\tdefault:\n\t\treturn ErrUnknownType\n\tcase \"offer\":\n\t\t*t = SDPTypeOffer\n\tcase \"pranswer\":\n\t\t*t = SDPTypePranswer\n\tcase \"answer\":\n\t\t*t = SDPTypeAnswer\n\tcase \"rollback\":\n\t\t*t = SDPTypeRollback\n\t}\n\n\treturn nil\n}", "func (i *EventType) UnmarshalJSON(data []byte) error {\n\tvar s string\n\tif err := json.Unmarshal(data, &s); err != nil {\n\t\treturn fmt.Errorf(\"EventType should be a string, got %s\", data)\n\t}\n\n\tvar err error\n\t*i, err = EventTypeString(s)\n\treturn err\n}", "func (r *InfoResponse) UnmarshalJSON(data []byte) error {\n\ttype Alias InfoResponse\n\taux := &struct {\n\t\tVersion interface{} `json:\"version\"`\n\t\tAuthorProfile interface{} `json:\"author_profile\"`\n\t\tRequires 
interface{} `json:\"requires\"`\n\t\tRequiresPHP interface{} `json:\"requires_php\"`\n\t\tTested interface{} `json:\"tested\"`\n\t\tContributors interface{} `json:\"contributors\"`\n\t\tRatings interface{} `json:\"ratings\"`\n\t\tNumRatings interface{} `json:\"num_ratings\"`\n\t\tScreenshots interface{} `json:\"screenshots\"`\n\t\tTags interface{} `json:\"tags\"`\n\t\tVersions interface{} `json:\"versions\"`\n\t\t*Alias\n\t}{\n\t\tAlias: (*Alias)(r),\n\t}\n\tif err := json.Unmarshal(data, &aux); err != nil {\n\t\treturn err\n\t}\n\n\t// Set Version as string\n\tswitch v := aux.Version.(type) {\n\tcase string:\n\t\tr.Version = v\n\tcase int:\n\t\tr.Version = strconv.Itoa(v)\n\tdefault:\n\t\tr.Version = \"\"\n\t}\n\n\t// AuthorProfile can occasionally be a boolean (false)\n\tswitch v := aux.AuthorProfile.(type) {\n\tcase string:\n\t\tr.AuthorProfile = v\n\tdefault:\n\t\tr.AuthorProfile = \"\"\n\t}\n\n\t// Requires can occasionally be a boolean (false)\n\tswitch v := aux.Requires.(type) {\n\tcase string:\n\t\tr.Requires = v\n\tdefault:\n\t\tr.Requires = \"\"\n\t}\n\n\t// Tested can occasionally be a boolean (false)\n\tswitch v := aux.Tested.(type) {\n\tcase string:\n\t\tr.Requires = v\n\tdefault:\n\t\tr.Requires = \"\"\n\t}\n\n\t// RequiresPHP can occasionally be a boolean (false)\n\tswitch v := aux.RequiresPHP.(type) {\n\tcase string:\n\t\tr.RequiresPHP = v\n\tdefault:\n\t\tr.RequiresPHP = \"\"\n\t}\n\n\t// RequiresPHP can occasionally be a boolean (false)\n\tswitch v := aux.RequiresPHP.(type) {\n\tcase string:\n\t\tr.RequiresPHP = v\n\tdefault:\n\t\tr.RequiresPHP = \"\"\n\t}\n\n\t// Parse Contributors\n\tif aux.Contributors != nil && reflect.TypeOf(aux.Contributors).Kind() == reflect.Map {\n\t\tfor k, v := range aux.Contributors.(map[string]interface{}) {\n\t\t\tcontrib := []string{\n\t\t\t\tk, v.(string),\n\t\t\t}\n\t\t\tr.Contributors = append(r.Contributors, contrib)\n\t\t}\n\t}\n\n\t// Parse Ratings\n\tif reflect.TypeOf(aux.Ratings).Kind() == reflect.Map {\n\t\tfor k, v := range aux.Ratings.(map[string]interface{}) {\n\t\t\tvar num int\n\t\t\tvar err error\n\t\t\tswitch t := v.(type) {\n\t\t\tcase float64:\n\t\t\t\tnum = int(t)\n\t\t\tcase string:\n\t\t\t\tnum, err = strconv.Atoi(t)\n\t\t\t\tif err != nil {\n\t\t\t\t\tnum = 0\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\tnum = 0\n\t\t\t}\n\t\t\trating := Rating{\n\t\t\t\tStars: k,\n\t\t\t\tNumber: num,\n\t\t\t}\n\t\t\tr.Ratings = append(r.Ratings, rating)\n\t\t}\n\t}\n\n\t// NumRatings can be a string \"0\" when zero\n\tswitch v := aux.NumRatings.(type) {\n\tcase int:\n\t\tr.NumRatings = v\n\tcase string:\n\t\tnum, err := strconv.Atoi(v)\n\t\tif err != nil {\n\t\t\tr.NumRatings = 0\n\t\t} else {\n\t\t\tr.NumRatings = num\n\t\t}\n\tdefault:\n\t\tr.NumRatings = 0\n\t}\n\n\t// Parse Screenshots\n\tif reflect.TypeOf(aux.Screenshots).Kind() == reflect.Map {\n\t\tfor _, v := range aux.Screenshots.(map[string]interface{}) {\n\t\t\ts := v.(map[string]interface{})\n\t\t\tscreenshot := Screenshot{\n\t\t\t\tSrc: s[\"src\"].(string),\n\t\t\t}\n\t\t\t// Handle different types for caption\n\t\t\t// Can sometimes be boolean instead of string\n\t\t\tswitch v := s[\"caption\"].(type) {\n\t\t\tcase bool:\n\t\t\t\tscreenshot.Caption = \"\"\n\t\t\tcase string:\n\t\t\t\tscreenshot.Caption = v\n\t\t\tdefault:\n\t\t\t\tscreenshot.Caption = \"\"\n\t\t\t}\n\t\t\tr.Screenshots = append(r.Screenshots, screenshot)\n\t\t}\n\t}\n\n\t// Parse Tags\n\tif reflect.TypeOf(aux.Tags).Kind() == reflect.Map {\n\t\tfor k, v := range aux.Tags.(map[string]interface{}) {\n\t\t\ttag := 
[]string{\n\t\t\t\tk, v.(string),\n\t\t\t}\n\t\t\tr.Tags = append(r.Tags, tag)\n\t\t}\n\t}\n\n\t// Parse Versions\n\tif reflect.TypeOf(aux.Versions).Kind() == reflect.Map {\n\t\tfor k, v := range aux.Versions.(map[string]interface{}) {\n\t\t\tversion := []string{\n\t\t\t\tk, v.(string),\n\t\t\t}\n\t\t\tr.Versions = append(r.Versions, version)\n\t\t}\n\t}\n\n\treturn nil\n}", "func UnmarshalJSON(body io.Reader, v interface{}) error {\n\tdecoder := json.NewDecoder(body)\n\treturn decoder.Decode(v)\n}", "func UnmarshalType(b []byte, outputType reflect.Type) (interface{}, error) {\n\n\tif len(b) == 0 {\n\t\treturn nil, ErrEmptyInput\n\t}\n\n\tswitch string(b) {\n\tcase \"true\":\n\t\tif outputType.Kind() != reflect.Bool {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.Bool}}\n\t\t}\n\t\treturn true, nil\n\tcase \"false\":\n\t\tif outputType.Kind() != reflect.Bool {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.Bool}}\n\t\t}\n\t\treturn false, nil\n\tcase \"null\":\n\t\treturn nil, nil\n\t}\n\n\tfirst, _ := utf8.DecodeRune(b)\n\tif first == utf8.RuneError {\n\t\treturn nil, ErrInvalidRune\n\t}\n\n\tswitch first {\n\tcase '[', '-':\n\t\tif outputType.Kind() != reflect.Slice {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.Slice}}\n\t\t}\n\t\tptr := reflect.New(outputType)\n\t\tptr.Elem().Set(reflect.MakeSlice(outputType, 0, 0))\n\t\terr := goyaml.Unmarshal(b, ptr.Interface())\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling JSON %q\", string(b)))\n\t\t}\n\t\treturn ptr.Elem().Interface(), nil\n\tcase '{':\n\t\tif k := outputType.Kind(); k != reflect.Map {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.Map}}\n\t\t}\n\t\tptr := reflect.New(outputType)\n\t\tptr.Elem().Set(reflect.MakeMap(outputType))\n\t\terr := goyaml.Unmarshal(b, ptr.Interface())\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling JSON %q\", string(b)))\n\t\t}\n\t\treturn ptr.Elem().Interface(), nil\n\tcase '\"':\n\t\tif k := outputType.Kind(); k != reflect.String {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.String}}\n\t\t}\n\t\tobj := \"\"\n\t\terr := goyaml.Unmarshal(b, &obj)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling JSON %q\", string(b)))\n\t\t}\n\t\treturn obj, nil\n\t}\n\n\tif strings.Contains(string(b), \"\\n\") {\n\t\tif k := outputType.Kind(); k != reflect.Map {\n\t\t\treturn nil, &ErrInvalidKind{Value: outputType, Expected: []reflect.Kind{reflect.Map}}\n\t\t}\n\t\tptr := reflect.New(outputType)\n\t\tptr.Elem().Set(reflect.MakeMap(outputType))\n\t\terr := goyaml.Unmarshal(b, ptr.Interface())\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling YAML %q\", string(b)))\n\t\t}\n\t\treturn ptr.Elem().Interface(), nil\n\t}\n\n\tswitch outputType.Kind() {\n\tcase reflect.Int:\n\t\ti, err := strconv.Atoi(string(b))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling YAML %q\", string(b)))\n\t\t}\n\t\treturn i, nil\n\tcase reflect.Float64:\n\t\tf, err := strconv.ParseFloat(string(b), 64)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\"error unmarshaling YAML %q\", string(b)))\n\t\t}\n\t\treturn f, nil\n\t}\n\treturn string(b), nil\n}", "func UnmarshalJson(in *pongo2.Value, param *pongo2.Value) (*pongo2.Value, 
*pongo2.Error) {\n\ts := in.String()\n\n\tif s != \"\" {\n\t\tvar result []interface{}\n\t\terr := json.Unmarshal([]byte(s), &result)\n\t\tif err == nil {\n\t\t\tlog.Debugf(\"[UnmarshalJson] JSON OUT LIST ===== %s\", result)\n\t\t\treturn pongo2.AsValue(result), nil\n\t\t}\n\t\tvar result2 map[string]interface{}\n\t\terr2 := json.Unmarshal([]byte(s), &result2)\n\t\tif err2 == nil {\n\t\t\tlog.Debugf(\"[UnmarshalJson] JSON OUT MAP ===== %s\", result2)\n\t\t\treturn pongo2.AsValue(result2), nil\n\t\t}\n\t\treturn nil, &pongo2.Error{\n\t\t\tSender: \"filterUnmarshalJson\",\n\t\t\tErrorMsg: fmt.Sprintf(\"Error unmarshaling value '%v' %s\", err, s),\n\t\t}\n\t}\n\n\treturn pongo2.AsValue([]string{}), nil\n}", "func (v *Features) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson42239ddeDecodeGithubComKhliengDispatchServer25(&r, v)\n\treturn r.Error()\n}", "func (j *User) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (o *options) UnmarshalJSON(raw []byte) error {\n\tostruct := optstruct{}\n\tif err := json.Unmarshal(raw, &ostruct); err != nil {\n\t\treturn instrumentationError(\"failed to unmashal options: %v\", err)\n\t}\n\t*o = options(ostruct)\n\treturn nil\n}", "func (x *EProtoAppType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = EProtoAppType(num)\n\treturn nil\n}", "func (j *JsonlMarshaler) Unmarshal(data []byte, v interface{}) error {\n\treturn json.Unmarshal(data, v)\n}", "func (a *AvailablePrivateEndpointType) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"displayName\":\n\t\t\terr = unpopulate(val, \"DisplayName\", &a.DisplayName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &a.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &a.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceName\":\n\t\t\terr = unpopulate(val, \"ResourceName\", &a.ResourceName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &a.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t\t}\n\t}\n\treturn nil\n}", "func UnwrapRegister(JSON string) (Register, error) {\n\tvar register Register\n\terr := json.Unmarshal([]byte(JSON), &register)\n\tif err != nil {\n\t\treturn Register{}, err\n\t}\n\treturn register, nil\n}", "func (j *qProxyClient) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (s *Serializer) Unmarshal(data []byte, v interface{}) error {\n\treturn jsoniter.Unmarshal(data,v)\n}", "func (x *GtwEventType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = GtwEventType(num)\n\treturn nil\n}", "func (j *UnInstallRespPacket) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func Unmarshal(b []byte) (Payload, error) {\n\tvar p Payload\n\terr := json.Unmarshal(b, &p)\n\treturn p, err\n}", "func (j 
*Response) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (self *TypeDef) UnmarshalJSON(b []byte) error {\n\tvar m rawTypeDef\n\terr := json.Unmarshal(b, &m)\n\tif err == nil {\n\t\to := TypeDef(m)\n\t\t*self = o\n\t\terr = self.Validate()\n\t}\n\treturn err\n}", "func (f genHelperDecoder) DecJSONUnmarshal(tm jsonUnmarshaler) {\n\tf.d.jsonUnmarshalV(tm)\n}", "func jsonUnmarshal(r io.Reader, o interface{}, opts ...JSONOpt) error {\n\td := json.NewDecoder(r)\n\tfor _, opt := range opts {\n\t\td = opt(d)\n\t}\n\tif err := d.Decode(&o); err != nil {\n\t\treturn fmt.Errorf(\"while decoding JSON: %v\", err)\n\t}\n\treturn nil\n}", "func (x *EventType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = EventType(num)\n\treturn nil\n}", "func (e Enum) JSONUnmarshal(v any, data []byte) error {\n\ts := \"\"\n\tif err := json.Unmarshal(data, &s); err != nil {\n\t\treturn err\n\t}\n\treturn e.FlagSet(v, s)\n}", "func (v *SyntheticsTestRequestBodyType) UnmarshalJSON(src []byte) error {\n\tvar value string\n\terr := json.Unmarshal(src, &value)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*v = SyntheticsTestRequestBodyType(value)\n\treturn nil\n}", "func (x *CRIUType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = CRIUType(num)\n\treturn nil\n}", "func (x *MigrationFSType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = MigrationFSType(num)\n\treturn nil\n}", "func Unmarshal(data []byte, v Unmarshaler) error {\n\tl := jlexer.Lexer{Data: data}\n\tv.UnmarshalEasyJSON(&l)\n\treturn l.Error()\n}", "func (x *AnswerType) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = AnswerType(num)\n\treturn nil\n}", "func (v *Element) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjsonB83d7b77DecodeGoplaygroundMyjson2(&r, v)\n\treturn r.Error()\n}", "func (m *HookMessage) Unmarshal(b []byte) error {\n\terr := json.Unmarshal(b, m)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (j *Error) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (x *UserInfoField) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = UserInfoField(num)\n\treturn nil\n}", "func UnmarshalJSON(b []byte, discriminator string, f Factory) (interface{}, error) {\n\tm := make(map[string]interface{})\n\terr := json.Unmarshal(b, &m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn Decode(m, discriminator, f)\n}", "func (this *ReferencedMap) UnmarshalJSON(b []byte) error {\n\treturn TypesUnmarshaler.Unmarshal(bytes.NewReader(b), this)\n}", "func Unmarshal(data []byte, v interface{}) error {\n\treturn ReturnIfError(\n\t\tjson.Unmarshal(data, v),\n\t\tcheckValues(v),\n\t)\n}", "func Unmarshal(data []byte, v interface{}) error {\n\tvalue := reflect.ValueOf(v)\n\texpectedFields := make(map[string]bool)\n\tunionTypes, err := parseUnionTypes(value.Type().Elem())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor key, fields := range unionTypes {\n\t\texpectedFields[key] = 
true\n\t\texpectedValue, ok, err := getTagValue(data, key)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t} else if !ok {\n\t\t\tcontinue\n\t\t}\n\t\tfield, ok := fields[expectedValue]\n\t\tif !ok {\n\t\t\treturn errors.Errorf(\"unexpected %s: %s\", key, expectedValue)\n\t\t}\n\n\t\tif fieldVal := value.Elem().Field(field.index); !fieldVal.IsNil() {\n\t\t\tif err := json.Unmarshal(data, fieldVal.Interface()); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tnested := reflect.New(field.field.Type.Elem())\n\t\t\tif err := json.Unmarshal(data, nested.Interface()); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfieldVal.Set(nested)\n\t\t}\n\n\t\tfor _, other := range fields {\n\t\t\tif other.index == field.index {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvalue.Elem().Field(other.index).Set(reflect.Zero(other.field.Type))\n\t\t}\n\n\t\tfor k := range parseFields(field.field.Type.Elem()) {\n\t\t\texpectedFields[k] = true\n\t\t}\n\t}\n\tfor k := range parseFields(value.Type().Elem()) {\n\t\texpectedFields[k] = true\n\t}\n\treturn checkFields(expectedFields, data)\n}", "func (value *ObjectSearchType) UnmarshalJSON(arg []byte) error {\n\ti, err := objectSearchTypeEnumMap.UnMarshal(arg)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"Failed to unmarshal ObjectSearchType\")\n\t}\n\n\t*value = ObjectSearchType(i)\n\treturn nil\n}", "func (this *Service) UnmarshalJSON(b []byte) error {\n\treturn CommonUnmarshaler.Unmarshal(bytes.NewReader(b), this)\n}", "func (LinkRemoved) Unmarshal(v []byte) (interface{}, error) {\n\te := LinkRemoved{}\n\terr := json.Unmarshal(v, &e)\n\treturn e, err\n}", "func (j *Data) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}" ]
[ "0.68717504", "0.6608546", "0.64842206", "0.64827716", "0.6417563", "0.6328602", "0.6327481", "0.63081324", "0.6281408", "0.6278843", "0.6270395", "0.6254503", "0.62464404", "0.62433", "0.62106514", "0.6200904", "0.6185819", "0.6159575", "0.6141161", "0.6112783", "0.6111504", "0.60936433", "0.6048301", "0.6046798", "0.6040153", "0.60107875", "0.6010093", "0.5997431", "0.5987197", "0.59689474", "0.5958044", "0.59548324", "0.5948115", "0.5944951", "0.59316796", "0.59316474", "0.5924841", "0.59241027", "0.59235966", "0.5923003", "0.59191746", "0.591851", "0.59104574", "0.5908926", "0.5908152", "0.590711", "0.5903063", "0.5894915", "0.5891278", "0.58909523", "0.5886408", "0.58819395", "0.58788544", "0.5872783", "0.58676875", "0.5854676", "0.5844524", "0.58401275", "0.5834732", "0.58282155", "0.5819151", "0.58187586", "0.58180547", "0.58160806", "0.5813088", "0.5808522", "0.5800562", "0.5791016", "0.5789213", "0.57860416", "0.57840157", "0.57809454", "0.5778628", "0.5769753", "0.5759839", "0.57586884", "0.57581204", "0.57570493", "0.57567304", "0.5753743", "0.57462096", "0.57432795", "0.5738553", "0.5737141", "0.57345265", "0.5729613", "0.57293177", "0.57289994", "0.57172006", "0.5713792", "0.57131547", "0.57130307", "0.57096875", "0.57089776", "0.57052916", "0.5704548", "0.5704051", "0.57009935", "0.5700078", "0.5696985", "0.56960034" ]
0.0
-1
Deprecated: This has been replaced by github.com/oapicodegen/runtime/typesString
func (d Date) String() string { return d.Time.Format(DateFormat) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func String(args ...interface{}) dgo.StringType {\n\treturn internal.StringType(args...)\n}", "func (this *NowStr) Type() value.Type { return value.STRING }", "func String(str string) Val { return Val{t: bsontype.String}.writestring(str) }", "func (s *String) Type() ObjectType { return STRING_OBJ }", "func (l settableString) Type() string { return \"<string>\" }", "func newString(value string) *TypedString {\n\ttypedString := TypedString{\n\t\tBytes: []byte(value),\n\t\tType: ValueType_STRING,\n\t}\n\treturn &typedString\n}", "func TypeStringOn(typ dgo.Type, w io.Writer) {\n\tnewTypeBuilder(w, internal.DefaultAliases()).buildTypeString(typ, 0)\n}", "func (s *String) Type() Type {\n\treturn STRING_OBJ\n}", "func (*StringSchema) String() string {\n\treturn `{\"type\": \"string\"}`\n}", "func (b Basic) TypeString() string {\n\treturn b.String()\n}", "func TypeString(typ dgo.Type) string {\n\treturn TypeStringWithAliasMap(typ, internal.DefaultAliases())\n}", "func String(key string, val string) Field {\n\treturn Field{Key: key, Type: core.StringType, String: val}\n}", "func (t Type) String() string {\n\tswitch t {\n\tcase UNKNOWN:\n\t\treturn \"unknown\"\n\tcase STRING:\n\t\treturn \"string\"\n\tcase BOOLEAN:\n\t\treturn \"boolean\"\n\tcase NUMBER:\n\t\treturn \"number\"\n\tcase DATE:\n\t\treturn \"date\"\n\tcase TIME:\n\t\treturn \"time\"\n\tcase DATETIME:\n\t\treturn \"datetime\"\n\tcase LOCATION:\n\t\treturn \"location\"\n\tcase ADDRESS:\n\t\treturn \"address\"\n\t}\n\treturn \"unsupported\"\n}", "func (bs *BoundString) Type() string {\n\treturn \"string\"\n}", "func (t *BaseType) TypeString() string { panic(\"not implemented\") }", "func TypeString(v String) Bool {\n\treturn TrimBool(&typString{\n\t\tE: v,\n\t\thash: hashWithId(2486848561, v),\n\t\thasVariable: v.HasVariable(),\n\t})\n}", "func (s simpleString) String() string { return string(s) }", "func (m Struct) TypeString() string {\n\treturn m.String()\n}", "func (a Alias) TypeString() string {\n\treturn a.Type.TypeString()\n}", "func NewString(value string) *Value {\n\treturn &Value{kind: kindString, stringContent: value}\n}", "func String(f string) Interface {\n\treturn &valueType{\n\t\tname: f,\n\t\tempty: \"\",\n\t\tget: func(g Getter) interface{} {\n\t\t\treturn g.GetString(f)\n\t\t},\n\t}\n}", "func (me TdtypeType) IsString() bool { return me.String() == \"string\" }", "func (t Type) String() string {\n\tswitch t {\n\tcase Int32:\n\t\treturn \"int32\"\n\tcase Int64:\n\t\treturn \"int64\"\n\tcase Float64:\n\t\treturn \"float64\"\n\tcase String:\n\t\treturn \"string\"\n\tcase Bool:\n\t\treturn \"bool\"\n\tcase Timestamp:\n\t\treturn \"timestamp\"\n\tcase JSON:\n\t\treturn \"json\"\n\tdefault:\n\t\treturn \"unsupported\"\n\t}\n}", "func StringsType() *jsonDataType {\n\treturn JSONType([]string{})\n}", "func String(str string) *Str {\n\treturn &Str{val: str}\n}", "func String(value string) *string {\n\treturn New(value).(*string)\n}", "func fidlTypeString(t fidlgen.Type) string {\n\tn := nullableToString(t.Nullable)\n\tswitch t.Kind {\n\tcase fidlgen.PrimitiveType:\n\t\treturn string(t.PrimitiveSubtype)\n\tcase fidlgen.StringType:\n\t\treturn fmt.Sprintf(\"%s%s%s\",\n\t\t\tt.Kind, elementCountToString(t.ElementCount), n)\n\tcase fidlgen.ArrayType, fidlgen.VectorType:\n\t\treturn fidlNestedToString(t)\n\tcase fidlgen.HandleType:\n\t\tswitch t.HandleSubtype {\n\t\tcase fidlgen.Handle:\n\t\t\treturn fmt.Sprintf(\"handle%v\", n)\n\t\tdefault:\n\t\t\treturn fmt.Sprintf(\n\t\t\t\t\"zx/handle:zx/obj_type.%v%v\", 
strings.ToUpper(string(t.HandleSubtype)), n)\n\t\t}\n\tcase fidlgen.IdentifierType:\n\t\treturn fmt.Sprintf(\"%v%v\", string(t.Identifier), n)\n\tcase fidlgen.RequestType:\n\t\treturn fmt.Sprintf(\"request<%v>%v\", string(t.RequestSubtype), n)\n\tdefault:\n\t\treturn \"<not implemented>\"\n\t}\n}", "func (t Type) String() string {\n\tswitch t {\n\tdefault:\n\t\treturn \"Unknown\"\n\tcase '+':\n\t\treturn \"SimpleString\"\n\tcase '-':\n\t\treturn \"Error\"\n\tcase ':':\n\t\treturn \"Integer\"\n\tcase '$':\n\t\treturn \"BulkString\"\n\tcase '*':\n\t\treturn \"Array\"\n\tcase 'R':\n\t\treturn \"RDB\"\n\t}\n}", "func (s *StringPointerValue) Type() string {\n\treturn \"string\"\n}", "func (s *String) Type() ObjectType {\n\treturn STRING\n}", "func init() {\n\tinternal.TypeString = TypeString\n\tinternal.TypeStringOn = TypeStringOn\n}", "func TypeStringWithAliasMap(typ dgo.Type, am dgo.AliasMap) string {\n\ts := strings.Builder{}\n\tnewTypeBuilder(&s, am).buildTypeString(typ, 0)\n\treturn s.String()\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v string) *string {\n\treturn &v\n}", "func String(v interface{}) string {\n\treturn v.(string)\n}", "func (this *DateAddStr) Type() value.Type { return value.STRING }", "func String(string string) *string {\n\treturn &string\n}", "func (column *ColumnString) Type() string {\n\treturn \"string\"\n}", "func (pt Provider) LegacyString() string {\n\tif pt.Namespace != \"-\" {\n\t\tpanic(\"not a legacy Provider\")\n\t}\n\treturn pt.Type\n}", "func (t Type) String() string {\n\tswitch t {\n\tdefault:\n\t\treturn \"Unknown\"\n\tcase Null:\n\t\treturn \"Null\"\n\tcase RESP:\n\t\treturn \"RESP\"\n\tcase Telnet:\n\t\treturn \"Telnet\"\n\tcase Native:\n\t\treturn \"Native\"\n\tcase HTTP:\n\t\treturn \"HTTP\"\n\tcase WebSocket:\n\t\treturn \"WebSocket\"\n\tcase JSON:\n\t\treturn \"JSON\"\n\t}\n}", "func (t Type) String() string {\n\treturn string(t)\n}", "func (t Type) String() string {\n\treturn string(t)\n}", "func (n Named) TypeString() string {\n\treturn n.String()\n}", "func Strings(f string) Interface {\n\treturn &stringsType{\n\t\tvalueType{\n\t\t\tname: f,\n\t\t\tempty: nil,\n\t\t\tget: func(g Getter) interface{} {\n\t\t\t\treturn g.GetStrings(f)\n\t\t\t},\n\t\t},\n\t}\n}", "func TypeString(s string) (Type, error) {\n\tif val, ok := _TypeNameToValueMap[s]; ok {\n\t\treturn val, nil\n\t}\n\ts = strings.ToLower(s)\n\tif val, ok := _TypeNameToValueMap[s]; ok {\n\t\treturn val, nil\n\t}\n\treturn 0, fmt.Errorf(\"%s does not belong to Type values\", s)\n}", "func (m Chan) TypeString() string {\n\treturn m.String()\n}", "func IsString(v interface{}) bool {\n\tr := elconv.AsValueRef(reflect.ValueOf(v))\n\treturn r.Kind() == reflect.String\n}", "func StringValue(s string) Value { return Value{Typ: '$', Str: []byte(s)} }", "func (s SecretString) Type() string {\n\treturn \"SecretString\"\n}", "func String(v string) *string { return &v }", "func OptionalString(v string) OptionalStringType {\n\treturn OptionalStringType{true, v}\n}", "func newString(val string) stringValue {\n\tif isNullString(val) {\n\t\treturn stringValue{options.GetDisplayStringNullFiller(), true}\n\t}\n\treturn stringValue{val, false}\n}", "func StringType() *StringDataType {\n\treturn &StringDataType{}\n}", "func (t Type) 
String() string {\n\tswitch t {\n\tcase TypeCustom:\n\t\treturn \"custom\"\n\tcase TypeASCII:\n\t\treturn \"ascii\"\n\tcase TypeBigInt:\n\t\treturn \"bigint\"\n\tcase TypeBlob:\n\t\treturn \"blob\"\n\tcase TypeBoolean:\n\t\treturn \"boolean\"\n\tcase TypeCounter:\n\t\treturn \"counter\"\n\tcase TypeDecimal:\n\t\treturn \"decimal\"\n\tcase TypeDouble:\n\t\treturn \"double\"\n\tcase TypeFloat:\n\t\treturn \"float\"\n\tcase TypeInt:\n\t\treturn \"int\"\n\tcase TypeText:\n\t\treturn \"text\"\n\tcase TypeTimestamp:\n\t\treturn \"timestamp\"\n\tcase TypeUUID:\n\t\treturn \"uuid\"\n\tcase TypeVarchar:\n\t\treturn \"varchar\"\n\tcase TypeTimeUUID:\n\t\treturn \"timeuuid\"\n\tcase TypeInet:\n\t\treturn \"inet\"\n\tcase TypeDate:\n\t\treturn \"date\"\n\tcase TypeTime:\n\t\treturn \"time\"\n\tcase TypeSmallInt:\n\t\treturn \"smallint\"\n\tcase TypeTinyInt:\n\t\treturn \"tinyint\"\n\tcase TypeList:\n\t\treturn \"list\"\n\tcase TypeMap:\n\t\treturn \"map\"\n\tcase TypeSet:\n\t\treturn \"set\"\n\tcase TypeVarint:\n\t\treturn \"varint\"\n\tcase TypeTuple:\n\t\treturn \"tuple\"\n\tdefault:\n\t\treturn fmt.Sprintf(\"unknown_type_%d\", t)\n\t}\n}", "func generateStringExample() {\n\tvar s string = \"Hello\"\n\tc := NewStringContainer()\n\tc.Put(s)\n\tv := c.Get()\n\tfmt.Printf(\"generateExample: %s (%T)\\n\", v, v)\n}", "func String(key, val string) Tag {\n\treturn Tag{key: key, tType: stringType, stringVal: val}\n}", "func lvalString(str string) *LVal {\n\tval := LVal{Type: LVAL_STR, String: str}\n\treturn &val\n}", "func NewStringTypeDef(init ...*StringTypeDef) *StringTypeDef {\n\tvar o *StringTypeDef\n\tif len(init) == 1 {\n\t\to = init[0]\n\t} else {\n\t\to = new(StringTypeDef)\n\t}\n\treturn o\n}", "func NewString(s string) *Value {\n\treturn &Value{s, String}\n}", "func TestStringPrimitives(t *testing.T) {\n\to := old()\n\ts := \"now is the time\"\n\to.EncodeStringBytes(s)\n\tdecs_data, e := o.DecodeRawBytes()\n\tdecs := make([]byte, len(decs_data))\n\tcopy(decs, decs_data)\n\tif e != nil {\n\t\tt.Error(\"dec_string\")\n\t}\n\tif s != string(decs) {\n\t\tt.Error(\"string encode/decode fail:\", s, decs)\n\t}\n}", "func String(name, value string, usage string) *string {\n\tp := new(string);\n\tStringVar(p, name, value, usage);\n\treturn p;\n}", "func String(value string) string {\n\treturn \"'\" + value + \"'\"\n}", "func IsString(data interface{}) bool {\n\treturn typeIs(data, reflect.String)\n}", "func String(name string, value string, usage string, aliases ...string) *Value {\n\treturn newString(flag.Var, name, value, usage, aliases...)\n}", "func fieldDescriptorProtoType_StringValue(fieldType FieldDescriptorProto_Type) string {\n\tswitch fieldType {\n\tcase FieldDescriptorProto_TYPE_DOUBLE:\n\t\treturn \"double\"\n\tcase FieldDescriptorProto_TYPE_FLOAT:\n\t\treturn \"float\"\n\tcase FieldDescriptorProto_TYPE_INT64:\n\t\treturn \"int64\"\n\tcase FieldDescriptorProto_TYPE_UINT64:\n\t\treturn \"uint64\"\n\tcase FieldDescriptorProto_TYPE_INT32:\n\t\treturn \"int32\"\n\tcase FieldDescriptorProto_TYPE_FIXED64:\n\t\treturn \"fixed64\"\n\tcase FieldDescriptorProto_TYPE_FIXED32:\n\t\treturn \"fixed32\"\n\tcase FieldDescriptorProto_TYPE_BOOL:\n\t\treturn \"bool\"\n\tcase FieldDescriptorProto_TYPE_STRING:\n\t\treturn \"string\"\n\tcase FieldDescriptorProto_TYPE_GROUP:\n\t\treturn \"group\"\n\tcase FieldDescriptorProto_TYPE_MESSAGE:\n\t\treturn \"message\"\n\tcase FieldDescriptorProto_TYPE_BYTES:\n\t\treturn \"bytes\"\n\tcase FieldDescriptorProto_TYPE_UINT32:\n\t\treturn \"uint32\"\n\tcase 
FieldDescriptorProto_TYPE_ENUM:\n\t\treturn \"enum\"\n\tcase FieldDescriptorProto_TYPE_SFIXED32:\n\t\treturn \"sfixed32\"\n\tcase FieldDescriptorProto_TYPE_SFIXED64:\n\t\treturn \"sfixed64\"\n\tcase FieldDescriptorProto_TYPE_SINT32:\n\t\treturn \"sint32\"\n\tcase FieldDescriptorProto_TYPE_SINT64:\n\t\treturn \"sint64\"\n\t}\n\n\treturn \"nil\"\n}", "func typeString(t []TypeName) (str string) {\n\tswitch len(t) {\n\tcase 0:\n\t\tbreak\n\tcase 1:\n\t\tif t[0].Location.Line == 0 {\n\t\t\t// Use the empty string for undeclared\n\t\t\t// implicit types (such as object).\n\t\t\tbreak\n\t\t}\n\t\tstr = t[0].Str\n\tdefault:\n\t\tstr = \"(either\"\n\t\tfor _, n := range t {\n\t\t\tstr += \" \" + n.Str\n\t\t}\n\t\tstr += \")\"\n\t}\n\treturn\n}", "func String(v string) (p *string) { return &v }", "func castString(input map[string]interface{}, key string) *string {\n\tvar intf interface{}\n\tvar rv string\n\tvar ok bool\n\n\tintf, ok = input[key]\n\tif !ok {\n\t\treturn nil\n\t}\n\n\trv, ok = intf.(string)\n\tif !ok {\n\t\tlog.Print(\"WARNING: type mismatch for field \", key,\n\t\t\t\". Expected string, found \", reflect.TypeOf(intf).Name())\n\t\treturn nil\n\t}\n\n\treturn proto.String(rv)\n}", "func String(v string) *string { return &v }", "func String(v string) *string { return &v }", "func String(v string) *string { return &v }", "func String(v string) *string { return &v }", "func (me TScriptType) String() string { return xsdt.String(me).String() }", "func (*StringSchema) Type() int {\n\treturn String\n}", "func (this *ClockStr) Type() value.Type { return value.STRING }", "func String(v *string) string {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn \"\"\n}", "func String() Scalar {\n\treturn stringTypeInstance\n}", "func (this *DateTruncStr) Type() value.Type { return value.STRING }", "func SimpleStringValue(s string) Value { return Value{Typ: '+', Str: []byte(formSingleLine(s))} }", "func (sf *String) String() string {\n\treturn sf.Value\n}", "func typeIsString(typ reflect.Type) bool {\n\tk := typ.Kind()\n\treturn k == reflect.String || ((k == reflect.Slice || k == reflect.Array) && typ.Elem().Kind() == reflect.Uint8)\n}", "func changeString(x interface{}) string {\n\tswitch x.(type) {\n\tcase string:\n\t\treturn x.(string)\n\tdefault:\n\t\t// Always type=nil, but this way we don't have to add a return at the end.\n\t\treturn \"\"\n\t}\n}", "func (m Func) TypeString() string {\n\treturn m.String()\n}", "func String(name string, value string) *string {\n\tp := new(string)\n\tStringVar(p, name, value)\n\treturn p\n}", "func String(name, val string) Field {\n\treturn Field(zap.String(name, val))\n}", "func String(str string) *string {\n\treturn &str\n}", "func (b *Builder) String(s string) value.Pointer {\n\treturn b.constantMemory.writeString(s)\n}", "func (typeType TagTypePrimitive) String() string {\n\treturn TypeNames[typeType]\n}", "func String(flag string, value string, description string) *string {\n\tvar v string\n\tStringVar(&v, flag, value, description)\n\treturn &v\n}", "func IsString(value interface{}) bool {\n\treturn kindOf(value) == reflect.String\n}", "func String(key, val string) Claim {\n\treturn Claim{Key: key, Type: StringType, String: val}\n}", "func (ref *TypeRef) String() string {\n\tswitch {\n\tcase ref.Named != nil:\n\t\treturn ref.Named.String()\n\tcase ref.List != nil:\n\t\treturn ref.List.String()\n\tcase ref.NonNull != nil:\n\t\treturn ref.NonNull.String()\n\tdefault:\n\t\tpanic(\"unknown type reference\")\n\t}\n}", "func validate_string(element String, object any, 
name string) error {\n\tif getter, ok := object.(StringGetter); ok {\n\t\tif value, ok := getter.GetStringOK(name); ok {\n\t\t\treturn element.Validate(value)\n\t\t}\n\t}\n\n\tif getter, ok := object.(PointerGetter); ok {\n\t\tif value, ok := getter.GetPointer(name); ok {\n\t\t\tif typed, ok := value.(*string); ok {\n\t\t\t\treturn element.Validate(*typed)\n\t\t\t}\n\t\t}\n\t}\n\n\tif element.Required {\n\t\treturn derp.NewValidationError(\"schema.validate_string\", \"Required string property is missing\", element, object, name)\n\t}\n\n\treturn nil\n}", "func StringValue(value string) Value {\n\thdr := (*reflect.StringHeader)(unsafe.Pointer(&value))\n\treturn Value{num: uint64(hdr.Len), any: stringptr(hdr.Data)}\n}", "func String(v *Value, def string) string {\n\ts, err := v.String()\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn s\n}", "func (String) Native(c *compiler.Compiler) (token compiler.Token) {\n\tif c.Target == target.Go {\n\t\treturn compiler.Token(\"string\")\n\t}\n\tif c.Target == target.JS {\n\t\treturn compiler.Token(\"String\")\n\t}\n\treturn\n}" ]
[ "0.72857946", "0.7238155", "0.72031343", "0.69831485", "0.6972583", "0.68899757", "0.6842019", "0.6686224", "0.6526475", "0.6458924", "0.6457487", "0.64249575", "0.64231193", "0.63809097", "0.63782394", "0.6371063", "0.6320504", "0.63115734", "0.6286366", "0.62502384", "0.6244942", "0.6242984", "0.6234517", "0.6231712", "0.6217326", "0.6215126", "0.62137866", "0.62090844", "0.62002754", "0.6182127", "0.6173211", "0.6167565", "0.61363727", "0.61363727", "0.61363727", "0.61363727", "0.61363727", "0.61363727", "0.61363727", "0.6132747", "0.61296725", "0.6128693", "0.6127239", "0.612702", "0.6124476", "0.61203223", "0.61203223", "0.61064744", "0.60922617", "0.6090959", "0.6085812", "0.6084255", "0.60588276", "0.6058682", "0.60561293", "0.60537475", "0.6050759", "0.6044646", "0.6035707", "0.6029238", "0.6027234", "0.6025985", "0.602577", "0.60248184", "0.601973", "0.60088956", "0.60081446", "0.59949297", "0.5983424", "0.5981345", "0.59804577", "0.59799206", "0.5976236", "0.5970507", "0.5970507", "0.5970507", "0.5970507", "0.59635824", "0.59536165", "0.5946578", "0.59457004", "0.5944348", "0.5936591", "0.5936017", "0.59357", "0.59316504", "0.5910962", "0.59057677", "0.5904072", "0.59031135", "0.59021646", "0.5892078", "0.5889304", "0.58888346", "0.5886085", "0.58846414", "0.5884461", "0.58769137", "0.5871241", "0.58679885", "0.58668965" ]
0.0
-1
Deprecated: This has been replaced by github.com/oapicodegen/runtime/typesUnmarshalText
func (d *Date) UnmarshalText(data []byte) error { parsed, err := time.Parse(DateFormat, string(data)) if err != nil { return err } d.Time = parsed return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (t *Type) UnmarshalText(text []byte) error {\n\tswitch strings.ToLower(string(text)) {\n\tdefault:\n\t\t*t = Unsupported\n\tcase \"int32\", \"integer\", \"uint32\":\n\t\t*t = Int32\n\tcase \"int64\", \"bigint\", \"long\", \"uint64\":\n\t\t*t = Int64\n\tcase \"float64\", \"double\":\n\t\t*t = Float64\n\tcase \"string\", \"text\", \"varchar\":\n\t\t*t = String\n\tcase \"bool\", \"boolean\":\n\t\t*t = Bool\n\tcase \"timestamp\", \"time\":\n\t\t*t = Timestamp\n\tcase \"json\", \"map\":\n\t\t*t = JSON\n\t}\n\treturn nil\n}", "func (x *PackageType) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParsePackageType(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (ot *OutputType) UnmarshalText(text []byte) error {\n\tswitch strings.ToLower(string(text)) {\n\tcase \"file\":\n\t\t*ot = OutputFile\n\tcase \"stderr\":\n\t\t*ot = OutputStdErr\n\tcase \"both\":\n\t\t*ot = OutputBoth\n\t}\n\n\treturn nil\n}", "func (i *TaskType) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = TaskTypeString(string(text))\n\treturn err\n}", "func (z *Rat) UnmarshalText(text []byte) error {}", "func (u tu) UnmarshalText(data []byte) error {\n\tu.Text = string(data)\n\treturn nil\n}", "func (mvt *ValueType) UnmarshalText(text []byte) error {\n\tswitch vtStr := string(text); vtStr {\n\tcase \"string\":\n\t\tmvt.ValueType = pcommon.ValueTypeStr\n\tcase \"int\":\n\t\tmvt.ValueType = pcommon.ValueTypeInt\n\tcase \"double\":\n\t\tmvt.ValueType = pcommon.ValueTypeDouble\n\tcase \"bool\":\n\t\tmvt.ValueType = pcommon.ValueTypeBool\n\tcase \"bytes\":\n\t\tmvt.ValueType = pcommon.ValueTypeBytes\n\tcase \"slice\":\n\t\tmvt.ValueType = pcommon.ValueTypeSlice\n\tcase \"map\":\n\t\tmvt.ValueType = pcommon.ValueTypeMap\n\tdefault:\n\t\treturn fmt.Errorf(\"invalid type: %q\", vtStr)\n\t}\n\treturn nil\n}", "func (i *MessageType) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = MessageTypeString(string(text))\n\treturn err\n}", "func (t *Type) UnmarshalText(text []byte) error {\n\ts := Type(text)\n\tfor _, keyType := range []Type{PublicKey, PrivateKey, SymmetricKey} {\n\t\tif keyType == s {\n\t\t\t*t = keyType\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn serrors.WithCtx(ErrUnsupportedType, \"input\", string(text))\n}", "func (v *SimpleType) UnmarshalText(value []byte) error {\n\tswitch s := string(value); s {\n\tcase \"BOOL\":\n\t\t*v = SimpleTypeBool\n\t\treturn nil\n\tcase \"BYTE\":\n\t\t*v = SimpleTypeByte\n\t\treturn nil\n\tcase \"INT8\":\n\t\t*v = SimpleTypeInt8\n\t\treturn nil\n\tcase \"INT16\":\n\t\t*v = SimpleTypeInt16\n\t\treturn nil\n\tcase \"INT32\":\n\t\t*v = SimpleTypeInt32\n\t\treturn nil\n\tcase \"INT64\":\n\t\t*v = SimpleTypeInt64\n\t\treturn nil\n\tcase \"FLOAT64\":\n\t\t*v = SimpleTypeFloat64\n\t\treturn nil\n\tcase \"STRING\":\n\t\t*v = SimpleTypeString\n\t\treturn nil\n\tcase \"STRUCT_EMPTY\":\n\t\t*v = SimpleTypeStructEmpty\n\t\treturn nil\n\tdefault:\n\t\tval, err := strconv.ParseInt(s, 10, 32)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unknown enum value %q for %q: %v\", s, \"SimpleType\", err)\n\t\t}\n\t\t*v = SimpleType(val)\n\t\treturn nil\n\t}\n}", "func textUnmarshalerDecode(\n\tinputType reflect.Type, outputType reflect.Type, data interface{},\n) (interface{}, error) {\n\tif !reflect.PtrTo(outputType).Implements(stringUnmarshalerType) {\n\t\treturn data, nil\n\t}\n\tvalue, ok := data.(string)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"invalid type %v\", inputType)\n\t}\n\tparsedValue, ok := 
reflect.New(outputType).Interface().(stringUnmarshaler)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"invalid output type %v\", outputType)\n\t}\n\terr := parsedValue.Decode(value)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn parsedValue, nil\n}", "func (i *EventType) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = EventTypeString(string(text))\n\treturn err\n}", "func (t *TopicType) UnmarshalText(input []byte) error {\n\treturn hexutil.UnmarshalFixedText(\"Topic\", input, t[:])\n}", "func (x EnvironmentType) UnmarshalText(text []byte) (EnvironmentType, *errorAVA.Error) {\n\tname := string(text)\n\ttmp, err := ParseEnvironmentType(name)\n\tif err != nil {\n\t\treturn EnvironmentTypeUnknown, err\n\t}\n\tx = tmp\n\treturn tmp, nil\n}", "func (u *Usage) UnmarshalText(text []byte) error {\n\ts := Usage(text)\n\tif _, ok := usages[s]; !ok {\n\t\treturn serrors.WithCtx(ErrUnsupportedUsage, \"input\", string(text))\n\t}\n\t*u = s\n\treturn nil\n}", "func (a *ArchType) UnmarshalText(text []byte) error {\n\tif u, ok := archTypeMap[string(text)]; ok {\n\t\t*a = u\n\t\treturn nil\n\t}\n\n\treturn fmt.Errorf(\"unknown ArchType %q\", text)\n}", "func (t *UniversalMytoken) UnmarshalText(data []byte) (err error) {\n\ts := string(data)\n\t*t, err = Parse(log.StandardLogger(), s)\n\treturn errors.WithStack(err)\n}", "func (v *ShortCode) UnmarshalText(b []byte) error {\n\tvar err error\n\t*v, err = NewShortCode(string(b))\n\treturn err\n}", "func (v *Value) UnmarshalText(text []byte) error {\n\tif err := json.Unmarshal(text, v); err == nil {\n\t\treturn nil\n\t}\n\t*v = String(string(text))\n\treturn nil\n}", "func (v *Feature) UnmarshalText(value []byte) error {\n\tswitch s := string(value); s {\n\tcase \"SERVICE_GENERATOR\":\n\t\t*v = FeatureServiceGenerator\n\t\treturn nil\n\tdefault:\n\t\tval, err := strconv.ParseInt(s, 10, 32)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unknown enum value %q for %q: %v\", s, \"Feature\", err)\n\t\t}\n\t\t*v = Feature(val)\n\t\treturn nil\n\t}\n}", "func (i *UOM) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = UOMString(string(text))\n\treturn err\n}", "func (i *Transform) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = ParseTransformString(string(text))\n\treturn err\n}", "func unmarshal(text []byte, fields *map[string]interface{}) error {\n\tdec := json.NewDecoder(bytes.NewReader(text))\n\tdec.UseNumber()\n\terr := dec.Decode(fields)\n\tif err != nil {\n\t\treturn err\n\t}\n\tjsontransform.TransformNumbers(*fields)\n\treturn nil\n}", "func (f genHelperDecoder) DecTextUnmarshal(tm encoding.TextUnmarshaler) {\n\thalt.onerror(tm.UnmarshalText(f.d.d.DecodeStringAsBytes()))\n}", "func (txt *txt) UnmarshalText(b []byte) error {\n\t(*txt).string = string(append(b, '_'))\n\treturn nil\n}", "func (v *MType) UnmarshalText(b []byte) error {\n\ti, err := unmarshalEnumFromText(\"MType\", MType_value, b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*v = MType(i)\n\treturn nil\n}", "func (t *Time) UnmarshalText(data []byte) error {}", "func (f *Field) UnmarshalText(p []byte) error {\n\tr, err := Parse(string(p))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tf.reference = r\n\treturn nil\n}", "func (b *Bytes) UnmarshalText(data []byte) error {\n\tdata = bytes.TrimSpace(data)\n\tval, err := ParseBytes(reflectx.BytesToString(data))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"fail to unmarshal bytes : %v\", err)\n\t}\n\t*(*int64)(b) = val\n\treturn nil\n}", "func (c *TextType) UnmarshalXML(d *xml.Decoder, start xml.StartElement) 
error {\n\tvar v string\n\td.DecodeElement(&v, &start)\n\tfor _, attr := range start.Attr {\n\t\tif attr.Name.Local == \"datestamp\" {\n\t\t\tc.Datestamp = DtDotDateOrDateTime(attr.Value)\n\t\t}\n\t\tif attr.Name.Local == \"sourcetype\" {\n\t\t\tc.Sourcetype = SourceTypeCode(attr.Value)\n\t\t}\n\t\tif attr.Name.Local == \"sourcename\" {\n\t\t\tc.Sourcename = DtDotNonEmptyString(attr.Value)\n\t\t}\n\t}\n\tswitch v {\n\n // To be used only in circumstances where the parties to an exchange have agreed to include text which (a) is not for general distribution, and (b) cannot be coded elsewhere. If more than one type of text is sent, it must be identified by tagging within the text itself\n case \"01\":\n\t\tc.Body = `Sender-defined text`\n\n // Limited to a maximum of 350 characters\n case \"02\":\n\t\tc.Body = `Short description/annotation`\n\n // Length unrestricted\n case \"03\":\n\t\tc.Body = `Description`\n\n // Used for a table of contents sent as a single text field, which may or may not carry structure expressed using XHTML\n case \"04\":\n\t\tc.Body = `Table of contents`\n\n // Primary descriptive blurb usually taken from the back cover or jacket, or occasionally from the cover/jacket flaps. See also code 27\n case \"05\":\n\t\tc.Body = `Primary cover copy`\n\n // A quote taken from a review of the product or of the work in question where there is no need to take account of different editions\n case \"06\":\n\t\tc.Body = `Review quote`\n\n // A quote taken from a review of a previous edition of the work\n case \"07\":\n\t\tc.Body = `Review quote: previous edition`\n\n // A quote taken from a review of a previous work by the same author(s) or in the same series\n case \"08\":\n\t\tc.Body = `Review quote: previous work`\n\n // A quote usually provided by a celebrity or another author to promote a new book, not from a review\n case \"09\":\n\t\tc.Body = `Endorsement`\n\n // A promotional phrase which is intended to headline a description of the product\n case \"10\":\n\t\tc.Body = `Promotional headline`\n\n // Text describing a feature of a product to which the publisher wishes to draw attention for promotional purposes. Each separate feature should be described by a separate repeat, so that formatting can be applied at the discretion of the receiver of the ONIX record, or multiple features can be described using appropriate XHTML markup\n case \"11\":\n\t\tc.Body = `Feature`\n\n // A note referring to all contributors to a product – NOT linked to a single contributor\n case \"12\":\n\t\tc.Body = `Biographical note`\n\n // A statement included by a publisher in fulfillment of contractual obligations, such as a disclaimer, sponsor statement, or legal notice of any sort. Note that the inclusion of such a notice cannot and does not imply that a user of the ONIX record is obliged to reproduce it\n case \"13\":\n\t\tc.Body = `Publisher’s notice`\n\n // A short excerpt from the main text of the work\n case \"14\":\n\t\tc.Body = `Excerpt`\n\n // Used for an index sent as a single text field, which may be structured using XHTML\n case \"15\":\n\t\tc.Body = `Index`\n\n // (of which the product is a part.) Limited to a maximum of 350 characters\n case \"16\":\n\t\tc.Body = `Short description/annotation for collection`\n\n // (of which the product is a part.) 
Length unrestricted\n case \"17\":\n\t\tc.Body = `Description for collection`\n\n // As code 11 but used for a new feature of this edition or version\n case \"18\":\n\t\tc.Body = `New feature`\n\n // Version history\n case \"19\":\n\t\tc.Body = `Version history`\n\n // Short summary statement of open access status and any related conditions (eg ‘Open access – no commercial use’), primarily for marketing purposes. Should always be accompanied by a link to the complete license (see <EpubLicense> or code 99 in List 158)\n case \"20\":\n\t\tc.Body = `Open access statement`\n\n // Short summary statement that the product is available only in digital formats (eg ‘Digital exclusive’). If a non-digital version is planned, <ContentDate> should be used to specify the date when exclusivity will end (use content date role code 15). If a non-digital version is available, the statement should not be included\n case \"21\":\n\t\tc.Body = `Digital exclusivity statement`\n\n // For example a recommendation or approval provided by a ministry of education or other official body. Use <Text> to provide details and ideally use <TextSourceCorporate> to name the approver\n case \"22\":\n\t\tc.Body = `Official recommendation`\n\n // Short description in format specified by Japanese Book Publishers Association\n case \"23\":\n\t\tc.Body = `JBPA description`\n\n // JSON-LD snippet suitable for use within an HTML <script type=\"application/ld+json\"> tag, containing structured metadata suitable for use with schema.org\n case \"24\":\n\t\tc.Body = `schema.org snippet`\n\n // Errata\n case \"25\":\n\t\tc.Body = `Errata`\n\n // Introduction, preface or the text of other preliminary material, sent as a single text field, which may be structured using XHTML\n case \"26\":\n\t\tc.Body = `Introduction`\n\n // Secondary descriptive blurb taken from the cover/jacket flaps, or occasionally from the back cover or jacket, used only when there are two separate texts and the primary text is included using code 05\n case \"27\":\n\t\tc.Body = `Secondary cover copy`\n\n // For use with dramatized audiobooks, filmed entertainment etc, for a cast list sent as a single text field, which may or may not carry structure expressed using XHTML\n case \"28\":\n\t\tc.Body = `Full cast and credit list`\n\n // Complete list of books by the author(s), supplied as a single text field, which may be structured using (X)HTML\n case \"29\":\n\t\tc.Body = `Bibliography`\n\n // Formal summary of content (normally used with academic and scholarly content only)\n case \"30\":\n\t\tc.Body = `Abstract`\n\n // Eg for a game, kit\n case \"31\":\n\t\tc.Body = `Rules or instructions`\n\n // Eg for a game, kit. 
Note: use code 04 for a Table of Contents of a book\n case \"32\":\n\t\tc.Body = `List of contents`\n\tdefault:\n\t\treturn fmt.Errorf(\"undefined code for TextType has been passed, got [%s]\", v)\n\t}\n\treturn nil\n}", "func (r *ToS) UnmarshalText(data []byte) error {\n\tif v, ok := _tosNameToValue[string(data)]; ok {\n\t\t*r = v\n\t\treturn nil\n\t}\n\n\treturn fmt.Errorf(\"invalid ToS %q\", string(data))\n}", "func (j *TextMarshaler) Unmarshal(data []byte, v interface{}) error {\n\treturn json.Unmarshal(data, v)\n}", "func (z *Int) UnmarshalText(text []byte) error {}", "func (a *ValueArray) UnmarshalText(text []byte) error {\n\treturn a.UnmarshalJSON(text)\n}", "func (dt *DateTime) UnmarshalText(text []byte) error {\n\treturn dt.src.UnmarshalText(text)\n}", "func (x *TimedLyricsArray) UnmarshalText(data []byte) error {\n\t// TODO: need samples\n\treturn nil\n}", "func (d *DateTime) UnmarshalText(data []byte) (err error) {\n\treturn d.Time.UnmarshalText(data)\n}", "func (u *UUID) UnmarshalText(data []byte) error {\n\treturn u.ReadBytes(data)\n}", "func (b *ByteSize) UnmarshalText(text []byte) error {\n\treturn b.UnmarshalFlag(string(text))\n}", "func (x *Commented) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseCommented(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (r *Record) UnmarshalText(b []byte) error {\n\treturn r.UnmarshalSAM(nil, b)\n}", "func (duration *Duration) UnmarshalText(text []byte) error {\n\treturn duration.UnmarshalFlag(string(text))\n}", "func (d *Date) UnmarshalText(data []byte) error {\n\tvar err error\n\t*d, err = ParseDate(string(data))\n\treturn err\n}", "func (dt *Dairytime) UnmarshalText(text []byte) (err error) {\n\tif text == nil {\n\t\treturn nil\n\t}\n\tif len(text) == 0 {\n\t\treturn nil\n\t}\n\n\tdt.Time, _ = time.Parse(timeLayout, string(text))\n\treturn nil\n}", "func (r *Roletype) UnmarshalText(text []byte) error {\n\tswitch string(text) {\n\tcase \"SUPERADMIN\":\n\t\t*r = RoletypeSuperadmin\n\n\tcase \"ADMIN\":\n\t\t*r = RoletypeAdmin\n\n\tcase \"USER\":\n\t\t*r = RoletypeUser\n\n\tdefault:\n\t\treturn errors.New(\"invalid Roletype\")\n\t}\n\n\treturn nil\n}", "func (b *Bool) UnmarshalText(text []byte) error {\n\tstr := string(text)\n\tswitch str {\n\tcase \"\", \"null\":\n\t\tb.Valid = false\n\t\treturn nil\n\tcase \"Y\":\n\t\tb.Bool = true\n\tcase \"N\":\n\t\tb.Bool = false\n\tdefault:\n\t\tb.Valid = false\n\t\treturn errors.New(\"invalid input:\" + str)\n\t}\n\tb.Valid = true\n\treturn nil\n}", "func (t *Timestamp) UnmarshalText(data []byte) error {\n\tunix, err := strconv.ParseInt(string(data), 10, 64)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*t = Timestamp(unix)\n\treturn nil\n}", "func (b *Bytes) UnmarshalText(hex []byte) error {\n\tif len(hex) == 0 {\n\t\treturn nil\n\t}\n\n\tarrayByte, err := hexutil.HexToBytes(string(hex))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t*b = arrayByte\n\treturn nil\n}", "func (u *UUID) UnmarshalText(text []byte) error {\n\tid, err := Parse(text)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*u = id\n\treturn nil\n}", "func (key *Key) UnmarshalText(data []byte) error {\n\tvar s string\n\tif err := json.Unmarshal(data, &s); err != nil {\n\t\treturn err\n\t}\n\n\tparsedKey, err := parseKey(s)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t*key = parsedKey\n\treturn nil\n}", "func (a *Action) UnmarshalText(text []byte) error {\n\tif _, exists := actions[Action(text)]; exists {\n\t\t*a = Action(text)\n\t\treturn nil\n\t}\n\treturn 
fmt.Errorf(\"unrecognized action type %q\", string(text))\n}", "func (f *Field) UnmarshalText(text []byte) error {\n\tfield, err := NewField(string(text))\n\t*f = field\n\treturn err\n}", "func (dt *DateTime) UnmarshalText(text []byte) error {\n\treturn dt.Set(string(text))\n}", "func (s *Scalar) UnmarshalText(text []byte) error {\n\tsb, err := base64.StdEncoding.DecodeString(string(text))\n\tif err == nil {\n\t\treturn s.Decode(sb)\n\t}\n\treturn err\n}", "func (i *Enum) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseEnum(name)\n\tif err != nil {\n\t\treturn &json.UnmarshalTypeError{\n\t\t\tValue: name,\n\t\t\tType: reflect.TypeOf(*i),\n\t\t}\n\t}\n\t*i = tmp\n\treturn nil\n}", "func (i *Enum) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseEnum(name)\n\tif err != nil {\n\t\treturn &json.UnmarshalTypeError{\n\t\t\tValue: name,\n\t\t\tType: reflect.TypeOf(*i),\n\t\t}\n\t}\n\t*i = tmp\n\treturn nil\n}", "func (u *UUID) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = UUID(string(data))\n\treturn nil\n}", "func (ns *Float32) UnmarshalText(text []byte) error {\n\treturn ns.UnmarshalJSON(text)\n}", "func (data *AlertData) UnmarshalText(text []byte) error {\n\ttype x AlertData\n\treturn json.Unmarshal(text, (*x)(data))\n}", "func (t *Time) UnmarshalText(data []byte) error {\n\ttt, err := time.Parse(time.RFC3339, string(data))\n\tif _, ok := err.(*time.ParseError); ok {\n\t\ttt, err = time.Parse(DeisDatetimeFormat, string(data))\n\t\tif _, ok := err.(*time.ParseError); ok {\n\t\t\ttt, err = time.Parse(PyOpenSSLTimeDateTimeFormat, string(data))\n\t\t}\n\t}\n\t*t = Time{&tt}\n\treturn err\n}", "func (u *ISBN) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = ISBN(string(data))\n\treturn nil\n}", "func (m *ValueMap) UnmarshalText(text []byte) error {\n\treturn m.UnmarshalJSON(text)\n}", "func (z *Float) UnmarshalText(text []byte) error {}", "func (j *JSONText) Unmarshal(v interface{}) error {\n\treturn json.Unmarshal([]byte(*j), v)\n}", "func (x *XID) UnmarshalText(text []byte) (err error) {\n\t*x, err = ParseXID(b2s(text))\n\treturn\n}", "func (v *RejoinRequestType) UnmarshalText(b []byte) error {\n\ti, err := unmarshalEnumFromText(\"RejoinRequestType\", RejoinRequestType_value, b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*v = RejoinRequestType(i)\n\treturn nil\n}", "func (m *Money) UnmarshalText(text []byte) (err error) {\n\t*m, err = Parse(string(text))\n\treturn\n}", "func (e *Email) UnmarshalText(data []byte) error { // validation is performed later on\n\t*e = Email(string(data))\n\treturn nil\n}", "func (id *ID) UnmarshalText(text []byte) error {\n\tif len(text) != encodedLen {\n\t\treturn ErrInvalidID\n\t}\n\tdecode(id, text)\n\treturn nil\n}", "func (x *Prefecture) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParsePrefecture(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (i *State) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = StateString(string(text))\n\treturn err\n}", "func (f *Filter) UnmarshalText(text []byte) error {\n\tf.lock.Lock()\n\tdefer f.lock.Unlock()\n\n\tf2, err := UnmarshalText(text)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tf.m = f2.m\n\tf.n = f2.n\n\tcopy(f.bits, f2.bits)\n\tcopy(f.keys, f2.keys)\n\n\treturn nil\n}", "func (x *IntShop) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseIntShop(name)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (i *OrderState) UnmarshalText(text []byte) error {\n\tvar err error\n\t*i, err = OrderStateString(string(text))\n\treturn err\n}", "func (l *List) UnmarshalText(text []byte) error {\n\tm := parse.Bytes(text)\n\tdefer m.Release()\n\tcapacity := len(m.List)\n\tif len(m.Text) == 0 {\n\t\tcapacity += 2\n\t}\n\tlist := make(List, 0, capacity)\n\n\tif len(m.Text) > 0 {\n\t\tlist = append(list, \"msg\", string(m.Text))\n\t}\n\tfor _, v := range m.List {\n\t\tlist = append(list, string(v))\n\t}\n\t*l = list\n\treturn nil\n}", "func (x *ComplexCommented) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseComplexCommented(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (r *RootableField) UnmarshalText(text []byte) error {\n\tfield, err := newField(string(text), true)\n\t*r = RootableField{Field: field}\n\treturn err\n}", "func (i *Time) UnmarshalText(b []byte) error {\n\tni, err := parseDuration(string(b))\n\tif err != nil {\n\t\treturn err\n\t}\n\t*i = *ni\n\treturn nil\n}", "func (x *StrState) UnmarshalText(text []byte) error {\n\ttmp, err := ParseStrState(string(text))\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}", "func (u *UUID4) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = UUID4(string(data))\n\treturn nil\n}", "func (u *ISBN13) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = ISBN13(string(data))\n\treturn nil\n}", "func (record *Packed) UnmarshalText(s []byte) error {\n\tsize := hex.DecodedLen(len(s))\n\t*record = make([]byte, size)\n\t_, err := hex.Decode(*record, s)\n\treturn err\n}", "func (u *URI) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = URI(string(data))\n\treturn nil\n}", "func (i *Int) UnmarshalText(b []byte) error {\n\tif b == nil {\n\t\ti.Nil = true\n\t\treturn nil\n\t}\n\n\tif bytes.Compare(b, []byte(\"\")) == 0 {\n\t\ti.Nil = true\n\t\treturn nil\n\t}\n\n\tresult, err := toType(b, &i.Val)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ti.Nil = result\n\treturn nil\n}", "func (info *Info) UnmarshalText(text []byte) error {\n\tparts := strings.Split(string(text), \" \")\n\tif len(parts) == 0 {\n\t\treturn errors.New(\"uci: invalid info line \" + string(text))\n\t}\n\tif parts[0] != \"info\" {\n\t\treturn errors.New(\"uci: invalid info line \" + string(text))\n\t}\n\tref := \"\"\n\tfor i := 1; i < len(parts); i++ {\n\t\ts := parts[i]\n\t\tswitch s {\n\t\tcase \"score\":\n\t\t\tcontinue\n\t\tcase \"lowerbound\":\n\t\t\tinfo.Score.LowerBound = true\n\t\t\tcontinue\n\t\tcase \"upperbound\":\n\t\t\tinfo.Score.UpperBound = true\n\t\t\tcontinue\n\t\t}\n\t\tif ref == \"\" {\n\t\t\tref = s\n\t\t\tcontinue\n\t\t}\n\t\tswitch ref {\n\t\tcase \"depth\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Depth = v\n\t\tcase \"seldepth\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Seldepth = v\n\t\tcase \"multipv\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Multipv = v\n\t\tcase \"cp\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Score.CP = v\n\t\tcase \"nodes\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Nodes = v\n\t\tcase \"mate\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn 
err\n\t\t\t}\n\t\t\tinfo.Score.Mate = v\n\t\tcase \"currmovenumber\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.CurrentMoveNumber = v\n\t\tcase \"currmove\":\n\t\t\tm, err := chess.UCINotation{}.Decode(nil, s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.CurrentMove = m\n\t\tcase \"hashfull\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Hashfull = v\n\t\tcase \"tbhits\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.TBHits = v\n\t\tcase \"time\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.Time = time.Millisecond * time.Duration(v)\n\t\tcase \"nps\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.NPS = v\n\t\tcase \"cpuload\":\n\t\t\tv, err := strconv.Atoi(s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.CPULoad = v\n\t\tcase \"pv\":\n\t\t\tm, err := chess.UCINotation{}.Decode(nil, s)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tinfo.PV = append(info.PV, m)\n\t\t}\n\t\tif ref != \"pv\" {\n\t\t\tref = \"\"\n\t\t}\n\t}\n\treturn nil\n}", "func (v *Uint64) UnmarshalText(text []byte) error {\n\treturn v.Set(string(text))\n}", "func (receiver *Request) UnmarshalText(text []byte) error {\n\tif nil == receiver {\n\t\treturn errNilReceiver\n\t}\n\n\tvar value string = string(text)\n\n\treturn receiver.Parse(value)\n}", "func (f *Feature) UnmarshalText(b []byte) error {\n\tvar f0 int\n\tvar err error\n\tif len(b) > 0 {\n\t\tif b[0] >= '0' && b[0] <= '9' {\n\t\t\tf0, err = strconv.Atoi(string(b))\n\t\t} else {\n\t\tLoop:\n\t\t\tfor i := 0; i < len(b); i++ {\n\t\t\t\tswitch b[i] {\n\t\t\t\tcase 'V', 'v':\n\t\t\t\t\tf0 |= int(FeatureValidated)\n\t\t\t\tcase 'L', 'l':\n\t\t\t\t\tf0 |= int(FeatureNoLogin)\n\t\t\t\tdefault:\n\t\t\t\t\terr = errors.New(\"Feature: invalid character '\" + string(b[i]) + \"'\")\n\t\t\t\t\tbreak Loop\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t*f = Feature(f0)\n\n\treturn err\n}", "func (t *Texture) UnmarshalText(text []byte) error {\n\ttextureImage, _ := utils.Try2(ebitenutil.NewImageFromFile(string(text)))\n\tt.Image = textureImage\n\treturn nil\n}", "func (v *CFListType) UnmarshalText(b []byte) error {\n\ti, err := unmarshalEnumFromText(\"CFListType\", CFListType_value, b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*v = CFListType(i)\n\treturn nil\n}", "func (t *Time) UnmarshalText(data []byte) error {\n\tstr := string(data)\n\tif str == \"\" || str == \"null\" {\n\t\tt.Valid = false\n\t\treturn nil\n\t}\n\n\tvar epochMsec int64\n\tif err := json.Unmarshal(data, &epochMsec); err != nil {\n\t\tt.Valid = false\n\t\treturn nil\n\t}\n\n\tt.Time = t.timeFromEpochMsec(epochMsec)\n\n\treturn nil\n}", "func (p *Pair) UnmarshalText(d []byte) error {\n\tpair, err := PairFromString(string(d))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t*p = pair\n\n\treturn nil\n}", "func (i *Int8) UnmarshalText(text []byte) error {\n\tstr := string(text)\n\tif str == \"\" || str == \"null\" {\n\t\ti.Valid = false\n\t\treturn nil\n\t}\n\tvar err error\n\tres, err := strconv.ParseInt(string(text), 10, 8)\n\ti.Valid = err == nil\n\tif i.Valid {\n\t\ti.Int8 = int8(res)\n\t}\n\treturn err\n}", "func (ll *Level) UnmarshalText(text []byte) error {\n\tswitch strings.ToLower(string(text)) {\n\tcase \"debug\":\n\t\t*ll = DebugLevel\n\tcase \"info\", \"information\":\n\t\t*ll = InfoLevel\n\tcase \"warn\", 
\"warning\":\n\t\t*ll = WarnLevel\n\tcase \"error\":\n\t\t*ll = ErrorLevel\n\tcase \"fatal\":\n\t\t*ll = FatalLevel\n\tdefault:\n\t\treturn fmt.Errorf(\"unknown level: %s\", string(text))\n\t}\n\treturn nil\n}", "func (d *Date) UnmarshalText(data []byte) (err error) {\n\td.Time, err = time.Parse(fullDate, string(data))\n\treturn err\n}", "func BenchmarkUUID_UnmarshalText(b *testing.B) {\n\tfor i := 0; i < b.N; i++ {\n\t\tu, err := UUIDv4()\n\t\tif err != nil {\n\t\t\tb.Error(err)\n\t\t}\n\t\ts := u.MarshalText()\n\t\tvar u2 UUID\n\t\tu2.UnmarshalText(s)\n\t}\n}", "func (u *SSN) UnmarshalText(data []byte) error { // validation is performed later on\n\t*u = SSN(string(data))\n\treturn nil\n}", "func (p *Protocol) UnmarshalText(text []byte) error {\n\tprotocol, ok := ParseProtocol(string(text))\n\tif !ok {\n\t\treturn fmt.Errorf(\"failed to parse protocol string %s\", text)\n\t}\n\n\t*p = protocol\n\n\treturn nil\n}", "func (os *OS) UnmarshalText(b []byte) error {\n\tsplit := bytes.Split(b, []byte(\":\"))\n\tif len(split) != 2 {\n\t\treturn trace.BadParameter(\"OS should be in format vendor:version, got %q\", b)\n\t}\n\tos.Vendor = string(split[0])\n\tos.Version = string(split[1])\n\treturn nil\n}", "func (u *Userstatus) UnmarshalText(text []byte) error {\n\tswitch string(text) {\n\tcase \"HEALTHY\":\n\t\t*u = UserstatusHealthy\n\n\tcase \"POSITIVE\":\n\t\t*u = UserstatusPositive\n\n\tcase \"RECOVERED\":\n\t\t*u = UserstatusRecovered\n\n\tdefault:\n\t\treturn errors.New(\"invalid Userstatus\")\n\t}\n\n\treturn nil\n}", "func (x *MainMatchingStatus) UnmarshalText(text []byte) error {\n\tname := string(text)\n\ttmp, err := ParseMainMatchingStatus(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = tmp\n\treturn nil\n}" ]
[ "0.7104717", "0.68668705", "0.6808103", "0.67961055", "0.6683559", "0.6666399", "0.65630394", "0.6553652", "0.6547393", "0.6498534", "0.64623374", "0.6423306", "0.6405346", "0.63897216", "0.6382696", "0.63822085", "0.63565415", "0.6272622", "0.6263414", "0.6253243", "0.6249443", "0.62347955", "0.62189573", "0.6213458", "0.6194381", "0.6151587", "0.6146218", "0.6128554", "0.6076107", "0.60684645", "0.606101", "0.60533446", "0.6026313", "0.6019117", "0.5984032", "0.59836787", "0.5980852", "0.5978861", "0.59773487", "0.5970864", "0.5934783", "0.59333056", "0.59330225", "0.5926845", "0.59034735", "0.59021926", "0.5888376", "0.5871069", "0.5866027", "0.5862746", "0.58516204", "0.5849075", "0.58398545", "0.58390033", "0.58366406", "0.58366406", "0.5826621", "0.58140624", "0.5807462", "0.579286", "0.57925415", "0.5786609", "0.57863003", "0.578565", "0.57818276", "0.57781667", "0.5763145", "0.5760312", "0.57570076", "0.5754742", "0.57544875", "0.5753133", "0.5745748", "0.57436293", "0.57408035", "0.57397014", "0.57352453", "0.5726852", "0.57220984", "0.57206345", "0.57152116", "0.57134336", "0.5708398", "0.570394", "0.57007957", "0.57002735", "0.56889826", "0.5686964", "0.568632", "0.5678681", "0.5674664", "0.56719244", "0.5669499", "0.5663138", "0.56610674", "0.5654236", "0.5652946", "0.5650655", "0.565053", "0.56458354", "0.5641518" ]
0.0
-1
1 Try to insert 4 new unique users
func TestInsertNewUserService (t *testing.T){ err := PostNewUserService(user_01) assert.Equal(t, 200, err.HTTPStatus) err = PostNewUserService(user_02) assert.Equal(t, 200, err.HTTPStatus) err = PostNewUserService(user_03) assert.Equal(t, 200, err.HTTPStatus) err = PostNewUserService(user_04) assert.Equal(t, 200, err.HTTPStatus) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CreateNewUser(username, password string) (userID string, err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var checkIfTheUserAlreadyExists = func(username string, userData map[string]interface{}) (err error) {\n var salt = userData[\"_salt\"].(string)\n var loginUsername = userData[\"_username\"].(string)\n\n if SHA256(username, salt) == loginUsername {\n err = createError(020)\n }\n\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n for _, userData := range users {\n err = checkIfTheUserAlreadyExists(username, userData.(map[string]interface{}))\n if err != nil {\n return\n }\n }\n\n var defaults = defaultsForNewUser(username, password)\n userID = defaults[\"_id\"].(string)\n users[userID] = defaults\n\n saveDatabase(data)\n\n return\n}", "func insertUser(id int) result {\n\tr := result{\n\t\tid: id,\n\t\top: fmt.Sprintf(\"insert USERS value (%d)\", id),\n\t}\n\n\t// Randomize if the insert fails or not.\n\tif rand.Intn(10) == 0 {\n\t\tr.err = fmt.Errorf(\"Unable to insert %d into USER table\", id)\n\t}\n\n\treturn r\n}", "func createUserHandler(res http.ResponseWriter, req *http.Request) {\n\tvar user MongoUserSchema\n\tjson.NewDecoder(req.Body).Decode(&user)\n\t// fmt.Println(hash(user.Password))\n\tif checkEmailValidity(user.Email) == false {\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\tres.Write([]byte(\"Invalid e-mail id!\"))\n\t\treturn\n\t}\n\n\tusersCol := client.Database(\"Aviroop_Nandy_Appointy\").Collection(\"users\")\n\tctx, _ := context.WithTimeout(context.Background(), 15*time.Second)\n\tcursor, err := usersCol.Find(ctx, bson.M{})\n\n\tfor cursor.Next(ctx) {\n\t\tvar backlogUser MongoUserSchema\n\t\tcursor.Decode(&backlogUser)\n\t\tif backlogUser.Email == user.Email {\n\t\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\t\tres.Write([]byte(`{\"This e-mail is already registered!\":\"` + err.Error() + `\"}`))\n\t\t\treturn\n\t\t}\n\t}\n\n\thashedPswd := hashPassword(user.Password)\n\tuser.Password = hashedPswd\n\n\tuserResult, insertErrorUser := usersCol.InsertOne(ctx, user)\n\tif insertErrorUser != nil {\n\t\tfmt.Println(\"Error while creating user: \", insertErrorUser)\n\t} else {\n\t\tjson.NewEncoder(res).Encode(userResult)\n\t\tuserID := userResult.InsertedID\n\t\tfmt.Println(\"New user id: \", userID)\n\t}\n\n\tres.Header().Add(\"content-type\", \"application/json\")\n\tres.WriteHeader(http.StatusOK)\n}", "func CreateUserSampleData(db *gorm.DB) ([]utilDB.User, error) {\n\tusers := make([]utilDB.User, 0)\n\n\tfor i := 0; i < 15; i++ {\n\t\tuser := utilDB.User{\n\t\t\tID: i + 1,\n\t\t\tName: \"hieudeptrai\" + strconv.Itoa(i),\n\t\t}\n\t\terr := db.Create(&user).Error\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tusers = append(users, user)\n\t}\n\n\treturn users, nil\n}", "func insertRecord() {\n db.LogMode(false)\n\n var ch chan int64\n var cnum chan int\n maxProcs := 50\n ch = make(chan int64, maxProcs)\n cnum = make(chan int, maxProcs)\n var startTime = time.Now().Unix()\n for i := 0; i < maxProcs; i++ {\n go func(ch chan int64, cnum chan int) {\n var uid int64\n var tableName, username, nickname, skey, password string\n for {\n uid = <-ch\n if uid == 0 {\n cnum <- 1\n break\n }\n username = fmt.Sprintf(\"username%d\", uid)\n tableName = getTableName(username)\n nickname = fmt.Sprintf(\"nickname%d\", uid)\n skey = generateSkey()\n password = utils.Md5String(utils.Md5String(\"123456\") + skey)\n db.Table(tableName).Create(&User{Username: username, Nickname: nickname, Passwd: password, Skey: 
skey, Uptime: time.Now().Unix()})\n }\n }(ch, cnum)\n }\n fmt.Println(\"Start to create user data,Please wait...\")\n totalNum := 10000000\n for i := 1; i <= totalNum; i++ {\n if int64(i)%20000 == 0 {\n fmt.Println(time.Now().Format(\"2000-01-01 00:00:00\"), fmt.Sprintf(\"Completed %.1f%%\", float64(i*100)/float64(totalNum)))\n }\n ch <- int64(i)\n }\n\n for i := 0; i < maxProcs; i++ {\n ch <- int64(0)\n }\n for i := 0; i < maxProcs; i++ {\n <-cnum\n }\n var endTime = time.Now().Unix()\n fmt.Println(\"Done.Cost\", endTime-startTime, \"s.\")\n}", "func (this *Queries_UServ) InsertUsers(ctx context.Context, db persist.Runnable) *Query_UServ_InsertUsers {\n\treturn &Query_UServ_InsertUsers{\n\t\topts: this.opts,\n\t\tctx: ctx,\n\t\tdb: db,\n\t}\n}", "func seedUsers() error {\n\tusers := parseUsers()\n\tfor _, u := range users {\n\t\tu.PreSave()\n\t}\n\tif err := cmdApp.Srv().Store.User().BulkInsert(users); err != nil {\n\t\tcmdApp.Log().Error(\"could not seed users\", zlog.String(\"err: \", err.Message))\n\t\treturn err\n\t}\n\tcmdApp.Log().Info(\"users seed completed\")\n\treturn nil\n}", "func generateUsers(num int) error {\n\tcurrentNum := 1\n\tfor currentNum <= num {\n\t\tuser := User{\n\t\t\tID: currentNum,\n\t\t\tName: RandStringRunes(10),\n\t\t}\n\t\tpayload, err := json.Marshal(user)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"Could not marshal JSON user\")\n\t\t}\n\t\tjob := Job{\n\t\t\tData: payload,\n\t\t\tType: \"user\",\n\t\t}\n\t\tjobs <- job\n\t\tcurrentNum++\n\t}\n\treturn nil\n}", "func createUser(usr *User) error {\n\tpasswordHash, err := encrypt(usr.password)\n\tusr.password = \"\"\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif usr.email != \"\" {\n\t\tusr.emailToken, err = generateEmailToken()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\temailTokenHash, err := encrypt(usr.emailToken)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tqueryStr := \"INSERT INTO users(username, password, email, email_token) VALUES($1, $2, $3, $4) returning id\"\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash, usr.email, emailTokenHash).Scan(&usr.id)\n\n\t} else {\n\t\tqueryStr := \"INSERT INTO users(username, password) VALUES($1, $2) returning id\"\n\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash).Scan(&usr.id)\n\t}\n\n\tif err != nil {\n\t\t// check if the error is for a violation of a unique constraint like the username or email index\n\t\tif err.(*pq.Error).Code == \"23505\" { // 23505 is duplicate key value violates unique constraint\n\t\t\tswitch err.(*pq.Error).Constraint {\n\t\t\tcase \"unique_username\":\n\t\t\t\treturn ErrDuplicateUsername\n\t\t\tcase \"unique_email\":\n\t\t\t\treturn ErrDuplicateEmail\n\t\t\t}\n\t\t}\n\n\t\t// all our other sql errors\n\t\treturn err\n\t}\n\tlog.Printf(\"user %s created\", usr.Username)\n\treturn addSession(usr)\n\n}", "func InsertUsers(docs []interface{}) error {\n\terr := checkMongoClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\tcollection := repositoryClient.Database(DBName).Collection(CollectionName)\n\t//unique index for Email field\n\tctx := context.TODO()\n\t_, err = collection.Indexes().CreateOne(\n\t\tctx,\n\t\tmongo.IndexModel{\n\t\t\tKeys: bsonx.Doc{{\"email\", bsonx.Int32(1)}},\n\t\t\tOptions: options.Index().SetUnique(true),\n\t\t},\n\t)\n\n\t_, err = collection.InsertMany(ctx, docs)\n\treturn err\n}", "func sign_up(conn net.Conn, args []string, db *sql.DB, newMsgCh *chan message) serverError {\n login := args[0]\n // check this login\n // login must be free\n _, err := getUserID(login, 
db)\n if err == sql.ErrNoRows {\n // add new user if everything ok\n // check args\n\n // RSA key.module must be big.Int()\n n := big.NewInt(0)\n if _, ok := n.SetString(args[1], 10); !ok {\n return serverError{ errors.New(\"Expected public key module. Got \" + args[1]),\n WRONG_PUBLIC_KEY_MODULE}\n }\n // size of n must be RSA_KEY_LEN bit\n if n.BitLen() != RSA_KEY_LEN {\n return serverError{ errors.New(\"Wrong public key module size. Expected \" +\n strconv.Itoa(RSA_KEY_LEN) + \" bit, got \" +\n strconv.Itoa(n.BitLen())), WRONG_PUBLIC_KEY_SIZE}\n }\n\n //check that args[2] is RSA exponent\n e, err := strconv.Atoi(args[2])\n if err != nil {\n return serverError{ errors.New(\"error convert \" + args[2] + \" to key exponent\"),\n WRONG_KEY_EXPONENT }\n }\n\n //register new user\n _, err = db.Exec(`INSERT INTO users(login, pubKey_n, pubKey_e)\n VALUES ($1, $2, $3)`, login, args[1], e)\n return serverError{err, SERVER_INNER_ERR}\n } else {\n return serverError{ errors.New(\"login \" + login + \" is used\"),\n LOGIN_IS_USED }\n }\n\n return NoErrors\n}", "func CreateUser(id int, email string, pass string, bName string,\n\tbAccNum int, bRoutNum int) {\n\tvar count int = 0\n\n\tfor count < len(userList) {\n\t\tif userList[count].uID == id {\n\t\t\tfmt.Println(\"That user id is taken. Please choose a new ID.\")\n\t\t\treturn\n\t\t} else {\n\t\t\tcount++\n\t\t}\n\t}\n\n\tpANew := payAccount{\n\t\tbankName: bName,\n\t\taccountNumber: bAccNum,\n\t\troutingNumber: bRoutNum,\n\t}\n\tuNew := user{\n\t\tuID: id,\n\t\tuEmail: email,\n\t\tuPassword: pass,\n\t\tuBankAccount: pANew,\n\t}\n\tAddUserToDatabase(uNew)\n}", "func TestInsertNewUserServiceAlreadyExists (t *testing.T){\n\terr := PostNewUserService(user_01)\n\tassert.Equal(t, 409, err.HTTPStatus)\n}", "func (rep rep_users) Create(user models.Usuario) (uint64, error) {\n\n\tstatement, erro := rep.db.Prepare(\"INSERT INTO usuarios (nome, nick, email, senha) VALUES (?, ?, ?, ?)\")\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\tdefer statement.Close()\n\n\tresult, erro := statement.Exec(user.Nome, user.Nick, user.Email, user.Senha)\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\tlastIDInsert, erro := result.LastInsertId()\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\treturn uint64(lastIDInsert), nil\n}", "func InsertUser(w http.ResponseWriter, r *http.Request) string {\n\tdb, err_open := sql.Open(\"mysql\", \"doubly_app:doubly_user1@tcp(doublydb.ct2fpvea2u25.us-west-2.rds.amazonaws.com:3306)/Doubly\")\n if err_open != nil {\n log.Fatal(err_open)\n }\n\tvar rStrings = strings.Split(r.URL.Path, \"/\")\n\tvar rUserName = rStrings[2]\n\tvar rUserEmail = rStrings[3]\n\tvar rPassword = rStrings[4]\n\tvar rDOB = rStrings[5]\n\tvar rGender = rStrings[6]\n rows, err_query := db.Query(\"SELECT * FROM Users WHERE Users.Email = '\" + rUserEmail + \"'\")\n\tdefer rows.Close()\n if err_query != nil {\n panic(err_query.Error())\n }\n\tvar count = 0\n\tfor rows.Next() {\n\t\tcount++\n\t}\n\tif count > 0 {\n\t\treturn \"{\\\"Error\\\":\\\"UserExists\\\"}\"\n\t}\n\tresults_insert, err_insert := db.Exec(\"INSERT INTO Users(UserName, Email, Password, DOB, Gender) VALUES ('\" + rUserName + \"', '\" + rUserEmail + \"', '\" + rPassword + \"', \" + rDOB + \", '\" + rGender + \"')\")\n\tif err_insert != nil {\n\t\tpanic(err_insert.Error())\n\t}\n\tlastInsertedID, err_last_id := results_insert.LastInsertId()\n\tif err_last_id != nil {\n\t\tprintln(\"Error: UserID not found\")\n\t\tpanic(err_last_id.Error())\n\t\treturn \"{\\\"Error\\\":\\\"UserID Not 
Found\\\"}\"\n\t}\n\treturn \"{\\\"UserID\\\":\\\"\" + strconv.FormatInt(lastInsertedID, 10) + \"\\\"}\"\n}", "func createUser(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar user User\n\tif err := json.NewDecoder(r.Body).Decode(&user); err != nil {\n\t\tpanic(err)\n\t}\n\t//Todo (Farouk): Mock ID - not safe\n\tuser.ID = strconv.Itoa(rand.Intn(1000000))\n\tusers = append(users, user)\n}", "func signupHandler(usr string, pass string) string {\n\t// query for the number of users with the passed in username\n\tquery := \"SELECT count(*) FROM users WHERE username = ?;\"\n\thashed_usr := Hash1(usr)\n\trows := QueryDB(query, hashed_usr)\n\trows.Next()\n\tvar count int\n\t// scan the result\n\terr = rows.Scan(&count)\n\tif err != nil {\n\t\tfmt.Println(\"ERROR: querying for number of users with a given username (my_server.go: signupHandler)\")\n\t\tfmt.Println(err)\n\t\treturn \"\";\n\t}\n\trows.Close()\n\t// make sure that username is unique\n\tif count == 0 {\n\t\t// generate a per user salt\n\t\tsalt := GenerateRandomString()\n\t\t// hash the password with the generated salt\n\t\thashed_pass := Hash256(pass, salt)\n\t\t// make the RC4 key for the user\n\t\tkey := GenerateRandomString()\n\t\tfor KeyExists(key) {\n\t\t\tkey = GenerateRandomString()\n\t\t}\n\t\t// insert the information into the DB\n\t\tquery := \"INSERT INTO users VALUES (?,?,?,?);\"\n\t\t// make a call to execute the query\n\t\tExecDB(query, hashed_usr, hashed_pass, salt, key)\n\t\t// mkdir for new user\n\t\tencrypted := EncryptString(hashed_usr, key)\n\t\tpath := GetBaseDir(encrypted)\n\t\tusr_info := []string{hashed_usr, hashed_pass, salt, key}\n\t\ttoken := NewUserSignIn(path, usr, usr_info)\n\t\treturn token\n\t}\n\treturn \"\"\n}", "func DummyUsers(db *sqlx.DB) {\n\t//db.MustExec(userSchema)\n\tu1 := new(mUsers)\n\tu1.Name = \"George\"\n\tu1.Username = \"210978\"\n\tu1.PasswordHash = \"hkis210978\"\n\t_ = u1.CreateUser(db)\n\t//if err != nil {\n\t//\tlog.Fatal(err)\n\t//}\n\tu2 := new(mUsers)\n\tu2.Name = \"John\"\n\tu2.Username = \"teacher\"\n\tu2.PasswordHash = \"Yes,papa!\"\n\t_ = u2.CreateUser(db)\n\t//if err != nil {\n\t//\tlog.Fatal(err)\n\t//}\n\n\t//db.MustExec(PostSchema)\n\tfor i := 0; i < 10; i++ {\n\t\tp := new(mPost)\n\t\tp.OwnerID = 1\n\t\tvar s = fmt.Sprintf(\"George posts %d\", i)\n\t\tp.Text = &s\n\t\te := p.CreatePost(db)\n\t\tif e != nil {\n\t\t\treturn\n\t\t}\n\t}\n\t//fmt.Println(u1.GetPosts(db))\n\n\tvar n = mNode{\n\t\tID: 8,\n\t}\n\tfmt.Println(n.GetParents(2, db))\n\n\tn.ID = 5\n\tfmt.Println(n.GetChildren(1, db))\n\n\tgrp1 := &Group{Name: \"Group one\"}\n\t//err := grp1.CreateGroup(db)\n\t//if err != nil {\n\t//\tlog.Println(err)\n\t//}\n\n\t//err = u1.AddToGroup(grp1.ID, db)\n\t//if err != nil {\n\t//\tlog.Println(err)\n\t//\treturn\n\t//}\n\t//_ = u2.AddToGroup(grp1.ID, db)\n\n\t//ids, err := grp1.GetUsersID(db)\n\t//fmt.Println(ids, err)\n\tgrp1.ID = 10\n\tuserDetails, err := grp1.GetUserDetails(db)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn\n\t}\n\tfmt.Println(userDetails)\n\n\t//fmt.Println(grp1.GetUserDetails(db))\n\t//n.ID = 15\n\t//n.CreateChild(\"child\", db)\n\t//n.CreateChild(\"child\", db)\n\t//n.CreateChild(\"child\", db)\n\t//_, err = u1.GetPosts(db)\n\t//db.MustExec(NodeSchema)\n\t//n := new(mNode)\n\t//n.Name.String = \"ROOT\"\n\t//n.ParentID.Int64 = -1\n\t//n.Children.Bool = true\n\t//err = n.CreateNode(db)\n\t//if err != nil {\n\t//\tlog.Println(err)\n\t//}\n\t//\n\t//nq := new(mNode)\n\t//nq.ID.Int64 = 
1\n\t//nq, err = nq.GetNode(db)\n\t//if err != nil {\n\t//\tlog.Fatal(err)\n\t//}\n\t// db.MustExec(GroupSchema)\n\t// db.MustExec(GroupUserSchema)\n}", "func (h UserHook) OnInsert(ctx context.Context, items []*resource.Item) error {\n\tfor _, i := range items {\n\t\temail := i.GetField(\"email\")\n\t\tpassword := i.GetField(\"password\")\n\t\tif email == nil || password == nil {\n\t\t\t// We dont allow users with blank email or passwords, deny if blank.\n\t\t\treturn rest.ErrUnauthorized\n\t\t}\n\n\t\t// Determine if a user with this email address already exists\n\t\tuserList, err := utils.FindUser(ctx, h.UserResource, email.(string))\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error finding user: %s\\n\", err)\n\t\t\treturn err\n\t\t}\n\t\tif len(userList.Items) > 0 {\n\t\t\t// @todo add details for deny to body... user already exists\n\t\t\treturn rest.ErrConflict\n\t\t}\n\t}\n\treturn nil\n}", "func (db *DataBase) Register(user *models.UserPrivateInfo) (userID int, err error) {\n\n\tvar (\n\t\ttx *sql.Tx\n\t)\n\n\tif tx, err = db.Db.Begin(); err != nil {\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tif userID, err = db.createPlayer(tx, user); err != nil {\n\t\treturn\n\t}\n\n\tif err = db.createRecords(tx, userID); err != nil {\n\t\treturn\n\t}\n\n\terr = tx.Commit()\n\treturn\n}", "func Up_20150324152625(txn *sql.Tx) {\n\tquery := `\nCREATE OR REPLACE FUNCTION kullo_random_id()\n RETURNS text AS\n$BODY$\nSELECT substr(translate( -- truncate after replacing + by - and / by _\n\tencode(decode( -- transcode hex -> base64\n\t\tmd5( -- each call to random() yields ~47b of randomness\n\t\t\tto_char(random(), '0.99999999999999') ||\n\t\t\tto_char(random(), '0.99999999999999')\n\t\t),\n\t\t'hex'), 'base64'),\n\t'+/', '-_'), 1, 16) -- 16 base64 chars = 12 bytes = 96b\n$BODY$\n LANGUAGE sql VOLATILE\n COST 100;\n\nALTER TABLE users ADD COLUMN weblogin_username varchar(16) NOT NULL DEFAULT kullo_random_id() UNIQUE;\nALTER TABLE users ADD COLUMN weblogin_secret varchar(16) NOT NULL DEFAULT kullo_random_id();\n`\n\t_, err := txn.Exec(query)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (wu *WxUser) Insert(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\tvar res sql.Result\n\t// if already exist, bail\n\tif wu._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetWxUserTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// sql insert query, primary key provided by autoincrement\n\tsqlstr := `INSERT INTO ` + tableName +\n\t\t` (` +\n\t\t`openid, session_id, unionid, appid, uid, gcode, tcode, tuid, tfid, tsid, session_key, expires_in, nickName, gender, avatarUrl, city, province, country, created, updated` +\n\t\t`) VALUES (` +\n\t\t`?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?` +\n\t\t`)`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, wu.Openid, wu.SessionID, wu.Unionid, wu.Appid, wu.UID, wu.Gcode, wu.Tcode, wu.Tuid, wu.Tfid, wu.Tsid, wu.SessionKey, wu.ExpiresIn, wu.Nickname, wu.Gender, wu.Avatarurl, wu.City, wu.Province, wu.Country, wu.Created, wu.Updated)))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif tx != nil {\n\t\tres, err = tx.Exec(sqlstr, wu.Openid, wu.SessionID, wu.Unionid, wu.Appid, wu.UID, wu.Gcode, wu.Tcode, wu.Tuid, wu.Tfid, wu.Tsid, wu.SessionKey, 
wu.ExpiresIn, wu.Nickname, wu.Gender, wu.Avatarurl, wu.City, wu.Province, wu.Country, wu.Created, wu.Updated)\n\t} else {\n\t\tres, err = dbConn.Exec(sqlstr, wu.Openid, wu.SessionID, wu.Unionid, wu.Appid, wu.UID, wu.Gcode, wu.Tcode, wu.Tuid, wu.Tfid, wu.Tsid, wu.SessionKey, wu.ExpiresIn, wu.Nickname, wu.Gender, wu.Avatarurl, wu.City, wu.Province, wu.Country, wu.Created, wu.Updated)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// retrieve id\n\tid, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set primary key and existence\n\twu.ID = int(id)\n\twu._exists = true\n\n\treturn nil\n}", "func CreateUser(w http.ResponseWriter, r *http.Request){\n\n\t\tu := User{}\n\n\t\terr:= json.NewDecoder(r.Body).Decode(&u)\n\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// Checks if name is Empty\n\t\tfmt.Printf(\"name: [%+v]\\n\", u.Name)\n\t\tif u.Name == \"\" {\n\t\t\tfmt.Println(\"Empty string\")\n\t\t\tw.Write([]byte(`{\"status\":\"Invalid Name\"}`))\n\t\t\treturn\n\t\t}\n\n\n\t\t//start validation for username\n\t\tvar isStringAlphabetic = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9]*$`).MatchString\n\t\tif !isStringAlphabetic(u.Name){\n\t\t\tfmt.Println(\"is not alphanumeric\")\n\t\t\tw.Write([]byte(`{\"status\":\"Invalid Name\"}`))\n\t\t\treturn\n\t\t}\n\n\t\t//make the Name Uppercase\n\t\tu.Name = strings.ToUpper(u.Name)\n\n\t\t// check if username already exists\n\t\tuser := userExist(u.Name)\n\t\tif user != (User{}) {\n\t\t\tfmt.Println(\"Name already exists\")\n\t\t\tw.Write([]byte(`{\"status\":\"Name Exists\"}`))\n\t\t\treturn\n\t\t}\n\n\t\t//if it does exist create the user with a random ID and score = 0\n\t\tuuid, err := uuid.NewV4()\n\t\tu.ID = uuid.String()\n\t\tu.Score = 0\n\n\t\tquery := \"INSERT INTO users (id, name, score) VALUES ($1, $2, $3);\"\n\t\t_, err = db.Exec(query, u.ID, u.Name, u.Score);\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(201)\n\t\tjson.NewEncoder(w).Encode(u)\n\n}", "func handleSignUp(w http.ResponseWriter, r *http.Request) {\n\tif parseFormErr := r.ParseForm(); parseFormErr != nil {\n\t\thttp.Error(w, \"Sent invalid form\", 400)\n\t}\n\n\tname := r.FormValue(\"name\")\n\tuserHandle := r.FormValue(\"userHandle\")\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\n\tif !verifyUserHandle(userHandle) {\n\t\thttp.Error(w, \"Invalid userHandle\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif !verifyEmail(email) {\n\t\thttp.Error(w, \"Invalid email\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif !verifyPassword(password) {\n\t\thttp.Error(w, \"Password does not meet complexity requirements\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\thashed, _ := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\n\turChannel := make(chan *database.InsertResponse)\n\tgo createUser(\n\t\tmodel.User{Name: name, UserHandle: userHandle, Email: email, Password: hashed},\n\t\turChannel,\n\t)\n\tcreatedUser := <-urChannel\n\n\tif createdUser.Err != nil {\n\t\tlog.Println(createdUser.Err)\n\n\t\tif strings.Contains(createdUser.Err.Error(), \"E11000\") {\n\t\t\tif strings.Contains(createdUser.Err.Error(), \"index: userHandle_1\") {\n\t\t\t\thttp.Error(w, \"Userhandle \"+userHandle+\" already registered\", http.StatusConflict)\n\t\t\t} else {\n\t\t\t\thttp.Error(w, \"Email \"+email+\" already registered\", http.StatusConflict)\n\t\t\t}\n\t\t} else 
{\n\t\t\tcommon.SendInternalServerError(w)\n\t\t}\n\n\t} else {\n\t\tlog.Println(\"Created user with ID \" + createdUser.ID)\n\t\tw.WriteHeader(http.StatusOK)\n\t\t_, wError := w.Write([]byte(\"Created user with ID \" + createdUser.ID))\n\n\t\tif wError != nil {\n\t\t\tlog.Println(\"Error while writing: \" + wError.Error())\n\t\t}\n\t}\n\n}", "func (r *Repository) UsersInsert(o *User) error {\n\n\tif hash, err := crypto.HashPassword(o.Password); err != nil {\n\t\treturn err\n\t} else {\n\t\to.PasswordHash = hash\n\t\to.Password = \"\"\n\t}\n\n\to.CreatedAt = now()\n\to.UpdatedAt = o.CreatedAt\n\n\tstmt, err := r.db.PrepareNamed(\n\t\t\"INSERT INTO users (name, email, password_hash, extra, is_activated, created_at, updated_at) \" +\n\t\t\t\"VALUES(:name, :email, :password_hash, :extra, :is_activated, :created_at, :updated_at) \" +\n\t\t\t\"RETURNING id\")\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn stmt.QueryRowx(o).Scan(&o.ID)\n}", "func CreateUserHandler(w http.ResponseWriter, req *http.Request) {\n // Validate internal token.\n if internalToken := req.Header.Get(app.Config.AuthHeaderName); internalToken != app.Config.RestApiToken {\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n\n // Parse & validate payload.\n var pl payload.CreateUserPayload\n\n if !pl.Validate(req) {\n respond.Error(w, errmsg.InvalidPayload())\n return\n }\n\n // Check if the executor is using the USER_CREATION_HASH to create this user.\n usingUserCreationPw := pl.ExecutorEmail == \"\" && app.Config.UserCreationHash != \"\" &&\n crypt.VerifySha256(pl.ExecutorPassword, app.Config.UserCreationHash)\n\n // If not using USER_CREATION_HASH for auth, verify executor exists using email/pw.\n if !usingUserCreationPw {\n // Get executor user by email.\n executorUser, err := usersvc.FromEmail(pl.ExecutorEmail)\n\n if err != nil {\n app.Log.Errorln(err.Error())\n respond.Error(w, errmsg.UserNotFound())\n return\n }\n\n // Ensure executor user's password is correct.\n if !crypt.VerifyBcrypt(pl.ExecutorPassword, executorUser.HashedPw) {\n app.Log.Errorln(\"error creating new User: invalid executor user password\")\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n\n // Only admin users can create other users.\n if !executorUser.Admin {\n app.Log.Errorln(\"error creating new User: executor user must be an admin\")\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n }\n\n // Hash provided user password.\n hashedPw, err := crypt.BcryptHash(pl.NewPassword)\n\n if err != nil {\n app.Log.Errorf(\"error creating new User: bcrypt password hash failed with %s\\n\", err.Error())\n respond.Error(w, errmsg.ISE())\n return\n }\n\n // Create new User.\n newUser, err := usersvc.Create(pl.NewEmail, hashedPw, pl.Admin)\n\n if err != nil {\n app.Log.Errorln(err.Error())\n pqError, ok := err.(*pq.Error)\n\n if ok && pqError.Code.Name() == \"unique_violation\" {\n respond.Error(w, errmsg.EmailNotAvailable())\n } else {\n respond.Error(w, errmsg.UserCreationFailed())\n }\n\n return\n }\n\n // Create response payload and respond.\n respData := successmsg.UserCreationSuccess\n respData[\"uid\"] = newUser.Uid\n\n respond.Created(w, respData)\n}", "func UserRegister(res http.ResponseWriter, req *http.Request) {\n\n\t// get user form from user register form\n\t// insert data to DB\n\t// First step would be Firstname, lastname and password..\n\t/*\n\t* encrypting password from frontend and decrypt at this end...\n\t* Password matching ( re entering)\n\t* Inserting to db ( firstname,lastname,email,password,registered_at)\n\t 
*/\n\n\trequestID := req.FormValue(\"uid\")\n\tfirstName := req.FormValue(\"first_name\")\n\tlastName := req.FormValue(\"last_name\")\n\temail := req.FormValue(\"email\")\n\tpassword := req.FormValue(\"password\")\n\n\tlogs.WithFields(logs.Fields{\n\t\t\"Service\": \"User Service\",\n\t\t\"package\": \"register\",\n\t\t\"function\": \"UserRegister\",\n\t\t\"uuid\": requestID,\n\t\t\"email\": email,\n\t}).Info(\"Received data to insert to users table\")\n\n\t// check user entered same email address\n\thasAccount := Checkmail(email, requestID)\n\n\tif hasAccount != true {\n\n\t\tdb := dbConn()\n\n\t\t// Inserting token to login_token table\n\t\tinsertUser, err := db.Prepare(\"INSERT INTO users (email,first_name,last_name,password) VALUES(?,?,?,?)\")\n\t\tif err != nil {\n\t\t\tlogs.WithFields(logs.Fields{\n\t\t\t\t\"Service\": \"User Service\",\n\t\t\t\t\"package\": \"register\",\n\t\t\t\t\"function\": \"UserRegister\",\n\t\t\t\t\"uuid\": requestID,\n\t\t\t\t\"Error\": err,\n\t\t\t}).Error(\"Couldnt prepare insert statement for users table\")\n\t\t}\n\t\tinsertUser.Exec(email, firstName, lastName, password)\n\n\t\t// Inserting email to emails table\n\n\t\tinsertEmail, err := db.Prepare(\"INSERT INTO emails (email,isActive) VALUES(?,?)\")\n\t\tif err != nil {\n\t\t\tlogs.WithFields(logs.Fields{\n\t\t\t\t\"Service\": \"User Service\",\n\t\t\t\t\"package\": \"register\",\n\t\t\t\t\"function\": \"UserRegister\",\n\t\t\t\t\"uuid\": requestID,\n\t\t\t\t\"Error\": err,\n\t\t\t}).Error(\"Couldnt prepare insert statement for emails table\")\n\t\t}\n\t\tinsertEmail.Exec(email, 1)\n\n\t\t_, err = http.PostForm(\"http://localhost:7070/response\", url.Values{\"uid\": {requestID}, \"service\": {\"User Service\"},\n\t\t\t\"function\": {\"UserRegister\"}, \"package\": {\"Register\"}, \"status\": {\"1\"}})\n\n\t\tif err != nil {\n\t\t\tlog.Println(\"Error response sending\")\n\t\t}\n\n\t\tdefer db.Close()\n\t\treturn\n\t} // user has an account\n\n\tlogs.WithFields(logs.Fields{\n\t\t\"Service\": \"User Service\",\n\t\t\"package\": \"register\",\n\t\t\"function\": \"UserRegister\",\n\t\t\"uuid\": requestID,\n\t\t\"email\": email,\n\t}).Error(\"User has an account for this email\")\n\n\t_, err := http.PostForm(\"http://localhost:7070/response\", url.Values{\"uid\": {requestID}, \"service\": {\"User Service\"},\n\t\t\"function\": {\"sendLoginEmail\"}, \"package\": {\"Check Email\"}, \"status\": {\"0\"}})\n\n\tif err != nil {\n\t\tlog.Println(\"Error response sending\")\n\t}\n}", "func (s *GodrorStorage) InsertUserByFields(db XODB, u *User) error {\n\tvar err error\n\n\tparams := make([]interface{}, 0, 5)\n\tfields := make([]string, 0, 5)\n\tretFields := make([]string, 0, 5)\n\tretFields = append(retFields, `\"id\"`)\n\tretVars := make([]interface{}, 0, 5)\n\tretVars = append(retVars, sql.Out{Dest: &u.ID})\n\n\tfields = append(fields, `\"subject\"`)\n\tparams = append(params, RealOracleEmptyString(u.Subject))\n\n\tif u.Name.Valid {\n\t\tfields = append(fields, `\"name\"`)\n\t\tparams = append(params, RealOracleNullString(u.Name))\n\n\t} else {\n\t\tretFields = append(retFields, `\"name\"`)\n\t\tretVars = append(retVars, sql.Out{Dest: &u.Name.String})\n\n\t}\n\tif u.CreatedDate.Valid {\n\t\tfields = append(fields, `\"created_date\"`)\n\t\tparams = append(params, u.CreatedDate)\n\n\t} else {\n\t\tretFields = append(retFields, `\"created_date\"`)\n\t\tretVars = append(retVars, sql.Out{Dest: &u.CreatedDate})\n\n\t}\n\tif u.ChangedDate.Valid {\n\t\tfields = append(fields, `\"changed_date\"`)\n\t\tparams = 
append(params, u.ChangedDate)\n\n\t} else {\n\t\tretFields = append(retFields, `\"changed_date\"`)\n\t\tretVars = append(retVars, sql.Out{Dest: &u.ChangedDate})\n\n\t}\n\tif u.DeletedDate.Valid {\n\t\tfields = append(fields, `\"deleted_date\"`)\n\t\tparams = append(params, u.DeletedDate)\n\n\t} else {\n\t\tretFields = append(retFields, `\"deleted_date\"`)\n\t\tretVars = append(retVars, sql.Out{Dest: &u.DeletedDate})\n\n\t}\n\n\tif len(params) == 0 {\n\t\t// FIXME(jackie): maybe we should allow this?\n\t\treturn errors.New(\"all fields are empty, unable to insert\")\n\t}\n\tparams = append(params, retVars...)\n\n\tvar fieldsPlaceHolders []string\n\tfor i := range fields {\n\t\tfieldsPlaceHolders = append(fieldsPlaceHolders, \":\"+strconv.Itoa(i+1))\n\t}\n\tvar retFieldsPlaceHolders []string\n\tfor i := range retFields {\n\t\tretFieldsPlaceHolders = append(retFieldsPlaceHolders, \":\"+strconv.Itoa(len(fieldsPlaceHolders)+i+1))\n\t}\n\n\tsqlstr := `INSERT INTO \"AC\".\"user\" (` +\n\t\tstrings.Join(fields, \",\") +\n\t\t`) VALUES (` + strings.Join(fieldsPlaceHolders, \",\") +\n\t\t`) RETURNING ` + strings.Join(retFields, \",\") +\n\t\t` INTO ` + strings.Join(retFieldsPlaceHolders, \",\")\n\n\t// run query\n\ts.Logger.Info(sqlstr, params)\n\t_, err = db.Exec(sqlstr, params...)\n\tif err != nil {\n\t\treturn err\n\t}\n\tFixRealOracleEmptyString(&u.Subject)\n\n\tif !u.Name.Valid {\n\t\tFixRealOracleNullString(&u.Name)\n\t}\n\n\treturn nil\n}", "func InsertRegister(object models.User) (string, bool, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//When end instruction remove timeout operation and liberate context\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\t//Set password encrypted\n\tpassWordEncrypted, _ := utils.EcryptPasswordUtil(object.Password)\n\tobject.Password = passWordEncrypted\n\n\tresult, err := collection.InsertOne(ctx, object)\n\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\t//Get id of created object\n\tObjectID, _ := result.InsertedID.(primitive.ObjectID)\n\n\t//Return created object id\n\treturn ObjectID.String(), true, nil\n\n}", "func Register(w http.ResponseWriter, r *http.Request) {\n\tt:= models.Users{}\n\n\terr := json.NewDecoder(r.Body).Decode(&t)\n\n\tif err != nil {\n\t\thttp.Error(w, \"Error en los datos recibidos \"+err.Error(), 400)\n\t\treturn\n\t}\n\tif len(t.Login) < 6 {\n\t\thttp.Error(w, \"Error en los datos recibidos, ingrese un login mayor a 5 digitos \", 400)\n\t\treturn\n\t}\n\tif len(t.Password) < 6 {\n\t\thttp.Error(w, \"Ingrese una contraseña mayor a 5 digitos \", 400)\n\t\treturn\n\t}\n\n\t_, found, _ := bd.CheckUser(t.Login)\n\tif found == true {\n\t\thttp.Error(w, \"Ya existe un usuario registrado con ese login\", 400)\n\t\treturn\n\t}\n\n\tif t.Id_role == 3 {\n\t\tcod := bd.CodFamiliar(t.Cod_familiar)\n\t\tif cod == false {\n\t\t\thttp.Error(w, \"Debe ingresar un codigo de familia correcto\", 400)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif t.Id_role == 1 {\n\t\thttp.Error(w, \"Usted no esta autorizado para crear este tipo de usuario\", 400)\n\t\treturn\n\t}\n\n\t_, status, err := bd.InsertRegister(t)\n\tif err != nil {\n\t\thttp.Error(w, \"Ocurrió un error al intentar realizar el registro de usuario \"+err.Error(), 400)\n\t\treturn\n\t}\n\n\tif status == false {\n\t\thttp.Error(w, \"No se ha logrado insertar el registro del usuario\", 400)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusCreated)\n}", "func insertUser(username string, 
password string, kind int) bool {\n\tresult, err := mysql_client.Exec(\"INSERT INTO User(username, password, kind) VALUES(?,?,?)\", username, password, kind)\n\tif err != nil {\n\t\t// insert failed\n\t\treturn false\n\t}\n\t_, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn false\n\t}\n\t_, err = result.RowsAffected()\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "func (r *RedisConnection) NewUser(l *models.User) (int, error) {\n\tconn, err := r.GetOneConnection()\n\tif err != nil {\n\t\tlogger.Error.Println(\"Failed to retrieve a connection\")\n\t\treturn -1, err\n\t}\n\tdefer r.PutBackConnection(conn)\n\n\tnewUserID, errIncr := conn.Cmd(\"INCR\", userIDCounterPrefix).Str()\n\tif errIncr != nil {\n\t\tlogger.Error.Println(\"Failed to incremenet number of users\")\n\t\treturn -1, errIncr\n\t}\n\n\tif ok, err1 := conn.Cmd(\"MULTI\").Str(); strings.ToLower(ok) != \"ok\" {\n\t\tlogger.Error.Println(\"Cannot execute commands now\")\n\t\treturn -1, err1\n\t}\n\n\tif queued, err2 := conn.Cmd(\"SADD\", usersListName, userKeyPrefix+newUserID).Str(); strings.ToLower(queued) != queuedKeyword {\n\t\tlogger.Error.Println(\"Error Queuing command SADD\", usersListName, userKeyPrefix+newUserID)\n\t\treturn -1, err2\n\t}\n\n\tif queued, err3 := conn.Cmd(\"HMSET\", userKeyPrefix+newUserID, userID, newUserID, userName, l.Name, userDepartmentID, l.DepartmentID,\n\t\tuserRemainingAnnualLeaves, l.RemainingAnnualLeaves).Str(); strings.ToLower(queued) != queuedKeyword {\n\t\tlogger.Error.Println(\"Error Queuing command HMSET\", userKeyPrefix+newUserID, userID, newUserID, userName, l.Name, userDepartmentID, l.DepartmentID,\n\t\t\tuserRemainingAnnualLeaves, l.RemainingAnnualLeaves)\n\t\treturn -1, err3\n\t}\n\n\tresult := conn.Cmd(\"EXEC\")\n\tif result.Err != nil {\n\t\tlogger.Trace.Println(\"Error while executing commands\")\n\t\treturn -1, result.Err\n\t}\n\treturn 1, nil\n}", "func TestSaveUser(t *testing.T) {\n\tDelete(context.Background())\n\twg := sync.WaitGroup{}\n\tfor i:=0 ; i < 10 ; i++ {\n\t\twg.Add(1)\n\t\tgo func(v int) {\n\t\t\tdefer wg.Done()\n\n\t\t\tuser := model.User{\n\t\t\t\tUsername: \"test1_\"+strconv.Itoa(v)+\"@gmail.com\",\n\t\t\t\tPassword: \"1234\",\n\t\t\t\tFirstName: \"us\"+strconv.Itoa(v),\n\t\t\t\tLastName: \"er\"+strconv.Itoa(v),\n\t\t\t}\n\t\t\terr := SaveUser(user,context.Background())\n\t\t\tif err != nil {\n\t\t\t\tt.Error(\"Problem in adding user\")\n\t\t\t}\n\t\t}(i)\n\t}\n\twg.Wait()\n\tres,_ := GetAll(context.Background())\n\tres = res[:len(res)-1]\n\tt.Log(res)\n\tresSlice := strings.Split(res,\",\")\n\tif len(resSlice) == 10{\n\t\tt.Log(\"Test SaveUser successful\")\n\t}else {\n\t\tt.Error(\"Test SaveUser unsuccessful\")\n\t}\n\n}", "func RegisterNewUser(user *model.RegisterUserRequest) error {\n\tdb, err := connectDB()\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn errors.New(\"Cannat connect to database\")\n\t}\n\n\tdefer db.Close()\n\tinsertStmt := `INSERT INTO users.members \n\t\t\t\t\t\t(p_token, username, firstname, lastname, email, phone) \n\t\t\t\t VALUES \n\t\t\t\t\t\t($1, $2, $3, $4, $5, $6);`\n\n\t_, err = db.Exec(insertStmt, user.PToken, user.UserName, user.FirstName, user.LastName, user.Email, user.Phone)\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn errors.New(\"Cannot register new user\")\n\t}\n\n\treturn nil\n}", "func MassInsert_GroupOrderdUser(rows []GroupOrderdUser, db XODB) error {\n\tif len(rows) == 0 {\n\t\treturn errors.New(\"rows slice should not be empty - inserted nothing\")\n\t}\n\tvar err 
error\n\tln := len(rows)\n\n\t// insVals_:= strings.Repeat(s, ln)\n\t// insVals := insVals_[0:len(insVals_)-1]\n\tinsVals := helper.SqlManyDollars(3, ln, true)\n\t// sql query\n\tsqlstr := \"INSERT INTO sun_chat.group_orderd_user (\" +\n\t\t\"OrderId, GroupId, UserId\" +\n\t\t\") VALUES \" + insVals\n\n\t// run query\n\tvals := make([]interface{}, 0, ln*5) //5 fields\n\n\tfor _, row := range rows {\n\t\t// vals = append(vals,row.UserId)\n\t\tvals = append(vals, row.OrderId)\n\t\tvals = append(vals, row.GroupId)\n\t\tvals = append(vals, row.UserId)\n\n\t}\n\n\tif LogTableSqlReq.GroupOrderdUser {\n\t\tXOLog(sqlstr, \" MassInsert len = \", ln, vals)\n\t}\n\t_, err = db.Exec(sqlstr, vals...)\n\tif err != nil {\n\t\tif LogTableSqlReq.GroupOrderdUser {\n\t\t\tXOLogErr(err)\n\t\t}\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func InsertAccount(r *http.Request, email, hashedPassword string, aUUID uuid.UUID, firstName, lastName string, isUser, isMiner bool, newUserCredit int) error {\n\tctx := r.Context()\n\ttx, txerr := db.BeginTx(ctx, nil)\n\tif message, err := func() (string, error) {\n\t\tif txerr != nil {\n\t\t\treturn errBeginTx, txerr\n\t\t}\n\n\t\tsqlStmt := `\n\t\tINSERT INTO accounts (uuid, email, password, first_name, last_name, credit)\n\t\tVALUES ($1, $2, $3, $4, $5, $6)\n\t\t`\n\t\tif _, err := tx.Exec(sqlStmt, aUUID, email, hashedPassword, firstName, lastName, newUserCredit); err != nil {\n\t\t\treturn \"error inserting account\", err\n\t\t}\n\n\t\tif isUser {\n\t\t\tsqlStmt = `\n\t\t\tINSERT INTO users (uuid)\n\t\t\tVALUES ($1)\n\t\t\t`\n\t\t\tif _, err := tx.Exec(sqlStmt, aUUID); err != nil {\n\t\t\t\treturn \"error inserting user\", err\n\t\t\t}\n\t\t}\n\n\t\tif isMiner {\n\t\t\tsqlStmt = `\n\t\t\tINSERT INTO miners (uuid)\n\t\t\tVALUES ($1)\n\t\t\t`\n\t\t\tif _, err := tx.Exec(sqlStmt, aUUID); err != nil {\n\t\t\t\treturn \"error inserting miner\", err\n\t\t\t}\n\t\t}\n\n\t\tif err := tx.Commit(); err != nil {\n\t\t\treturn errCommitTx, err\n\t\t}\n\n\t\treturn \"\", nil\n\t}(); err != nil {\n\t\tpqErr, ok := err.(*pq.Error)\n\t\tif ok {\n\t\t\tlog.Sugar.Errorw(message,\n\t\t\t\t\"method\", r.Method,\n\t\t\t\t\"url\", r.URL,\n\t\t\t\t\"err\", err.Error(),\n\t\t\t\t\"aID\", aUUID,\n\t\t\t\t\"email\", email,\n\t\t\t\t\"pq_sev\", pqErr.Severity,\n\t\t\t\t\"pq_code\", pqErr.Code,\n\t\t\t\t\"pq_msg\", pqErr.Message,\n\t\t\t\t\"pq_detail\", pqErr.Detail,\n\t\t\t)\n\t\t\tif pqErr.Code == errEmailExistsCode {\n\t\t\t\treturn ErrEmailExists\n\t\t\t} else if pqErr.Code == errNullViolationCode {\n\t\t\t\treturn ErrNullViolation\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Sugar.Errorw(message,\n\t\t\t\t\"method\", r.Method,\n\t\t\t\t\"url\", r.URL,\n\t\t\t\t\"err\", err.Error(),\n\t\t\t\t\"aID\", aUUID,\n\t\t\t\t\"email\", email,\n\t\t\t)\n\t\t}\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func signup(w http.ResponseWriter, r *http.Request) {\n\n\tif r.Method != \"POST\" {\n\n\t\tvar data = map[string]interface{}{\n\t\t\t\"Title\": \"Sign Up\",\n\t\t}\n\n\t\terr := templates.ExecuteTemplate(w, \"signup.html\", data)\n\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t}\n\t\treturn\n\n\t}\n\n\t// Define user registration info.\n\tusername := r.FormValue(\"username\")\n\tnickname := r.FormValue(\"nickname\")\n\tavatar := r.FormValue(\"avatar\")\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\tip := r.Header.Get(\"X-Forwarded-For\")\n\tlevel := \"0\"\n\trole := \"0\"\n\tlast_seen := time.Now()\n\tcolor := \"\"\n\tyeah_notifications := 
\"1\"\n\n\tusers := QueryUser(username)\n\n\tif (user{}) == users {\n\t\tif len(username) > 32 || len(username) < 3 {\n\t\t\thttp.Error(w, \"invalid username length sorry br0o0o0o0o0o0\", http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// Let's hash the password. We're using bcrypt for this.\n\t\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\n\t\tif len(hashedPassword) != 0 && checkErr(w, r, err) {\n\n\t\t\t// Prepare the statement.\n\t\t\tstmt, err := db.Prepare(\"INSERT users SET username=?, nickname=?, avatar=?, email=?, password=?, ip=?, level=?, role=?, last_seen=?, color=?, yeah_notifications=?\")\n\t\t\tif err == nil {\n\n\t\t\t\t// If there's no errors, we can go ahead and execute the statement.\n\t\t\t\t_, err := stmt.Exec(&username, &nickname, &avatar, &email, &hashedPassword, &ip, &level, &role, &last_seen, &color, &yeah_notifications)\n\t\t\t\tif err != nil {\n\n\t\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\t\treturn\n\n\t\t\t\t}\n\t\t\t\tusers := QueryUser(username)\n\n\t\t\t\tuser := users.ID\n\t\t\t\tcreated_at := time.Now()\n\t\t\t\tnnid := \"\"\n\t\t\t\tgender := 0\n\t\t\t\tregion := \"\" // ooh what if we replace this with a country from a GeoIP later????????????????\n\t\t\t\tcomment := \"\"\n\t\t\t\tnnid_visibility := 1\n\t\t\t\tyeah_visibility := 1\n\t\t\t\treply_visibility := 0\n\n\t\t\t\tstmt, err := db.Prepare(\"INSERT profiles SET user=?, created_at=?, nnid=?, gender=?, region=?, comment=?, nnid_visibility=?, yeah_visibility=?, reply_visibility=?\")\n\t\t\t\tif err != nil {\n\t\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\t_, err = stmt.Exec(&user, &created_at, &nnid, &gender, &region, &comment, &nnid_visibility, &yeah_visibility, &reply_visibility)\n\t\t\t\tif err != nil {\n\t\t\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tsession := sessions.Start(w, r)\n\t\t\t\tsession.Set(\"username\", users.Username)\n\t\t\t\tsession.Set(\"user_id\", users.ID)\n\t\t\t\thttp.Redirect(w, r, \"/\", 302)\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\thttp.Redirect(w, r, \"/signup\", 302)\n\n\t\t}\n\n\t}\n\n}", "func Create(nickname, email, password string) (int, int) {\n\tnickname, ok := misc.ValidateString(nickname, misc.MaxLenS)\n\tif !ok {\n\t\tlog.Println(\"Wrong nickname\", nickname)\n\t\treturn 0, misc.WrongName\n\t}\n\n\temail, ok = misc.ValidateEmail(email)\n\tif !ok {\n\t\tlog.Println(\"Wrong email\", email)\n\t\treturn 0, misc.WrongEmail\n\t}\n\n\tif !misc.IsPasswordValid(password) {\n\t\tlog.Println(\"Wrong password\")\n\t\treturn 0, misc.WrongPassword\n\t}\n\n\tsalt, err := auth.GenerateSalt()\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn 0, misc.NoSalt\n\t}\n\n\thash, err := auth.PasswordHash(password, salt)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn 0, misc.NothingToReport\n\t}\n\n\tuserId, confirmationCode := 0, misc.RandomString(misc.ConfCodeLen)\n\terr = psql.Db.QueryRow(`\n\t\tINSERT INTO users (nickname, email, password, salt, confirmation_code)\n\t\tVALUES ($1, $2, $3, $4, $5)\n\t\tRETURNING id`, nickname, email, hash, salt, confirmationCode,\n\t).Scan(&userId)\n\tif err == nil {\n\t\tmailer.EmailConfirmation(email, confirmationCode)\n\t\treturn userId, misc.NothingToReport\n\t}\n\n\terr, code := psql.CheckSpecificDriverErrors(err)\n\tlog.Println(err)\n\treturn 0, code\n}", "func createSchema(db *pg.DB) (err error) {\n\t// db.AddQueryHook(dbLogger{})\n\n\trefresh 
:= false\n\n\tmodels := []interface{}{\n\t\t&structs.User{},\n\t\t&structs.Project{},\n\t\t&structs.Webhook{},\n\t\t&structs.IssueEntry{},\n\t\t&structs.Comment{},\n\t\t&structs.InviteCode{},\n\t}\n\n\tfor _, model := range models {\n\t\tif refresh {\n\t\t\tdb.Model(model).DropTable(&orm.DropTableOptions{\n\t\t\t\tIfExists: true,\n\t\t\t\tCascade: true,\n\t\t\t})\n\t\t}\n\n\t\terr = db.Model(model).CreateTable(&orm.CreateTableOptions{\n\t\t\tIfNotExists: true,\n\t\t})\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\tif refresh {\n\t\tidGen := idgenerator.NewIDGenerator(1602507674941, 0)\n\n\t\tprintln(\"Create User\")\n\t\tuser := &structs.User{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tName: \"ImRock\",\n\t\t\tAvatar: \"https://cdn.discordapp.com/avatars/143090142360371200/a_70444022ea3e5d73dd00d59c5578b07e.gif?size=1024\",\n\t\t\tUserType: structs.DiscordUser,\n\t\t\tHookID: 143090142360371200,\n\t\t\tProjectIDs: make([]int64, 0),\n\t\t\tIntegration: false,\n\t\t}\n\n\t\t_, err = db.Model(user).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create second user\")\n\t\tuser2 := &structs.User{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tName: \"biscuitcord\",\n\t\t\tAvatar: \"https://cdn.discordapp.com/avatars/164297154276360192/4c8f9b0310948cce460613081d074a13.webp?size=1024\",\n\t\t\tUserType: structs.DiscordUser,\n\t\t\tHookID: 164297154276360192,\n\t\t\tProjectIDs: make([]int64, 0),\n\t\t\tIntegration: false,\n\t\t}\n\t\t_, err = db.Model(user2).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create Project\")\n\t\tproject := &structs.Project{\n\t\t\tID: idGen.GenerateID(),\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user.ID,\n\n\t\t\tIntegrations: make([]*structs.User, 0),\n\t\t\tWebhooks: make([]*structs.Webhook, 0),\n\n\t\t\tSettings: structs.ProjectSettings{\n\t\t\t\tDisplayName: \"Welcomer\",\n\t\t\t\tURL: \"https://welcomer.gg\",\n\t\t\t\tArchived: false,\n\t\t\t\tPrivate: false,\n\t\t\t\tLimited: false,\n\t\t\t},\n\n\t\t\tStarredIssues: 0,\n\t\t\tOpenIssues: 0,\n\t\t\tActiveIssues: 0,\n\t\t\tResolvedIssues: 0,\n\t\t}\n\t\t_, err = db.Model(project).Insert()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tprintln(\"Add project to user\")\n\t\tuser.ProjectIDs = append(user.ProjectIDs, project.ID)\n\t\t_, err = db.Model(user).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Add project to user 2\")\n\t\tuser2.ProjectIDs = append(user2.ProjectIDs, project.ID)\n\t\t_, err = db.Model(user).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Add second user to contributors\")\n\t\tproject.Settings.ContributorIDs = append(project.Settings.ContributorIDs, user2.ID)\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create Webhooks\")\n\t\twebhook := &structs.Webhook{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tProjectID: project.ID,\n\t\t\tActive: false,\n\t\t\tFailures: 16,\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user.ID,\n\t\t\tURL: \"https://welcomer.gg/webhook\",\n\t\t\tType: structs.DiscordWebhook,\n\t\t\tJSONContent: true,\n\t\t\tSecret: \"\",\n\t\t}\n\t\t_, err = db.Model(webhook).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create Integration\")\n\t\tintegration := &structs.User{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tName: \"Welcomer\",\n\n\t\t\tUserType: structs.IntegrationUser,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\n\t\t\tProjectID: 
project.ID,\n\t\t\tIntegration: true,\n\t\t\tCreatedByID: user.ID,\n\t\t}\n\t\t_, err = db.Model(integration).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create user issue\")\n\t\tnow := time.Now().UTC()\n\t\tissue := &structs.IssueEntry{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tProjectID: project.ID,\n\n\t\t\tStarred: false,\n\n\t\t\tType: structs.EntryOpen,\n\t\t\tOccurrences: 1,\n\t\t\tAssigneeID: 0,\n\n\t\t\tError: \"genericError\",\n\t\t\tFunction: \"createSchema(db *pg.DB)\",\n\t\t\tCheckpoint: \"internal/api.go:147\",\n\t\t\tDescription: \"\",\n\t\t\tTraceback: \"\",\n\n\t\t\tLastModified: now,\n\n\t\t\tCreatedAt: now,\n\t\t\tCreatedByID: user.ID,\n\n\t\t\tCommentCount: 0,\n\t\t\tCommentsLocked: false,\n\t\t}\n\t\t_, err = db.Model(issue).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Increment project issue counter\")\n\t\tproject.OpenIssues++\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create user issue 2\")\n\t\tnow = time.Now().UTC()\n\t\tissue2 := &structs.IssueEntry{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tProjectID: project.ID,\n\n\t\t\tStarred: false,\n\n\t\t\tType: structs.EntryOpen,\n\t\t\tOccurrences: 5,\n\t\t\tAssigneeID: user.ID,\n\n\t\t\tError: \"panic:\",\n\t\t\tFunction: \"main.main.func1\",\n\t\t\tCheckpoint: \"main.go:11\",\n\t\t\tDescription: \"\",\n\t\t\tTraceback: \"stacktrace from panic: \\ngoroutine 1 [running]:\\nruntime/debug.Stack(0x1042ff18, 0x98b2, 0xf0ba0, 0x17d048)\\n /usr/local/go/src/runtime/debug/stack.go:24 +0xc0\\nmain.main.func1()\\n /tmp/sandbox973508195/main.go:11 +0x60\\npanic(0xf0ba0, 0x17d048)\\n /usr/local/go/src/runtime/panic.go:502 +0x2c0\\nmain.main()\\n /tmp/sandbox973508195/main.go:16 +0x60\",\n\n\t\t\tLastModified: now,\n\n\t\t\tCreatedAt: now,\n\t\t\tCreatedByID: user2.ID,\n\n\t\t\tCommentCount: 0,\n\t\t\tCommentsLocked: false,\n\t\t}\n\t\t_, err = db.Model(issue2).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Increment project issue counter\")\n\t\tproject.OpenIssues++\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create integration issue\")\n\t\tnow = time.Now().UTC()\n\t\tissue3 := &structs.IssueEntry{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tProjectID: project.ID,\n\n\t\t\tStarred: false,\n\n\t\t\tType: structs.EntryOpen,\n\t\t\tOccurrences: 1,\n\t\t\tAssigneeID: user2.ID,\n\n\t\t\tError: \"TypeError\",\n\t\t\tFunction: \"\",\n\t\t\tCheckpoint: \"\",\n\t\t\tDescription: \"can only concatenate str (not \\\"int\\\") to str\",\n\t\t\tTraceback: \"Traceback (most recent call last):\\n File \\\"<stdin>\\\", line 1, in <module>\\n File \\\"<stdin>\\\", line 2, in a\\nTypeError: can only concatenate str (not \\\"int\\\") to str\",\n\n\t\t\tLastModified: now,\n\n\t\t\tCreatedAt: now,\n\t\t\tCreatedByID: integration.ID,\n\n\t\t\tCommentCount: 0,\n\t\t\tCommentsLocked: false,\n\t\t}\n\t\t_, err = db.Model(issue3).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Increment project issue counter\")\n\t\tproject.OpenIssues++\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create user issue comment\")\n\t\tcontent := \"Test :)\"\n\t\tcomment := &structs.Comment{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tIssueID: issue.ID,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user2.ID,\n\n\t\t\tType: structs.Message,\n\t\t\tContent: 
&content,\n\t\t}\n\t\t_, err = db.Model(comment).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tissue.CommentCount++\n\n\t\tprintln(\"Create user issue comment2\")\n\t\topen := structs.EntryOpen\n\t\tcomment2 := &structs.Comment{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tIssueID: issue.ID,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user2.ID,\n\n\t\t\tType: structs.IssueMarked,\n\t\t\tIssueMarked: &open,\n\t\t}\n\t\t_, err = db.Model(comment2).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tissue.CommentCount++\n\n\t\tprintln(\"Create user issue comment3\")\n\t\topened := true\n\t\tcomment3 := &structs.Comment{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tIssueID: issue.ID,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user.ID,\n\n\t\t\tType: structs.CommentsLocked,\n\t\t\tCommentsOpened: &opened,\n\t\t}\n\t\t_, err = db.Model(comment3).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tissue.CommentCount++\n\n\t\tprintln(\"Update issue comment count\")\n\t\t_, err = db.Model(issue).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create user 2 issue comment\")\n\t\tcomment4 := &structs.Comment{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tIssueID: issue2.ID,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user.ID,\n\n\t\t\tType: structs.CommentsLocked,\n\t\t\tCommentsOpened: &opened,\n\t\t}\n\t\t_, err = db.Model(comment4).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Update issue2 comment count\")\n\t\tissue2.CommentCount++\n\t\t_, err = db.Model(issue2).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Star user issue\")\n\t\tissue2.Starred = true\n\t\t_, err = db.Model(issue2).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Update project stars\")\n\t\tproject.StarredIssues++\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Close integration issue\")\n\t\tissue3.Type = structs.EntryResolved\n\t\tissue3.LastModified = time.Now().UTC()\n\t\t_, err = db.Model(issue3).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Update project issue counter\")\n\t\tproject.ResolvedIssues++\n\t\tproject.OpenIssues--\n\t\t_, err = db.Model(project).WherePK().Update()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tprintln(\"Create close integration issue comment\")\n\t\tresolved := structs.EntryResolved\n\t\tcomment5 := &structs.Comment{\n\t\t\tID: idGen.GenerateID(),\n\t\t\tIssueID: issue3.ID,\n\n\t\t\tCreatedAt: time.Now().UTC(),\n\t\t\tCreatedByID: user.ID,\n\n\t\t\tType: structs.IssueMarked,\n\t\t\tIssueMarked: &resolved,\n\t\t}\n\t\t_, err = db.Model(comment5).Insert()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}\n\n\treturn nil\n}", "func createTable() {\n sql := `CREATE TABLE IF NOT EXISTS userinfo_tab_0 (\nid INT(11) NOT NULL AUTO_INCREMENT COMMENT 'primary key',\nusername VARCHAR(64) NOT NULL COMMENT 'unique id',\nnickname VARCHAR(128) NOT NULL DEFAULT '' COMMENT 'user nickname, can be empty',\npasswd VARCHAR(32) NOT NULL COMMENT 'md5 result of real password and key',\nskey VARCHAR(16) NOT NULL COMMENT 'secure key of each user',\nheadurl VARCHAR(128) NOT NULL DEFAULT '' COMMENT 'user headurl, can be empty',\nuptime int(64) NOT NULL DEFAULT 0 COMMENT 'update time: unix timestamp',\nPRIMARY KEY(id),\nUNIQUE KEY username_unique (username)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='user info table';`\n 
db.Exec(sql)\n for i := 1; i < 20; i++ {\n tableName := fmt.Sprintf(\"userinfo_tab_%d\", i)\n db.Exec(fmt.Sprintf(\"create table if not exists %s like userinfo_tab_0\", tableName))\n }\n}", "func insertUser(data User) (int64, error) {\n\t// perform a db.Query insert\n\tinsert, err := db.Exec(\"INSERT INTO users (username, email, created_at) VALUES (?, ?, ?)\", data.Username, data.Email, data.CreatedAt)\n\n\t// if there is an error inserting, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tid, err := insert.LastInsertId()\n\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\treturn id, err\n}", "func (r *mutationResolver) CreateUserss(ctx context.Context, input *model.RegisterInput) (*model.Register, error) {\n\tuser := &model.Register{\n\t\tID: fmt.Sprintf(\"T%d\", rand.Int()),\n\t\tName: input.Name,\n\t\tLastname: input.Lastname,\n\t\tPassword: input.Password,\n\t}\n\tr.user = append(r.user, user)\n\treturn user, nil\n}", "func insertNick(user *tgbotapi.User, nick string) error {\n\tinsert := fmt.Sprintf(`INSERT INTO %s (nick, tg_id, tg_username) VALUES ($1, $2, $3)`, UsersTable)\n\t_, err := pool.Exec(context.Background(), insert, nick, user.ID, user.UserName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not add user: %s\", err)\n\t}\n\n\tlog.Printf(\"Añadido %s como %s\", user.String(), nick)\n\n\treturn nil\n}", "func tNewUser(lbl string) *tUser {\n\tintBytes := make([]byte, 4)\n\tbinary.BigEndian.PutUint32(intBytes, acctCounter)\n\tacctID := account.AccountID{}\n\tcopy(acctID[:], acctTemplate[:])\n\tcopy(acctID[account.HashSize-4:], intBytes)\n\taddr := strconv.Itoa(int(acctCounter))\n\tsig := []byte{0xab} // Just to differentiate from the addr.\n\tsig = append(sig, intBytes...)\n\tsigHex := hex.EncodeToString(sig)\n\tacctCounter++\n\treturn &tUser{\n\t\tsig: sig,\n\t\tsigHex: sigHex,\n\t\tacct: acctID,\n\t\taddr: addr,\n\t\tlbl: lbl,\n\t}\n}", "func (m *MgoUserManager) insertUser(u *auth.User) error {\n\terr := m.UserColl.Insert(u)\n\tif err != nil {\n\t\tif mgo.IsDup(err) {\n\t\t\treturn auth.ErrDuplicateEmail\n\t\t}\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (d *webData) addUsersWeb(w http.ResponseWriter, r *http.Request) {\n\terr := d.tpl.ExecuteTemplate(w, \"addUserCompletePage\", \"some data\")\n\tif err != nil {\n\t\tlog.Println(\"addUsersWeb: template execution error = \", err)\n\t}\n\n\tr.ParseForm()\n\tu := storage.User{}\n\tgetFormValuesUserInfo(&u, r)\n\n\tif u.FirstName != \"\" {\n\t\tpid, _ := storage.QueryForLastUID(d.PDB)\n\t\t//increment the user index nr by one for the new used to add\n\t\tpid++\n\t\tfmt.Println(\"------pid ---------- = \", pid)\n\t\tprintln(\"addUsersWeb: UID = \", pid)\n\t\tu.Number = pid\n\t\tstorage.AddUser(d.PDB, u)\n\t}\n}", "func insertUser(user User) {\n\tcollection := client.Database(\"Go_task\").Collection(\"users\")\n\tinsertResult, err := collection.InsertOne(context.TODO(), user)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Println(\"Inserted user with ID:\", insertResult.InsertedID)\n}", "func (a MySQLAdapter) Create(user entity.User) (uid int, err error) {\n\tinsertStmt := fmt.Sprintf(\"INSERT INTO %s VALUES (DEFAULT, ?, ?, ?, ?)\", a.table)\n\thashedPwd, err := passgen.HashPassword([]byte(user.Password))\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tres, err := a.db.Exec(insertStmt, user.Username, hashedPwd, user.Email, time.Now())\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tlastInsertID, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tuid = int(lastInsertID)\n\treturn 
uid, nil\n}", "func Submit(w http.ResponseWriter, r *http.Request, input magic_struct.Userdata) (flg bool) {\n\n\t//\tdb, err := sql.Open(\"mysql\", \"root:toor@/test?charset=utf8\")\n\t//\tdefer db.Close()\n\t//\tcheckErr(err)\n\t//\n\t//\tstmt, err2 := db.Prepare(\"INSERT INTO `userinfo` (`ID`,`uname`,`pass`,`address`,`X`,`Y`) VALUES (?,?,?,?,?,?)\")\n\t//\tcheckErr(err2)\n\t//\n\t//\t_, err3 := stmt.Exec(strconv.FormatInt(input.Uid, 10), input.Uname, magic_gcm.Cipher(input.Pass), input.Addr, strconv.FormatFloat(input.Geo.X, 'f', 6, 64), strconv.FormatFloat(input.Geo.Y, 'f', 6, 64))\n\t//\n\t//\tif err3 != nil {\n\t//\n\t//\t\tif strings.Contains(err3.Error(), \"Duplicate\") {\n\t//\n\t//\t\t\tf.Fprintf(w, \"user duplicate\")\n\t//\n\t//\t\t\treturn false\n\t//\n\t//\t\t}\n\t//\n\t//\t\tcheckErr(err3)\n\t//\n\t//\t\treturn false\n\t//\n\t//\t} else {\n\t//\n\t//\t\treturn true\n\t//\n\t//\t}\n\t//\n\n\t////// MONGO DB\n\tsession, err := mgo.Dial(\"127.0.0.1\")\n\tif err != nil {\n\t\tpanic(err)\n\t\treturn false\n\t}\n\n\tdefer session.Close()\n\tsession.SetMode(mgo.Monotonic, true)\n\n\t////////\n\tc := session.DB(\"magic\").C(\"userInfo\")\n\n\t// Insert Datas\n\terr = c.Insert(&magic_struct.Userdata{Uname: input.Uname, Uid: input.Uid, Pass: string(magic_gcm.Cipher(input.Pass)), Addr: input.Addr, Geo: input.Geo, Stat: true, Size: 1000000})\n\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"invalid or duplicated submit data\")\n\t\treturn false\n\t} else {\n\t\tfmt.Fprintf(w, \"submited\")\n\t\treturn true\n\n\t}\n\n}", "func insertUser(db *sql.DB, u *User) {\n\n\t// name := u.name\n\t// rollno := u.rollno\n\tinsertUserSQL := `INSERT INTO User( name, rollno) VALUES (?, ?)`\n\tstatement, err := db.Prepare(insertUserSQL) \n\n\tif err != nil {\n\t\tlog.Fatalln(err.Error())\n\t}\n\t_, err = statement.Exec(u.name, u.rollno)\n\tif err != nil {\n\t\tlog.Fatalln(err.Error())\n\t}\n}", "func CreateUser(user *User) error {\n\n //Validate fields\n\n //Username must be between 3 and 20 alphanumeric characters\n invalidCharsRegex := regexp.MustCompile(\"[^A-Za-z0-9]+\")\n if len(user.Username) < 3 || len(user.Username) > 20 || invalidCharsRegex.MatchString(user.Username) {\n return errors.New(\"username must be 3 to 20 alphanumeric characters\") \n }\n\n //Passwords must be at least 8 characters\n if len(user.Password) < 8 {\n return errors.New(\"password must be at least 8 characters\")\n }\n\n db, err := bolt.Open(DB_NAME, 0600, nil)\n if err != nil {\n panic(err)\n }\n defer db.Close()\n\n err = db.Update(func(tx *bolt.Tx) error {\n\n b := tx.Bucket([]byte(USER_BUCKET))\n\n //ensure username is not taken\n v := b.Get([]byte(user.Username))\n if v != nil {\n return errors.New(\"username already taken\")\n }\n\n // generate UUID that never changes for this user.\n user.ID = uuid.New().String()\n\n // hash password (NOTE this will modify the user object passed in)\n user.Password = hashPassword(user.Password)\n\n // encode to JSON\n encoded, err := json.Marshal(user)\n if err != nil {\n return err\n }\n\n // put in database\n err = b.Put([]byte(user.Username), encoded)\n\n return err //nil implies commit transaction, otherwise rollback\n })\n\n return err\n}", "func createUser(u *User) error {\n\tif u.Status == 0 {\n\t\treturn errors.New(\"Invalid user value\")\n\t}\n\n\treturn nil\n}", "func Register (w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"content-type\", \"application/json\")\n\n\tvar user models.User\n\tvar res models.ResponseResult\n\n\tbody, _ := ioutil.ReadAll(r.Body)\n\terr := 
json.Unmarshal(body, &user)\n\n\tif err != nil {\n\t\tres.Error = err.Error()\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tif msg, validationResult := user.Valid(); !validationResult {\n\t\tres.Error = msg\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\n\thash, err := bcrypt.GenerateFromPassword([]byte(user.Password), 10)\n\tif err != nil {\n\t\tre := models.ResponseError{\n\t\t\tCode: constants.ErrCodeHashError,\n\t\t\tMessage: constants.MsgHashError,\n\t\t\tOriginalError: err,\n\t\t}\n\t\tres.Error = re\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tuser.Password = string(hash)\n\t_, err = models.InsertOne(models.UserCollection, user)\n\n\tif err != nil {\n\t\tre := models.ResponseError{\n\t\t\tCode: constants.ErrCodeInsertOne,\n\t\t\tMessage: strings.Replace(constants.MsgErrorInsertOne, \"%COLLECTION%\", models.UserCollection, -1),\n\t\t\tOriginalError: err,\n\t\t}\n\t\tres.Error = re\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tres.Error = false\n\tres.Result = strings.Replace(constants.MsgSuccessInsertedOne, \"%COLLECTION%\", models.UserCollection, -1)\n\t_ = json.NewEncoder(w).Encode(res)\n\n\treturn\n\n}", "func UsersCreate(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tr.ParseForm()\n\tNewUser := User{}\n\tNewUser.Username = r.Form.Get(\"username\")\n\tNewUser.Email = r.Form.Get(\"email\")\n\tNewUser.First = r.Form.Get(\"first\")\n\tNewUser.Last = r.Form.Get(\"last\")\n\n\toutput, err := json.Marshal(NewUser)\n\tfmt.Println(string(output))\n\tif err != nil {\n\t\tfmt.Fprintln(w, \"Something went wrong while processing your request...\")\n\t}\n\n\tsql := fmt.Sprintf(\"INSERT INTO users SET user_nickname='%s', \"+\n\t\t\"user_first='%s', user_last='%s', user_email='%s'\",\n\t\tNewUser.Username, NewUser.First, NewUser.Last, NewUser.Email)\n\n\t_, err = database.Exec(sql)\n\tif err != nil {\n\t\tfmt.Fprintln(w, \"Something went wrong while we process your request: \", err)\n\t}\n\tfmt.Fprintf(w, \"Things went fine...\")\n}", "func insertToUserSession(userName string, jwt string) (bool, int) {\n\n\tt := time.Now()\n\tloginTime := t.Format(\"2006-01-01 15:04:05.99999\")\n\tdb := dbConn()\n\tdefer db.Close()\n\t// inserting data to user_session\n\tinsertSession, sessionErr := db.Prepare(\"insert into user_session (user_name,jwt,first_login,last_login) values(?,?,?,?)\")\n\n\tif sessionErr != nil {\n\t\tlog.Println(\"Couldnt insert data to user_session table\")\n\t\treturn false, 0\n\t}\n\n\teventID, insertErr := insertSession.Exec(userName, jwt, loginTime, loginTime)\n\n\tif insertErr != nil {\n\t\tlog.Println(\"Couldnt execute insert to user_session table\")\n\t\treturn false, 0\n\t}\n\t/*\n\t\tCould this be a problem when multiple users access?\n\t*/\n\tid, err := eventID.LastInsertId()\n\n\tif err != nil {\n\t\tlog.Println(\"Couldnt get the last inserted record ID\")\n\t}\n\tlog.Printf(\"Data inserted to user_session table for User : %s : \", userName)\n\n\treturn true, int(id)\n\n}", "func (u *UserFunctions) Insert(username, firstname, lastname, email, password string) error {\n\n\t//Insert user to the database\n\tnewUser := u.CLIENT.Database(\"queue\")\n\tuserCollection := newUser.Collection(\"user\")\n\tvar user models.User\n\n\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), 12)\n\tif err != nil {\n\t\treturn err\n\t}\n\tuser.UserName = username\n\tuser.FirstName = firstname\n\tuser.LastName = lastname\n\tuser.Email = email\n\tuser.Password = hashedPassword\n\tuser.Created = 
time.Now().UTC()\n\tuser.Active = true\n\n\t//Insert the user into the database\n\tresult, err := userCollection.InsertOne(context.TODO(), user)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\t//Check ID of the inserted document\n\tinsertedID := result.InsertedID.(primitive.ObjectID)\n\tfmt.Println(insertedID)\n\n\treturn nil\n}", "func addChannel(count int) {\n\ttimestamp := time.Now()\n\tif count < 1 {\n\t\tcount = 1\n\t}\n\n\t// Create new user for foreign key constraint.\n\taddUsers(1)\n\n\tfor i := 1; i <= count; i++ {\n\t\td.Database.Exec(\"INSERT INTO channels(channelid, channelname, maxpopulation, userid, createdat, updatedat) VALUES($1, $2, $3, $4, $5, $6)\", channelTestID, \"channel\"+strconv.Itoa(i), i, userTestID, timestamp, timestamp)\n\t}\n}", "func SeedUserToNotaries(user *User) {\n\n\tname := user.Token\n\thost := user.EmailHost\n\taddress := user.EmailAddress\n\tpubHash := user.PublicHash\n\ttimestamp := time.Now().Unix()\n\tsignature := SignNotaryResponse(name, host, pubHash, timestamp)\n\n\tfor notary := range notaries {\n\t\tif notary == GetConfig().SMTPMxHost {\n\t\t\tcontinue\n\t\t}\n\t\tgo func(notary string) {\n\t\t\tdefer Recover()\n\t\t\tlog.Println(\"Seeding new user \" + user.EmailAddress + \" to \" + notary)\n\t\t\tu := url.URL{}\n\t\t\tu.Scheme = \"https\"\n\t\t\tu.Host = notary\n\t\t\tu.Path = \"/publickeys/seed\"\n\t\t\tbody := url.Values{}\n\t\t\tbody.Set(\"address\", address)\n\t\t\tbody.Set(\"pubHash\", pubHash)\n\t\t\tbody.Set(\"timestamp\", strconv.FormatInt(timestamp, 10))\n\t\t\tbody.Set(\"signature\", signature)\n\t\t\tresp, err := http.PostForm(u.String(), body)\n\t\t\tif err != nil {\n\t\t\t\tlog.Printf(\"Notary seeding failed for %s:\\n%s\", notary, err.Error())\n\t\t\t\treturn\n\t\t\t}\n\t\t\tresp.Body.Close()\n\t\t}(notary)\n\t}\n}", "func (data *StudentMasterDb) Insert(expiryDate string) <-chan DbModelError {\n\n\tJob := make(chan DbModelError, 1)\n\tsuccessResp := map[string]string{}\n\tvar customError DbModelError\n\tif CheckPing(&customError); customError.Err != nil {\n\t\tJob <- customError\n\t\treturn Job\n\t}\n\n\t// Verify as a new User\n\tvar studentExists bool\n\tdbSP, _ := RetriveSP(\"STU_EXISTS_WITH_EMAIL\")\n\terr := Db.QueryRow(dbSP, data.PersonalEmail).Scan(&data.StakeholderID, &data.PersonalEmail, &studentExists)\n\n\tif err != nil && err != sql.ErrNoRows {\n\t\tfmt.Println(\"query operation failed\" + err.Error())\n\t\tJob <- DbModelError{\n\t\t\t\"500\", \"S1AUT001\", fmt.Errorf(\"Cannot Read Database %v \", err.Error()), successResp,\n\t\t}\n\t\treturn Job\n\t}\n\t//fmt.Printf(\" 49 %v %+v\\n \", studentExists, err)\n\n\t// Return if already exists\n\tif studentExists {\n\t\tJob <- DbModelError{\n\t\t\t\"403\", \"S1AUT002\", fmt.Errorf(\"Account exists with email: %s\", data.PersonalEmail), successResp,\n\t\t}\n\t\treturn Job\n\n\t}\n\tsID, refCode, cbError := createStuSID(data.DateOfBirth)\n\tif cbError.ErrCode != \"000\" {\n\t\tJob <- cbError\n\t\treturn Job\n\t}\n\tdata.StakeholderID = sID\n\tfmt.Println(data.StakeholderID)\n\t// Prepare Db Insert\n\tdbSP, _ = RetriveSP(\"STU_INS_NEW_USR\")\n\tstmt, err := Db.Prepare(dbSP)\n\tif err != nil {\n\n\t\tfmt.Println(\"error while inserting\" + err.Error())\n\t\tJob <- DbModelError{\n\t\t\t\"500\", \"S1AUT003\", fmt.Errorf(\"Error While registering Student %v \", err.Error()), successResp,\n\t\t}\n\t\treturn Job\n\t}\n\tdefer stmt.Close()\n\tdata.CreationDate = time.Now()\n\tdata.LastUpdatedDate = data.CreationDate\n\tresults, err := stmt.Exec(&data.StakeholderID, 
&data.FirstName, &data.MiddleName, &data.LastName, &data.PersonalEmail, &data.PhoneNumber, &data.AlternatePhoneNumber, &data.Gender, &data.DateOfBirth, &data.AadharNumber, &data.PermanentAddressLine1, &data.PermanentAddressLine2, &data.PermanentAddressLine3, &data.PermanentAddressCountry, &data.PermanentAddressState, &data.PermanentAddressCity, &data.PermanentAddressDistrict, &data.PermanentAddressZipcode, &data.PermanentAddressPhone, &data.PresentAddressLine1, &data.PresentAddressLine2, &data.PresentAddressLine3, &data.PresentAddressCountry, &data.PresentAddressState, &data.PresentAddressCity, &data.PresentAddressDistrict, &data.PresentAddressZipcode, &data.PresentAddressPhone, &data.UniversityName, &data.UniversityID, &data.ProgramName, &data.ProgramID, &data.BranchName, &data.BranchID, &data.CollegeID, &data.CollegeEmailID, &data.Password, &data.UniversityApprovedFlag, &data.CreationDate, &data.LastUpdatedDate, &data.AccountStatus, false, false, expiryDate, &data.Attachment, data.AttachmentName, &data.CreationDate, refCode, data.ProfilePicture)\n\tfmt.Printf(\"results: %+v \\n %+v\", results, err)\n\tif err != nil {\n\n\t\tfmt.Println(\"error while inserting\" + err.Error())\n\t\tJob <- DbModelError{\n\t\t\t\"500\", \"S1AUT004\", fmt.Errorf(\"Error While registering Student %v \", err.Error()), successResp,\n\t\t}\n\t\treturn Job\n\t}\n\n\t// Print data in Console\n\tfmt.Printf(\"line 80 %+v %+v \\n \", data, err)\n\n\tcustomError.ErrTyp = \"000\"\n\tsuccessResp[\"Phone\"] = data.PhoneNumber\n\tsuccessResp[\"StakeholderID\"] = data.StakeholderID\n\tsuccessResp[\"Email\"] = data.PersonalEmail\n\tcustomError.SuccessResp = successResp\n\n\tJob <- customError\n\n\treturn Job\n\n}", "func AddNewUser(u *Users) {\n\to := orm.NewOrm()\n\texist := o.QueryTable(new(Users)).Filter(\"chat_id\", u.ChatID).Exist()\n\n\tif !exist {\n\t\t_, _ = o.Insert(u)\n\t\treturn\n\t}\n\tlog.Info(\"user already exist\")\n}", "func main() {\n\tdb, err := sql.Open(\"mysql\", \"monstr:Qwertys!23@tcp(localhost:3306)/world?charset=utf8\")\n\tcheckErr(err)\n\n\t// вставка\n\tstmt, err := db.Prepare(\"INSERT userinfo SET username=?,email=?,created=?\")\n\tcheckErr(err)\n\n\tres, err := stmt.Exec(\"pavel\", \"[email protected]\", \"2018-01-02\")\n\tcheckErr(err)\n\n\tid, err := res.LastInsertId()\n\tcheckErr(err)\n\n\tfmt.Println(id)\n\n\t// обновление\n\tstmt, err = db.Prepare(\"update userinfo set username=? 
where uid=?\")\n\tcheckErr(err)\n\n\tres, err = stmt.Exec(\"pavelupdate\", id)\n\tcheckErr(err)\n\n\taffect, err := res.RowsAffected()\n\tcheckErr(err)\n\n\tfmt.Println(affect)\n\n\t// запрос\n\trows, err := db.Query(\"SELECT * FROM userinfo\")\n\tcheckErr(err)\n\n\tfor rows.Next() {\n\t\tvar uid int\n\t\tvar username string\n\t\tvar email string\n\t\tvar created string\n\t\terr = rows.Scan(&uid, &username, &email, &created)\n\t\tcheckErr(err)\n\t\tfmt.Println(uid)\n\t\tfmt.Println(username)\n\t\tfmt.Println(email)\n\t\tfmt.Println(created)\n\t}\n\n\t// удаление\n\tstmt, err = db.Prepare(\"delete from userinfo where uid=?\")\n\tcheckErr(err)\n\n\tres, err = stmt.Exec(id)\n\tcheckErr(err)\n\n\taffect, err = res.RowsAffected()\n\tcheckErr(err)\n\n\tfmt.Println(affect)\n\n\tdb.Close()\n\n}", "func InsertUser(email string, lName string, first string, last string, active bool, superadmin bool, password string) {\n\t/*\n\t\temail varchar(30),\n\t\tloginName varchar(30),\n\t\tfirstname varchar(30),\n\t\tlastname varchar(30),\n\t\tactive boolean,\n\t\tsuperadmin boolean,\n\t\tpassword varchar(30)\n\t*/\n\tquery := \"INSERT INTO coyoUser VALUES (NULL, '\" + email + \"' , '\" + lName + \"' , '\" + first + \"' , '\" + last + \"' ,\" + strconv.FormatBool(active) + \" ,\" + strconv.FormatBool(superadmin) + \" , '\" + password + \"')\"\n\tinsert, err := DB.Query(query)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tdefer insert.Close()\n}", "func (s stage) createUsers(config types.Config) error {\n\tif len(config.Passwd.Users) == 0 {\n\t\treturn nil\n\t}\n\ts.Logger.PushPrefix(\"createUsers\")\n\tdefer s.Logger.PopPrefix()\n\n\tfor _, u := range config.Passwd.Users {\n\t\tif err := s.CreateUser(u); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to create user %q: %v\",\n\t\t\t\tu.Name, err)\n\t\t}\n\n\t\tif err := s.SetPasswordHash(u); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to set password for %q: %v\",\n\t\t\t\tu.Name, err)\n\t\t}\n\n\t\tif err := s.AuthorizeSSHKeys(u); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to add keys to user %q: %v\",\n\t\t\t\tu.Name, err)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (conn *Conn) Insert(dataSet map[int][]string) {\n\tdb := conn.db\n\tshardMap := make(map[int][]Pair)\n\tfor userid, emails := range dataSet {\n\t\tshardMap[modId(userid)] = append(shardMap[modId(userid)], Pair{userid, emails})\n\t}\n\tvar sqlStrings []SqlStrVal\n\n\tfor tabNum, pairs := range shardMap {\n\t\ttableName := \"unsub_\" + strconv.Itoa(tabNum)\n\t\tsqlStr := \"INSERT INTO \" + tableName + \"(user_id, email, ts) VALUES \"\n\t\tvar vals []interface{}\n\t\tcounter := 0\n\n\t\tfor p := range pairs {\n\t\t\tfor e := range pairs[p].emails {\n\t\t\t\tsqlStr += \"(?, ?, CURRENT_TIMESTAMP), \"\n\t\t\t\tvals = append(vals, pairs[p].id, pairs[p].emails[e])\n\t\t\t\tcounter += 1\n\t\t\t\tif counter >= 32000 {\n\t\t\t\t\tsqlStrings = append(sqlStrings, SqlStrVal{sqlStr, vals[0:len(vals)]})\n\t\t\t\t\tsqlStr = \"INSERT INTO \" + tableName + \"(user_id, email, ts) VALUES \"\n\t\t\t\t\tvals = make([]interface{}, 0)\n\t\t\t\t\tcounter = 0\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif len(vals) != 0 {\n\t\t\tsqlStrings = append(sqlStrings, SqlStrVal{sqlStr, vals[0:len(vals)]})\n\t\t}\n\n\t}\n\n\tfor i := range sqlStrings {\n\t\tstmt, err := db.Prepare(sqlStrings[i].sqlStr[0 : len(sqlStrings[i].sqlStr)-2])\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error preparing statement: %v\\n\", err)\n\t\t\treturn\n\t\t}\n\t\t_, err = stmt.Exec(sqlStrings[i].val...)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error executing 
statement: %v\\n\", err)\n\t\t\treturn\n\t\t}\n\t}\n}", "func (u *User) Create(ctx context.Context, log *log.Logger, w http.ResponseWriter, r *http.Request, params map[string]string) error {\n\tvar newU NewUser\n\n\tif err := web.Unmarshal(r.Body, &newU); err != nil {\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tid, err := uuid.NewV4()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tuserDetails := UserDetails{\n\t\tUserName: newU.UserName,\n\t\tEmail: newU.Email,\n\t\tID: id.String(),\n\t}\n\n\tencodedData, err := json.Marshal(userDetails)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdbConn := u.MasterDB\n\n\t// Save User by ID\n\tfID := func(db *leveldb.DB) error {\n\t\t// TODO: while inserting validate if email already exists by checking with email index\n\t\treturn db.Put([]byte(userDetails.ID), encodedData, nil)\n\t}\n\n\t// Save User by Email\n\tfEmail := func(db *leveldb.DB) error {\n\t\t// TODO: while inserting validate if already exists or not\n\t\treturn db.Put([]byte(userDetails.Email), []byte(userDetails.ID), nil)\n\t}\n\n\tif err := dbConn.Execute(fID); err != nil {\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tif err := dbConn.Execute(fEmail); err != nil {\n\t\t// TODO: if error remove the data from ID as well and throw error\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tstatus := struct {\n\t\tID string `json:\"id\"`\n\t}{\n\t\tID: userDetails.ID,\n\t}\n\tweb.Respond(ctx, log, w, status, http.StatusCreated)\n\treturn nil\n}", "func createUser(firstName string, MI string, lastName string, username string, password string, privLevel int, courseName string) error {\n\n\tdb, err := sql.Open(\"mysql\", DB_USER_NAME+\":\"+DB_PASSWORD+\"@unix(/var/run/mysql/mysql.sock)/\"+DB_NAME)\n\n\tif err != nil {\n\t\treturn errors.New(\"No connection\")\n\t}\n\n\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\n\tif err != nil {\n\t\treturn errors.New(\"Error\")\n\t}\n\n\t_, err = db.Exec(\"INSERT INTO Users(FirstName, MiddleInitial, LastName, Username, Password, PrivLevel) VALUES(?, ?, ?, ?, ?, ?)\", firstName, MI, lastName, username, hashedPassword, privLevel)\n\n\tif err != nil {\n\t\treturn errors.New(\"User creation failed.\")\n\t}\n\n\tsendRandomPassword(username)\n\n\t_, err = db.Exec(\"INSERT INTO StudentCourses(Student, CourseName) VALUES ((select UserID from Users where Username=?), ?)\", username, courseName)\n\n\tif err != nil {\n\t\treturn errors.New(\"User unable to be added to student courses.\")\n\t}\n\n\t/*_, err = db.Exec(\"INSERT INTO GradeReport\" + courseName + \"(Student) VALUES(select UserID from users where Username=\" + username + \")\")\n\n\tif err != nil {\n\t\treturn errors.New(\"User unable to be added to GradeReport table.\")\n\t}*/\n\n\treturn nil\n}", "func SignUp() {\n\tvar user User\n\n\tvar db sqldb.MysqlDriver\n\tsdbcf := sqldb.GetSQLDBConfig()\n\n\tvar err error\n\n\terr = db.Init(&sdbcf)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\trows, uerr := db.Query(\"SELECT username FROM users WHERE username=?\", user.UserName)\n\tvar hashedPassword []byte\n\tvar usrName string\n\tif rows.Next() {\n\t\trows.Scan(&usrName)\n\t}\n\tif usrName == user.UserName {\n\t\tfmt.Println(\"This user is already registered.\")\n\t\tSignUp()\n\t}\n\tif uerr == nil {\n\t\thashedPassword, err = bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"unable to incrypt password\")\n\t\t\tSignUp()\n\t\t}\n\t} else {\n\t\tfmt.Println(\"Failed to 
signup\")\n\t\tSignUp()\n\t}\n\n\t//create new user\n\tquery := \"INSERT INTO `users` (`name`, `contact_number`, `address`, `username`, `password`, `role`, `email_address`) VALUES (?,?,?,?,?,?,?);\"\n\n\tsqlresult, eerr := db.Execute(query, user.Name, user.ContactNumber, user.Address, user.UserName, string(hashedPassword), user.Role, user.EmailAddress)\n\tif eerr != nil {\n\t\tfmt.Println(\"Failed to Signup \")\n\t\tSignUp()\n\t}\n\tnum, rerr := sqlresult.RowsAffected()\n\tif num < 0 || rerr != nil {\n\t\tfmt.Println(\"Failed to Signup \")\n\t\tSignUp()\n\t}\n\n\tvar id int\n\trows, serr := db.Query(\"SELECT id FROM users WHERE username=?\", user.UserName)\n\tif serr != nil {\n\t\tfmt.Println(\"Failed to Signup \")\n\t\tSignUp()\n\t}\n\n\tif rows.Next() {\n\t\terr := rows.Scan(&id)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Failed to Signup \")\n\t\t\tSignUp()\n\t\t}\n\t}\n\n\tfmt.Println(\"Welcomem to the EShopee !!!\\n\")\n\n\tCustomer(id, db)\n\n}", "func createUser() *User {\n\tcurUID++\n\tu := new(User)\n\tu.id = curUID\n\tu.cash = 0\n\tu.assets = make([]int, 0)\n\tu.sharesOwned = make(map[int]int)\n\treturn u\n}", "func CreateUser(w http.ResponseWriter, r *http.Request) {\n\tenableCors(&w)\n\tvars := mux.Vars(r)\n\tuid := vars[\"uid\"]\n\t//uuid := uuid.New().String()\n\t//ip := strings.Split(r.RemoteAddr, \":\")[0]\n\tfilesFirstore := StructToJSON(userFiles)\n\t_, err := db.Collection(\"users\").Doc(uid).Update(context.Background(), []firestore.Update{{Path: \"rudi\", Value:filesFirstore }})\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n}", "func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) {\n\tspecs := make([]*sqlgraph.CreateSpec, len(ucb.builders))\n\tnodes := make([]*User, len(ucb.builders))\n\tmutators := make([]Mutator, len(ucb.builders))\n\tfor i := range ucb.builders {\n\t\tfunc(i int, root context.Context) {\n\t\t\tbuilder := ucb.builders[i]\n\t\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\t\tmutation, ok := m.(*UserMutation)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t\t}\n\t\t\t\tif err := builder.check(); err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tbuilder.mutation = mutation\n\t\t\t\tnodes[i], specs[i] = builder.createSpec()\n\t\t\t\tvar err error\n\t\t\t\tif i < len(mutators)-1 {\n\t\t\t\t\t_, err = mutators[i+1].Mutate(root, ucb.builders[i+1].mutation)\n\t\t\t\t} else {\n\t\t\t\t\t// Invoke the actual operation on the latest mutation in the chain.\n\t\t\t\t\tif err = sqlgraph.BatchCreate(ctx, ucb.driver, &sqlgraph.BatchCreateSpec{Nodes: specs}); err != nil {\n\t\t\t\t\t\tif cerr, ok := isSQLConstraintError(err); ok {\n\t\t\t\t\t\t\terr = cerr\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tmutation.done = true\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tid := specs[i].ID.Value.(int64)\n\t\t\t\tnodes[i].ID = int(id)\n\t\t\t\treturn nodes[i], nil\n\t\t\t})\n\t\t\tfor i := len(builder.hooks) - 1; i >= 0; i-- {\n\t\t\t\tmut = builder.hooks[i](mut)\n\t\t\t}\n\t\t\tmutators[i] = mut\n\t\t}(i, ctx)\n\t}\n\tif len(mutators) > 0 {\n\t\tif _, err := mutators[0].Mutate(ctx, ucb.builders[0].mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn nodes, nil\n}", "func CreateUsers2(users structs.CreateUsers) (structs.CreateUsers, error) {\n\n\tvar err error\n\tvar t = structs.Component{}\n\ttx := idb.DB.Begin()\n\tif err = tx.Error; err != nil 
{\n\t\tfmt.Println(\"err start tx\", err.Error())\n\t\treturn users, err\n\t}\n\tusers.CreatedAt = t.GetTimeNow()\n\tif err = tx.Table(\"users\").Create(&users).Error; err != nil {\n\t\ttx.Rollback()\n\t\treturn users, err\n\t}\n\ttx.Commit()\n\treturn users, err\n}", "func (r *Repository) create(user *domain.UserInfoModel) error {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tquery := \"INSERT INTO users (namee, email, password) VALUES ($1, $2, $3)\"\n\tstmt, err := r.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tqueryStart := time.Now().Nanosecond() / 1000\n\t_, err = stmt.ExecContext(ctx, user.Name, user.Email, user.PassWord)\n\tif err != nil {\n\t\treturn err\n\t}\n\tqueryEnd := time.Now().Nanosecond() / 1000\n\texecutionTime := queryEnd - queryStart\n\terr = r.insertTimeSpent(\"Create\", executionTime)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn nil\n}", "func (u *User) Create(w http.ResponseWriter, r *http.Request) {\n\tif r.Method != \"POST\" {\n\t\tlog.Println(\"Invalid Operation\")\n\t\thttp.Error(w, \"Invalid operation\", http.StatusMethodNotAllowed)\n\t\treturn\n\t}\n\tfmt.Printf(r.FormValue(\"url[0]\"))\n\tuserModel := models.NewUser()\n\tuserModel.Url1 = r.FormValue(\"url[0]\")\n\tuserModel.Url2 = r.FormValue(\"url[1]\")\n\tuserModel.Url3 = r.FormValue(\"url[2]\")\n\tif _, err := userModel.Insert(u.db); err != nil {\n\t\tlog.Println(err.Error())\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tpages := []string{userModel.Url1, userModel.Url2, userModel.Url3}\n\tvar wg sync.WaitGroup\n\tfor _, page := range pages {\n\t\twg.Add(1)\n\t\tgo func(page string) {\n\t\t\tdefer wg.Done()\n\t\t\tgetPage(page)\n\t\t}(page)\n\t}\n\twg.Wait()\n\n\thttp.Redirect(w, r, \"/list\", http.StatusMovedPermanently)\n}", "func CreateDefaultUser(username, password string) (err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n // Check if the default user exists\n if len(users) > 0 {\n err = createError(001)\n return\n }\n\n var defaults = defaultsForNewUser(username, password)\n users[defaults[\"_id\"].(string)] = defaults\n saveDatabase(data)\n\n return\n}", "func (i *UsersInteractor) Create(user User) (int, error) {\n\tif len(user.Name) == 0 {\n\t\treturn 0, errors.New(\"username can't be empty\")\n\t}\n\tid, err := i.users.Store(user)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn id, nil\n}", "func UserSignup(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tresp := make(map[string]interface{})\n\n\tusername := strings.TrimSpace(r.PostFormValue(\"username\"))\n\temail := strings.TrimSpace(r.PostFormValue(\"email\"))\n\tpassword := strings.TrimSpace(r.PostFormValue(\"password\"))\n\tpasswordAgain := strings.TrimSpace(r.PostFormValue(\"password_again\"))\n\n\tmailErr := checkmail.ValidateFormat(email)\n\n\tdb := M.DB()\n\n\tvar (\n\t\tuserCount int\n\t\temailCount int\n\t)\n\n\tdb.QueryRow(\"SELECT COUNT(id) AS userCount FROM users WHERE username=?\", username).Scan(&userCount)\n\tdb.QueryRow(\"SELECT COUNT(id) AS emailCount FROM users WHERE email=?\", email).Scan(&emailCount)\n\n\tif username == \"\" || email == \"\" || password == \"\" || passwordAgain == \"\" {\n\t\tresp[\"mssg\"] = \"Some values are missing!\"\n\t} else if len(username) < 4 || len(username) > 32 {\n\t\tresp[\"mssg\"] = \"Username should be between 4 and 32\"\n\t} else if mailErr != nil 
{\n\t\tresp[\"mssg\"] = \"Invalid Format!\"\n\t} else if password != passwordAgain {\n\t\tresp[\"mssg\"] = \"Passwords don't match\"\n\t} else if userCount > 0 {\n\t\tresp[\"mssg\"] = \"Username already exists!\"\n\t} else if emailCount > 0 {\n\t\tresp[\"mssg\"] = \"Email already exists!\"\n\t} else {\n\n\t\thash, hashErr := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\t\tif hashErr != nil {\n\t\t\tlog.Fatal(hashErr)\n\t\t}\n\n\t\trs, iErr := db.Exec(\n\t\t\t\"INSERT INTO users(username, email, password, bio, joined) VALUES(?, ?, ?, ?, ?)\",\n\t\t\tusername,\n\t\t\temail,\n\t\t\thash,\n\t\t\t\"\",\n\t\t\tM.MakeTimestamp(),\n\t\t)\n\n\t\tif iErr != nil {\n\t\t\tlog.Fatal(iErr)\n\t\t}\n\n\t\tinsertID, _ := rs.LastInsertId()\n\t\tdir, _ := os.Getwd()\n\t\tuserPath := dir + \"/public/users/\" + strconv.FormatInt(insertID, 10)\n\n\t\tmkErr := os.Mkdir(userPath, 0655)\n\t\tif mkErr != nil {\n\t\t\tlog.Fatal(mkErr)\n\t\t}\n\n\t\tcErr := os.Link(dir+\"/public/images/golang-color-icon2.png\", userPath+\"/avatar.png\")\n\t\tif cErr != nil {\n\t\t\tlog.Fatal(cErr)\n\t\t}\n\n\t\tsession := M.GetSession(r)\n\t\tsession.Values[\"id\"] = insertID\n\t\tsession.Values[\"username\"] = username\n\t\tsession.Save(r, w)\n\n\t\tresp[\"success\"] = true\n\t\tresp[\"mssg\"] = \"Hello, \" + username\n\n\t}\n\n\tM.JSON(w, r, resp)\n}", "func (db *BotDB) CountNewUsers(seconds int64, guild uint64) int {\n\tvar i int\n\terr := db.sqlCountNewUsers.QueryRow(seconds, guild).Scan(&i)\n\tdb.CheckError(\"CountNewUsers\", err)\n\treturn i\n}", "func MustBehaveLikeUsers(t *testing.T, us model.Users) {\n\t// Create and save a user\n\tu1 := model.User{\n\t\tName: \"Jane Doe\",\n\t\tGithubID: 66235,\n\t\tAPIKey: \"deadc0ffee\",\n\t}\n\terr := us.Save(&u1)\n\trequire.NoError(t, err, \"errored when saving a user\")\n\t// ID should be auto-generated, if not specified\n\trequire.NotEmpty(t, u1.ID, \"User ID should be auto-generated\")\n\n\t// added user record should find-able by its ID\n\tux, err := us.Find(u1.ID)\n\trequire.NoError(t, err, \"errored when finding a user\")\n\trequire.Equal(t, u1, *ux)\n\t// but not if it's a wrong one\n\tux, err = us.Find(u1.ID * 100)\n\trequire.Equal(t, model.ErrNotFound, err)\n\n\t// and by its Github ID\n\tux, err = us.FindByGithubID(u1.GithubID)\n\trequire.NoError(t, err)\n\trequire.Equal(t, u1, *ux)\n\t// but not if it's a wrong one\n\tux, err = us.FindByGithubID(u1.GithubID * 100)\n\trequire.Equal(t, model.ErrNotFound, err)\n\n\t// user, when added, should have the API key generated\n\tak := u1.APIKey\n\trequire.NotEmpty(t, ak)\n\t// which can be used to find the same user\n\tux, err = us.FindByAPIKey(ak)\n\trequire.NoError(t, err)\n\trequire.Equal(t, u1, *ux)\n\t// but, again, not if it's a wrong one\n\tux, err = us.FindByAPIKey(\"o_O\")\n\trequire.Equal(t, model.ErrNotFound, err)\n\n\t// the second user, when added, must receive a different ID\n\t// Create and save a user\n\tu2 := model.User{\n\t\tName: \"Gordon Freeman\",\n\t}\n\terr = us.Save(&u2)\n\trequire.NoError(t, err)\n\t// ID should be auto-generated, if not specified\n\trequire.NotEqual(t, u2.ID, 0, \"User ID should be auto-generated\")\n\trequire.NotEqual(t, u1.ID, u2.ID)\n}", "func Seed(myDB database.Manage) error {\n\n\tvar u User\n\tsql := `INSERT INTO users\n\t\t(username, email, password_hash)\n\t\tVALUES\n\t\t(:username, :email, :password_hash);`\n\n\tu.UserName = \"tomm\"\n\tu.Email = \"[email protected]\"\n\thash, _ := HashPassword(\"mygreatpassword\")\n\tu.Password = hash\n\tres, err := 
myDB.DB.NamedExec(sql, &u)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tid, _ := res.LastInsertId()\n\taffected, _ := res.RowsAffected()\n\tlog.Printf(\"Seed userID :%d , affected :%d\", id, affected)\n\n\treturn err\n}", "func addLoginAttempt(usr string) {\n\tcurr_time := time.Now().Unix()\n\thashed_usr := Hash1(usr)\n\tquery := QUERY_INSERT_LAST_LOGIN\n\tExecDB(query, hashed_usr, curr_time)\n}", "func preprocessUsers() {\n\tcsv_data_points, err := os.Open(\"./users.csv\")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\tr := csv.NewReader(csv_data_points)\n\tfor {\n\t\trecord, err := r.Read()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tage, _ := strconv.Atoi(record[4])\n\t\thr1l, _ := strconv.Atoi(record[5])\n\t\thr1u, _ := strconv.Atoi(record[6])\n\t\thr2l, _ := strconv.Atoi(record[7])\n\t\thr2u, _ := strconv.Atoi(record[8])\n\t\thr3l, _ := strconv.Atoi(record[9])\n\t\thr3u, _ := strconv.Atoi(record[10])\n\t\thr4l, _ := strconv.Atoi(record[11])\n\t\thr4u, _ := strconv.Atoi(record[12])\n\t\tnewSes := make([]int, 0)\n\t\tnewUser := Person{record[0], record[1], record[2], record[3], age, hr1l, hr1u, hr2l, hr2u, hr3l, hr3u, hr4l, hr4u, newSes}\n\t\tpeople = append(people, newUser)\n\t}\n\tcsv_data_points.Close()\n}", "func (user *User) Save() error {\n\tif user.RegistrationDate.IsZero() {\n\t\tuser.RegistrationDate = time.Now()\n\t}\n\tvar q *ara.Query\n\tif user.Key == nil {\n\t\trd, _ := user.RegistrationDate.MarshalJSON()\n\t\tq = ara.NewQuery(`INSERT {\n\t\t\t\tUsername: %q,\n\t\t\t\tEmail: %q,\n\t\t\t\tPassword: %q,\n\t\t\t\tAge: %d,\n\t\t\t\tGender: %q,\n\t\t\t\tLikes: %q,\n\t\t\t\tMeets: %q,\n\t\t\t\tRegistrationDate: %s\n\t\t\t} IN users`,\n\t\t\tuser.Username,\n\t\t\tuser.Email,\n\t\t\tuser.Password,\n\t\t\tuser.Age,\n\t\t\tuser.Gender,\n\t\t\tuser.Likes,\n\t\t\tuser.Meets,\n\t\t\trd,\n\t\t\t)\n\n\t} else {\n\t\tq = ara.NewQuery(`UPDATE %q WITH {\n\t\t\t\tUsername: %q,\n\t\t\t\tEmail: %q,\n\t\t\t\tPassword: %q,\n\t\t\t\tAge: %d,\n\t\t\t\tGender: %q,\n\t\t\t\tLikes: %q,\n\t\t\t\tMeets: %q\n\t\t\t} IN users`,\n\t\t\t*user.Key,\n\t\t\tuser.Username,\n\t\t\tuser.Email,\n\t\t\tuser.Password,\n\t\t\tuser.Age,\n\t\t\tuser.Gender,\n\t\t\tuser.Likes,\n\t\t\tuser.Meets,\n\t\t\t)\n\t}\n\tlog.Println(q)\n\t_, err := db.Run(q)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\tvar users []User\n\tq = ara.NewQuery(`FOR user IN users FILTER user.Username == %q RETURN user`, user.Username).Cache(true).BatchSize(500)\n\tresp, err := db.Run(q)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\tlog.Println(string(resp))\n\terr = json.Unmarshal(resp, &users)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\tlog.Println(users)\n\tif len(users) > 0 {\n\t\t*user = users[0]\n\t\treturn nil\n\t}\n\treturn errors.New(\"prout\")\n}", "func init() {\n\tCreateUser(UserObject{\n\t\tFname: \"Debapriya\",\n\t\tLname: \"Das\",\n\t\tAge: 24,\n\t})\n\tCreateUser(UserObject{\n\t\tFname: \"Anuja\",\n\t\tLname: \"Saha\",\n\t\tAge: 21,\n\t})\n}", "func agregarUsuario(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar User usr\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"Insert a Valid Task Data\")\n\t}\n\tjson.Unmarshal(reqBody, &User)\n\tfmt.Println(User)\n\tpol := newCn()\n\tpol.abrir()\n\trows, err := pol.db.Query(\"insert into usuario(username, password, nombre, apellido, fecha_nacimiento, correo) values(:1,:2,:3,:4,to_date(:5, 
'yyyy/mm/dd'),:6)\", User.User, User.Contrasena, User.Nombre, User.Apellido, User.Fechanacimiento, User.Correo)\n\tpol.cerrar()\n\tif err != nil {\n\t\tfmt.Println(\"Error running query\")\n\t\tfmt.Println(err)\n\t\tfmt.Fprintf(w, \"usuario ya existe o correo invalido\")\n\t\treturn\n\t} else {\n\t\tfmt.Fprintf(w, \"registro exitos\")\n\t}\n\tdefer rows.Close()\n\n}", "func (u UserRepo) Create(user model.User) (int, error) {\n\tvar id int\n\trows, err := u.db.Query(\"INSERT INTO users (login , password,roleID) VALUES ($1,$2,$3) RETURNING id \", user.Login, user.Password, dto.USER)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif rows.Next() {\n\t\terr = rows.Scan(&id)\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn id, rows.Err()\n}", "func Create(user User) error {\n\t\n}", "func (ucb *UserCreateBulk) Save(ctx context.Context) ([]*User, error) {\n\tspecs := make([]*sqlgraph.CreateSpec, len(ucb.builders))\n\tnodes := make([]*User, len(ucb.builders))\n\tmutators := make([]Mutator, len(ucb.builders))\n\tfor i := range ucb.builders {\n\t\tfunc(i int, root context.Context) {\n\t\t\tbuilder := ucb.builders[i]\n\t\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\t\tif err := builder.preSave(); err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tmutation, ok := m.(*UserMutation)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t\t}\n\t\t\t\tbuilder.mutation = mutation\n\t\t\t\tnodes[i], specs[i] = builder.createSpec()\n\t\t\t\tvar err error\n\t\t\t\tif i < len(mutators)-1 {\n\t\t\t\t\t_, err = mutators[i+1].Mutate(root, ucb.builders[i+1].mutation)\n\t\t\t\t} else {\n\t\t\t\t\t// Invoke the actual operation on the latest mutation in the chain.\n\t\t\t\t\tif err = sqlgraph.BatchCreate(ctx, ucb.driver, &sqlgraph.BatchCreateSpec{Nodes: specs}); err != nil {\n\t\t\t\t\t\tif cerr, ok := isSQLConstraintError(err); ok {\n\t\t\t\t\t\t\terr = cerr\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tmutation.done = true\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t\tid := specs[i].ID.Value.(int64)\n\t\t\t\tnodes[i].ID = int(id)\n\t\t\t\treturn nodes[i], nil\n\t\t\t})\n\t\t\tfor i := len(builder.hooks) - 1; i >= 0; i-- {\n\t\t\t\tmut = builder.hooks[i](mut)\n\t\t\t}\n\t\t\tmutators[i] = mut\n\t\t}(i, ctx)\n\t}\n\tif len(mutators) > 0 {\n\t\tif _, err := mutators[0].Mutate(ctx, ucb.builders[0].mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn nodes, nil\n}", "func CreateUser(response http.ResponseWriter, request *http.Request) {\r\n\tresponse.Header().Set(\"content-type\", \"application/json\")\r\n\tvar meet User\r\n\t_ = json.NewDecoder(request.Body).Decode(&meet)\r\n\tmeet.def()\r\n\tif meet.Starttime < meet.Creationtime {\r\n\t\tresponse.WriteHeader(http.StatusBadRequest)\r\n\t\tresponse.Write([]byte(`{ \"message\": \"Meeting cannot start in the past\" }`))\r\n\t\treturn\r\n\t}\r\n\tif meet.Starttime > meet.Endtime {\r\n\t\tresponse.WriteHeader(http.StatusBadRequest)\r\n\t\tresponse.Write([]byte(`{ \"message\": \"Invalid time\" }`))\r\n\t\treturn\r\n\t}\r\n\tlock.Lock()\r\n\tdefer lock.Unlock()\r\n\terr := UsersBusy(meet)\r\n\tif err != nil {\r\n\t\tresponse.WriteHeader(http.StatusBadRequest)\r\n\t\tresponse.Write([]byte(`{ \"message\": \"` + err.Error() + `\" }`))\r\n\t\treturn\r\n\t}\r\n\tcollection := client.Database(\"appointy\").Collection(\"users\")\r\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\r\n\tdefer 
cancel()\r\n\tresult, _ := collection.InsertOne(ctx, meet)\r\n\tmeet.ID = result.InsertedID.(primitive.ObjectID)\r\n\tjson.NewEncoder(response).Encode(meet)\r\n\tfmt.Println(meet)\r\n}", "func (t *SimpleChaincode) registerUser(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar err error\n\n\tif len(args) != 16 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 8\")\n\t}\n\n\t//input sanitation\n\tfmt.Println(\"- start registration\")\n\tif len(args[0]) <= 0 {\n\t\treturn nil, errors.New(\"0th argument must be a non-empty string\")\n\t}\n\tif len(args[1]) <= 0 {\n\t\treturn nil, errors.New(\"1st argument must be a non-empty string\")\n\t}\n\tif len(args[2]) <= 0 {\n\t\treturn nil, errors.New(\"2nd argument must be a non-empty string\")\n\t}\n\tif len(args[3]) <= 0 {\n\t\treturn nil, errors.New(\"3rd argument must be a non-empty string\")\n\t}\n\tif len(args[4]) <= 0 {\n\t\treturn nil, errors.New(\"4th argument must be a non-empty string\")\n\t}\n\tif len(args[5]) <= 0 {\n\t\treturn nil, errors.New(\"5th argument must be a non-empty string\")\n\t}\n\n\tif len(args[8]) <= 0 {\n\t\treturn nil, errors.New(\"8th argument must be a non-empty string\")\n\t}\n\tif len(args[13]) <= 0 {\n\t\treturn nil, errors.New(\"13th argument must be a non-empty string\")\n\t}\n\tif len(args[14]) <= 0 {\n\t\treturn nil, errors.New(\"14th argument must be a non-empty string\")\n\t}\n\tif len(args[15]) <= 0 {\n\t\treturn nil, errors.New(\"15th argument must be a non-empty string\")\n\t}\n\tuser := User{}\n\tuser.Id, err = strconv.Atoi(args[0])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get id as cannot convert it to int\")\n\t}\n\tuser.UserType = args[1]\n\tuser.FisrtName = args[2]\n\tuser.LastName = args[3]\n\tuser.Email = args[4]\n\tuser.Password = args[5]\n\t//user.ReTypePassword=args[6]\n\tuser.Operationalemail = args[6]\n\tuser.Phone, err = strconv.Atoi(args[7])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get phone as cannot convert it to int\")\n\t}\n\tuser.RelationshipManagerEmail = args[8]\n\tuser.CustomersLimit, err = strconv.Atoi(args[9])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get CustomersLimit as cannot convert it to int\")\n\t}\n\tuser.FeePercentage, err = strconv.Atoi(args[10])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get FeePercentage as cannot convert it to int\")\n\t}\n\tuser.InterestEarning, err = strconv.Atoi(args[11])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get InterestEarning as cannot convert it to int\")\n\t}\n\tuser.AccountNo, err = strconv.Atoi(args[12])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get AccountNo as cannot convert it to int\")\n\t}\n\tuser.IfscCode = args[13]\n\tuser.Pan = args[14]\n\n\tuser.Address = args[15]\n\n\tfmt.Println(\"user\", user)\n\t// get users data from chaincode\n\tUserAsBytes, err := stub.GetState(\"getvfmuser\")\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get users\")\n\t}\n\tvar allusers AllUsers\n\tjson.Unmarshal(UserAsBytes, &allusers) //un stringify it aka JSON.parse()\n\n\tallusers.Userlist = append(allusers.Userlist, user)\n\tfmt.Println(\"allusers\", allusers.Userlist) //append usersdetails to allusers[]\n\tfmt.Println(\"! 
appended user to allusers\")\n\tjsonAsBytes, _ := json.Marshal(allusers)\n\tfmt.Println(\"json\", jsonAsBytes)\n\terr = stub.PutState(\"getvfmuser\", jsonAsBytes) //rewrite allusers[]\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfmt.Println(\"- end user_register\")\n\treturn nil, nil\n}", "func (s *UsersService) CreateUsersSAS(users []models.User) (bool, error) {\n\tif len(users) == 0 {\n\t\tlog.Fatal(\"No usuarios para insertar en esta consulta\")\n\t}\n\n\tstmt, err := s.db2.Prepare(querys.CreateUsersSAS())\n\tif err != nil {\n\t\tlog.Fatal(\"Ha ocurrido un error al preparar la consulta\")\n\t}\n\n\tdefer stmt.Close()\n\tfor _, value := range users {\n\n\t\t// Storage image\n\t\timage_url, _ := s.StorageImageUser(value.Img_Url.String)\n\n\t\t_, err := stmt.Exec(value.Id, value.Apellido, value.Name, value.Cuit, value.Email, value.Password, value.Block, value.Created_At, value.Updated_At, image_url)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tdefer stmt.Close()\n\t}\n\n\treturn true, nil\n}", "func createUser(user *User) {\n\tvar dummy string\n\n\t// create user\n\tdb.QueryRow(\"INSERT INTO users (gh_id, username, realname, email, token, \"+\n\t\t\"worker_token, admin) VALUES ($1, $2, $3, $4, $5, $6, $7)\", user.GH_Id,\n\t\tuser.User_name, user.Real_name, user.Email, user.Token,\n\t\tuser.Worker_token, user.Admin).Scan(&dummy)\n}", "func InsertNewUser(user *User, uc *mongo.Collection) (string, error) {\n\tresult, err := uc.InsertOne(context.Background(), user)\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn \"\", err\n\t}\n\n\treturn result.InsertedID.(primitive.ObjectID).Hex(), nil\n}", "func (_obj *WebApiAuth) SysUser_Insert(req *SysUser, id *int32, _opt ...map[string]string) (err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = req.WriteBlock(_os, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = _os.Write_int32((*id), 2)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SysUser_Insert\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_int32(&(*id), 2, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn nil\n}", "func Insert() error {\n\tuser := &Users{\n\t\tUid: 1,\n\t\tName: \"viney\",\n\t\tEmail: \"[email protected]\",\n\t\tCreated: time.Now(),\n\t}\n\n\tid, err := engine.InsertOne(user)\n\tif err != nil {\n\t\treturn err\n\t} else if id <= 0 {\n\t\treturn errors.New(\"插入失败\")\n\t}\n\n\treturn nil\n}", "func S1() {\n\tdb, err := sql.Open(\"mysql\", \"root:123456@(127.0.0.1:3306)/test?parseTime=true\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif err := db.Ping(); err != nil 
{\n\t\tlog.Fatal(err)\n\t}\n\n\t// insert\n\t{\n\t\tusername := \"yinweiwen\"\n\t\tpasword := \"poi373\"\n\t\tcreateAt := time.Now()\n\n\t\tresult, err := db.Exec(`\ninsert into users (username,password,created_at) values (?,?,?)\n`, username, pasword, createAt)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tid, err := result.LastInsertId()\n\t\tfmt.Println(id)\n\t}\n\n\t// query\n\t{\n\t\tvar (\n\t\t\tid int\n\t\t\tusername string\n\t\t\tpassword string\n\t\t\tcreateAt time.Time\n\t\t)\n\n\t\terr := db.QueryRow(`select id, username, password, created_at from users where id=?`, 1).Scan(&id, &username, &password, &createAt)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tfmt.Println(\"query 1:\")\n\t\tfmt.Println(id, username, password, createAt)\n\t}\n\t// query all\n\t{\n\t\ttype user struct {\n\t\t\tid int\n\t\t\tusername string\n\t\t\tpassword string\n\t\t\tcreateAt time.Time\n\t\t}\n\t\trows, err := db.Query(`SELECT id, username, password, created_at FROM users`)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tdefer rows.Close()\n\n\t\tvar users []user\n\t\tfor rows.Next() {\n\t\t\tvar u user\n\n\t\t\terr := rows.Scan(&u.id, &u.username, &u.password, &u.createAt)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t\tusers = append(users, u)\n\t\t}\n\t\tif err := rows.Err(); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tfmt.Printf(\"%#v\", users)\n\t}\n}", "func RegisterUser(w http.ResponseWriter, r *http.Request) {\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, \"No input found!\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tvar newReq User\n\terr = json.Unmarshal(body, &newReq)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tvar username = newReq.UserID\n\tif _, ok := userData[username]; ok {\n\t\thttp.Error(w, \"User already exists!\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\t// log.Println(util.StringWithCharset(random.Intn(20)+10, charset))\n\tpreHashString := newReq.UserID + util.StringWithCharset(random.Intn(20)+10, util.Charset)\n\thashedString := crypto.CreateSHA256Hash(preHashString)\n\tuserData[username] = hashedString\n\thashOutput := UserHash{hashedString}\n\tlog.Println(userData)\n\toutJSON, err := json.Marshal(hashOutput)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(outJSON)\n}", "func (dau *DdgAdminUser) Insert(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\tvar res sql.Result\n\t// if already exist, bail\n\tif dau._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetDdgAdminUserTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// sql insert query, primary key provided by autoincrement\n\tsqlstr := `INSERT INTO ` + tableName +\n\t\t` (` +\n\t\t`name, account, password, permission_ids, status` +\n\t\t`) VALUES (` +\n\t\t`?, ?, ?, ?, ?` +\n\t\t`)`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, dau.Status)))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif tx != nil {\n\t\tres, err = tx.Exec(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, 
dau.Status)\n\t} else {\n\t\tres, err = dbConn.Exec(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, dau.Status)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// retrieve id\n\tid, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set primary key and existence\n\tdau.ID = uint64(id)\n\tdau._exists = true\n\n\treturn nil\n}", "func Register(user models.User) (string, bool, error){\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\t\n\tdefer cancel()\n\n\tdb := MongoClient.Database(\"test-api-go\")\n\tcol := db.Collection(\"users\")\n\n\tuser.Password, _ = PasswordEncrypt(user.Password)\n\n\tresult, err := col.InsertOne(ctx, user)\n\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\tObjID, _ := result.InsertedID.(primitive.ObjectID)\n\treturn ObjID.String(), true, nil\n}", "func AddUser(jsonData []uint8) {\n\tuser := new(models.Users)\n\n\tjson.Unmarshal(jsonData, user)\n\tfmt.Println(\"User================================>>>>>>>>>>>>>>>>\", string(jsonData))\n\n\tmodels.Db.NewRecord(user)\n\taffected := models.Db.Create(&user)\n\n\tfmt.Println(\"User Created :::::::::::::::::::::::\", models.Db.NewRecord(user), affected)\n}", "func (a *api) h_POST_users(c *gin.Context) {\n\tusr := &User{}\n\tif a.errorResponse(c, bindAppJson(c, usr)) {\n\t\treturn\n\t}\n\ta.logger.Info(\"Creating new user \", usr)\n\tmu := a.user2muser(usr)\n\tif a.errorResponse(c, a.Dc.CreateUser(mu)) {\n\t\treturn\n\t}\n\n\tif usr.Password != nil {\n\t\tif err := a.Dc.SetUserPasswd(usr.Login, ptr2string(usr.Password, \"\")); err != nil {\n\t\t\ta.logger.Warn(\"Could not set user password for new user \", usr.Login, \", err=\", err, \". Will leave it intact\")\n\t\t}\n\t}\n\n\tw := c.Writer\n\turi := composeURI(c.Request, usr.Login)\n\tw.Header().Set(\"Location\", uri)\n\tc.Status(http.StatusCreated)\n}" ]
[ "0.64592063", "0.6140572", "0.6128522", "0.6051467", "0.6021181", "0.5978919", "0.5976009", "0.5930088", "0.5891219", "0.5865668", "0.586433", "0.5858872", "0.579833", "0.57844", "0.5707237", "0.5693628", "0.5678874", "0.5669299", "0.56681514", "0.56557703", "0.564541", "0.5619603", "0.5596056", "0.5588347", "0.5587654", "0.55833125", "0.55808383", "0.5579844", "0.55646265", "0.5562761", "0.554701", "0.55195963", "0.551522", "0.55094", "0.54975677", "0.5494037", "0.5491789", "0.5491529", "0.54882586", "0.5487966", "0.54831487", "0.54828495", "0.5478741", "0.5471266", "0.54344445", "0.54310995", "0.54267544", "0.5425447", "0.5415262", "0.54078037", "0.54037136", "0.5401937", "0.53973395", "0.53866565", "0.5384201", "0.5371186", "0.5370747", "0.5369683", "0.5364727", "0.5363089", "0.53613174", "0.53546864", "0.5354097", "0.53470355", "0.5346981", "0.5342299", "0.53361946", "0.5321329", "0.53154707", "0.5308103", "0.5302771", "0.5299101", "0.5284969", "0.5284938", "0.52739835", "0.5271598", "0.526607", "0.52658045", "0.52645725", "0.5261099", "0.5260314", "0.5257461", "0.5254092", "0.5252904", "0.52488756", "0.5246571", "0.52441156", "0.5243526", "0.52426183", "0.5241623", "0.523494", "0.5234548", "0.5233551", "0.5230175", "0.5226803", "0.522007", "0.5218388", "0.52174443", "0.5212381", "0.521148" ]
0.6181628
1
2 Try to insert a user that already exists
func TestInsertNewUserServiceAlreadyExists (t *testing.T){ err := PostNewUserService(user_01) assert.Equal(t, 409, err.HTTPStatus) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func createUser(usr *User) error {\n\tpasswordHash, err := encrypt(usr.password)\n\tusr.password = \"\"\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif usr.email != \"\" {\n\t\tusr.emailToken, err = generateEmailToken()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\temailTokenHash, err := encrypt(usr.emailToken)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tqueryStr := \"INSERT INTO users(username, password, email, email_token) VALUES($1, $2, $3, $4) returning id\"\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash, usr.email, emailTokenHash).Scan(&usr.id)\n\n\t} else {\n\t\tqueryStr := \"INSERT INTO users(username, password) VALUES($1, $2) returning id\"\n\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash).Scan(&usr.id)\n\t}\n\n\tif err != nil {\n\t\t// check if the error is for a violation of a unique constraint like the username or email index\n\t\tif err.(*pq.Error).Code == \"23505\" { // 23505 is duplicate key value violates unique constraint\n\t\t\tswitch err.(*pq.Error).Constraint {\n\t\t\tcase \"unique_username\":\n\t\t\t\treturn ErrDuplicateUsername\n\t\t\tcase \"unique_email\":\n\t\t\t\treturn ErrDuplicateEmail\n\t\t\t}\n\t\t}\n\n\t\t// all our other sql errors\n\t\treturn err\n\t}\n\tlog.Printf(\"user %s created\", usr.Username)\n\treturn addSession(usr)\n\n}", "func CreateNewUser(username, password string) (userID string, err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var checkIfTheUserAlreadyExists = func(username string, userData map[string]interface{}) (err error) {\n var salt = userData[\"_salt\"].(string)\n var loginUsername = userData[\"_username\"].(string)\n\n if SHA256(username, salt) == loginUsername {\n err = createError(020)\n }\n\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n for _, userData := range users {\n err = checkIfTheUserAlreadyExists(username, userData.(map[string]interface{}))\n if err != nil {\n return\n }\n }\n\n var defaults = defaultsForNewUser(username, password)\n userID = defaults[\"_id\"].(string)\n users[userID] = defaults\n\n saveDatabase(data)\n\n return\n}", "func (user *User) Create() *errors.RestError {\n\t// check if user already exists or email passed has already been registered or not\n\tcurrent := usersDB[user.ID]\n\tif current != nil {\n\t\tif current.Email == user.Email {\n\t\t\treturn errors.BadRequestError(fmt.Sprintf(\"email %s already registered\", user.Email))\n\t\t}\n\t\treturn errors.BadRequestError(fmt.Sprintf(\"user %d already exists\", user.ID))\n\t}\n\tuser.DateCreated = date.GetNowString()\n\tusersDB[user.ID] = user\n\treturn nil\n}", "func UserAlreadyExists() error {\n\treturn fmt.Errorf(\"user already exists\")\n}", "func createUser(u *User) error {\n\tif u.Status == 0 {\n\t\treturn errors.New(\"Invalid user value\")\n\t}\n\n\treturn nil\n}", "func ExistingUser(email, password string) bool {\n\tvar u User\n\tDb.Where(\"email = ? 
AND password = ?\", email, password).First(&u)\n\tif email != u.Email && password != u.Password {\n\t\treturn false\n\t}\n\treturn true\n}", "func (u *User) checkExistUser() error {\n\tif u.Id == \"\" && u.Name == \"\" {\n\t\treturn fmt.Errorf(\"invalid user\")\n\t}\n\n\tif u.Id != \"\" {\n\t\t_,err := GetUserById(u.Id)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\tif u.Name != \"\" {\n\t\t_,err := GetUserByUserName(u.Name)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func UserExistDb(email string) bool {\n\tlog.Println(\"call db func\")\n\tif _, ok := db[email]; !ok {\n\t\treturn false\n\t}\n\treturn true\n}", "func (rep rep_users) Create(user models.Usuario) (uint64, error) {\n\n\tstatement, erro := rep.db.Prepare(\"INSERT INTO usuarios (nome, nick, email, senha) VALUES (?, ?, ?, ?)\")\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\tdefer statement.Close()\n\n\tresult, erro := statement.Exec(user.Nome, user.Nick, user.Email, user.Senha)\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\tlastIDInsert, erro := result.LastInsertId()\n\n\tif erro != nil {\n\t\treturn 0, erro\n\t}\n\n\treturn uint64(lastIDInsert), nil\n}", "func CreateUser(db *sql.DB, user *models.UserCreate) (int, error) {\n\t// check unique username\n\tquery := \"SELECT * FROM users WHERE username = ?\"\n\trows, err := db.Query(query, user.Username)\n\tif err != nil {\n\t\tlog.Errorf(\"Error executing query %v \", query)\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\tif rows.Next() {\n\t\treturn 0, ErrUsernameIsNotUnique\n\t}\n\n\t// check unique email\n\tquery = \"SELECT * FROM users WHERE email = ?\"\n\trows, err = db.Query(query, user.Email)\n\tif err != nil {\n\t\tlog.Errorf(\"Error executing query %v \", query)\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\tif rows.Next() {\n\t\treturn 0, ErrEmailIsNotUnique\n\t}\n\n\t// Insert\n\tres, err := db.Exec(\"INSERT INTO users (username, email, password) VALUES(?, ?, ?)\", user.Username, user.Email, user.Hash)\n\tif err != nil {\n\t\tlog.Errorf(\"Error inserting\")\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\n\tid, err := res.LastInsertId()\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\n\treturn int(id), nil\n}", "func insertUser(username string, password string, kind int) bool {\n\tresult, err := mysql_client.Exec(\"INSERT INTO User(username, password, kind) VALUES(?,?,?)\", username, password, kind)\n\tif err != nil {\n\t\t// insert failed\n\t\treturn false\n\t}\n\t_, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn false\n\t}\n\t_, err = result.RowsAffected()\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "func createUserHandler(res http.ResponseWriter, req *http.Request) {\n\tvar user MongoUserSchema\n\tjson.NewDecoder(req.Body).Decode(&user)\n\t// fmt.Println(hash(user.Password))\n\tif checkEmailValidity(user.Email) == false {\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\tres.Write([]byte(\"Invalid e-mail id!\"))\n\t\treturn\n\t}\n\n\tusersCol := client.Database(\"Aviroop_Nandy_Appointy\").Collection(\"users\")\n\tctx, _ := context.WithTimeout(context.Background(), 15*time.Second)\n\tcursor, err := usersCol.Find(ctx, bson.M{})\n\n\tfor cursor.Next(ctx) {\n\t\tvar backlogUser MongoUserSchema\n\t\tcursor.Decode(&backlogUser)\n\t\tif backlogUser.Email == user.Email {\n\t\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\t\tres.Write([]byte(`{\"This e-mail is already registered!\":\"` + err.Error() + 
`\"}`))\n\t\t\treturn\n\t\t}\n\t}\n\n\thashedPswd := hashPassword(user.Password)\n\tuser.Password = hashedPswd\n\n\tuserResult, insertErrorUser := usersCol.InsertOne(ctx, user)\n\tif insertErrorUser != nil {\n\t\tfmt.Println(\"Error while creating user: \", insertErrorUser)\n\t} else {\n\t\tjson.NewEncoder(res).Encode(userResult)\n\t\tuserID := userResult.InsertedID\n\t\tfmt.Println(\"New user id: \", userID)\n\t}\n\n\tres.Header().Add(\"content-type\", \"application/json\")\n\tres.WriteHeader(http.StatusOK)\n}", "func (m *MgoUserManager) insertUser(u *auth.User) error {\n\terr := m.UserColl.Insert(u)\n\tif err != nil {\n\t\tif mgo.IsDup(err) {\n\t\t\treturn auth.ErrDuplicateEmail\n\t\t}\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func AddNewUser(u *Users) {\n\to := orm.NewOrm()\n\texist := o.QueryTable(new(Users)).Filter(\"chat_id\", u.ChatID).Exist()\n\n\tif !exist {\n\t\t_, _ = o.Insert(u)\n\t\treturn\n\t}\n\tlog.Info(\"user already exist\")\n}", "func UserSignUp(username string, passwd string) bool {\n\tstmt, err := mydb.DBConn().Prepare(\n\t\t\"insert ignore into tb_user(`username`, `password`, `status`) values(?,?,1)\")\n\tif err != nil {\n\t\tfmt.Println(\"Failded to prepare statement, err: \", err.Error())\n\t\treturn false\n\t}\n\tdefer stmt.Close()\n\n\tret, err := stmt.Exec(username, passwd)\n\tif err != nil {\n\t\tfmt.Println(\"Failed to insert, err:\" + err.Error())\n\t\treturn false\n\t}\n\trows, err := ret.RowsAffected()\n\tfmt.Printf(\"rows: %d\\n\", rows)\n\tif nil == err && rows > 0 {\n\t\tfmt.Printf(\"user %s creat OK\\n\", username)\n\t\treturn true\n\t}\n\treturn false\n}", "func (r *UserRepository) Create(ctx context.Context, u *model.User) error {\n\tvar user model.User\n\tr.DB.Where(\"email = ?\", u.Email).Find(&user)\n\t\n\tif user.Email != u.Email {\n\t\tuser.UID = uuid.New()\n\t\tuser.Email = u.Email\n\t\tuser.Password = u.Password\n\t\tr.DB.Create(user)\n\t\treturn nil\n\t}\n\n\treturn apperrors.NewConflict(\"email\", u.Email)\n}", "func (s *authenticationService) createUser(ctx context.Context, m query.SQLManager, user *model.User) (*model.User, error) {\n\t// not allow duplicated name.\n\tyes, err := s.userService.IsAlreadyExistName(ctx, m, user.Name)\n\tif yes {\n\t\terr = &model.AlreadyExistError{\n\t\t\tPropertyName: model.NameProperty,\n\t\t\tPropertyValue: user.Name,\n\t\t\tDomainModelName: model.DomainModelNameUser,\n\t\t}\n\n\t\treturn nil, errors.WithStack(err)\n\t}\n\n\tif err != nil {\n\t\tif _, ok := errors.Cause(err).(*model.NoSuchDataError); !ok {\n\t\t\treturn nil, errors.Wrap(err, \"failed to check whether already exists name or not\")\n\t\t}\n\t}\n\n\tid, err := s.userRepository.InsertUser(ctx, m, user)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to insert user\")\n\t}\n\tuser.ID = id\n\n\treturn user, nil\n}", "func (r userRepository) newErrExisted() error {\n\treturn apperror.New(\"USER_EXISTED\", \"user is existed\", \"用户已存在\")\n}", "func CreateUser(w http.ResponseWriter, r *http.Request) {\n\treq := &models.User{}\n\tif err := DecodeRequestBody(r, req); err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Write([]byte(\"Error while decoding the request body\" + err.Error()))\n\t\treturn\n\t}\n\tif _, err := dal.GetUsers(req.Name); err == nil {\n\t\tw.WriteHeader(http.StatusConflict)\n\t\tw.Write([]byte(\"already existing user\"))\n\t\treturn\n\t}\n\tif err := dal.CreateUser(req); err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tw.Write([]byte(err.Error()))\n\t}\n}", "func (u *User) Save() 
*errors.RestErr {\n\tcurrent := usersDB[u.Id]\n\tif current != nil {\n\t\tif current.Email == u.Email {\n\t\t\treturn errors.NewBadRequestError(fmt.Sprintf(\"email %s already registerd\", u.Email))\n\t\t}\n\t\treturn errors.NewBadRequestError(\"user already exists\")\n\t}\n\n\tu.DateCreated = date_utils.GetNowString()\n\n\tusersDB[u.Id] = u\n\treturn nil\n}", "func CheckExistUser(email string) (models.User, bool, string) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//When end instruction remove timeout operation and liberate context\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\tobject := bson.M{\"Email\": email}\n\n\tvar result models.User\n\n\terr := collection.FindOne(ctx, object).Decode(&result)\n\n\tID := result.ID.Hex()\n\n\tif err != nil {\n\t\treturn result, false, ID\n\t}\n\n\treturn result, true, ID\n\n}", "func CreateUser(id int, email string, pass string, bName string,\n\tbAccNum int, bRoutNum int) {\n\tvar count int = 0\n\n\tfor count < len(userList) {\n\t\tif userList[count].uID == id {\n\t\t\tfmt.Println(\"That user id is taken. Please choose a new ID.\")\n\t\t\treturn\n\t\t} else {\n\t\t\tcount++\n\t\t}\n\t}\n\n\tpANew := payAccount{\n\t\tbankName: bName,\n\t\taccountNumber: bAccNum,\n\t\troutingNumber: bRoutNum,\n\t}\n\tuNew := user{\n\t\tuID: id,\n\t\tuEmail: email,\n\t\tuPassword: pass,\n\t\tuBankAccount: pANew,\n\t}\n\tAddUserToDatabase(uNew)\n}", "func CreateUser(u *modelUser.User) error {\n\texists := FindOneUser(u.Username)\n\tfmt.Println(exists)\n\tif exists == nil {\n\t\tgeneratehash, err := bcrypt.GenerateFromPassword([]byte(u.Password), bcrypt.DefaultCost)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tu.Password = string(generatehash)\n\t\tif err := r.Table(\"users\").Insert(u).Exec(session); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\terr := errors.New(\"User already exists!\")\n\tfmt.Println(err)\n\treturn err\n}", "func CreateUser(e *Engine, username string, password string) (int64, error) {\n\t//First ensure no such user exists.\n\tsres, _, err := e.RawSelect(Filter(\"autoscope_users\", map[string]interface{}{\n\t\t\t\"username\": username,\n\t}))\n\tif sres.Next() {\n\t\treturn -1, errors.New(\"User with given username already exists\")\n\t}\n\n\tsalt := rand.Int31()\n\tsalted := password + strconv.FormatInt(int64(salt), 10)\n\tpasshash, err := bcrypt.GenerateFromPassword([]byte(salted), 10)\n\tif err != nil { return -1, err }\n\tres, err := e.RawInsert(InsertQuery{\n\t\tTable: \"autoscope_users\",\n\t\tData: map[string]interface{}{\n\t\t\t\"username\": username,\n\t\t\t\"passhash\": string(passhash),\n\t\t\t\"salt\": salt,\n\t\t},\n\t})\n\tif err != nil { return -2, err }\n\tinsertId, err := res.LastInsertId()\n\treturn insertId, err\n}", "func (r mysqlUserRepository) Create(email string) (domain.User, error) {\n\tuser := domain.User{Email: email}\n\terr := r.db.Create(&user).Error\n\n\tif err != nil && strings.Contains(err.Error(), \"Duplicate entry\") {\n\t\treturn domain.User{}, db.DuplicateError{}\n\t}\n\n\treturn user, err\n}", "func (u UserRepo) Create(user model.User) (int, error) {\n\tvar id int\n\trows, err := u.db.Query(\"INSERT INTO users (login , password,roleID) VALUES ($1,$2,$3) RETURNING id \", user.Login, user.Password, dto.USER)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif rows.Next() {\n\t\terr = rows.Scan(&id)\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn id, rows.Err()\n}", "func (db 
*Database) UserExistInDB(newUserRecord models.User) bool {\n\tvar count int64\n\t// Count DB entries matching the Slack User ID\n\tif err := db.Where(\"slack_user = ?\", newUserRecord.SlackUser).First(&newUserRecord).Count(&count); err != nil {\n\t\tif count == 0 { // Avoid duplicate User entries in the DB.\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func (u *UserModel) Insert(name, email, password string) error {\n\t// Create a bcrypt hash of the plain-text password.\n\thashedPw, err := bcrypt.GenerateFromPassword([]byte(password), 12)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tstmt := `INSERT INTO users (name, email, hashed_password, created)\n\tVALUES(?, ?, ?, UTC_TIMESTAMP())`\n\n\t// Use the Exec(0 method to insert the user details and hashed password\n\t// into the users table.\n\t_, err = u.DB.Exec(stmt, name, email, string(hashedPw))\n\tif err != nil {\n\t\t// If this returns an error, we use the errors.As() function to check\n\t\t// whether the error has the type *mysql.MySQLError. If it does, the\n\t\t// error will be assigned to the mySQLError variable. We can then check\n\t\t// whether or not the error relates to our users_uc_email key by checking\n\t\t// the contents of the message string. If it does, we return an\n\t\t// ErrDuplicateEmail error.\n\t\tvar mySQLError *mysql.MySQLError\n\t\tif errors.As(err, &mySQLError) {\n\t\t\tif mySQLError.Number == 1062 && strings.Contains(mySQLError.Message,\n\t\t\t\t\"users_uc_email\") {\n\t\t\t\treturn models.ErrDuplicateEmail\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n\n}", "func Create(nickname, email, password string) (int, int) {\n\tnickname, ok := misc.ValidateString(nickname, misc.MaxLenS)\n\tif !ok {\n\t\tlog.Println(\"Wrong nickname\", nickname)\n\t\treturn 0, misc.WrongName\n\t}\n\n\temail, ok = misc.ValidateEmail(email)\n\tif !ok {\n\t\tlog.Println(\"Wrong email\", email)\n\t\treturn 0, misc.WrongEmail\n\t}\n\n\tif !misc.IsPasswordValid(password) {\n\t\tlog.Println(\"Wrong password\")\n\t\treturn 0, misc.WrongPassword\n\t}\n\n\tsalt, err := auth.GenerateSalt()\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn 0, misc.NoSalt\n\t}\n\n\thash, err := auth.PasswordHash(password, salt)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn 0, misc.NothingToReport\n\t}\n\n\tuserId, confirmationCode := 0, misc.RandomString(misc.ConfCodeLen)\n\terr = psql.Db.QueryRow(`\n\t\tINSERT INTO users (nickname, email, password, salt, confirmation_code)\n\t\tVALUES ($1, $2, $3, $4, $5)\n\t\tRETURNING id`, nickname, email, hash, salt, confirmationCode,\n\t).Scan(&userId)\n\tif err == nil {\n\t\tmailer.EmailConfirmation(email, confirmationCode)\n\t\treturn userId, misc.NothingToReport\n\t}\n\n\terr, code := psql.CheckSpecificDriverErrors(err)\n\tlog.Println(err)\n\treturn 0, code\n}", "func sqlUserExists(db *sql.DB, username string) (userID, userStatus int) {\n\tsqlUserQuery := `SELECT user_id, status FROM public.users WHERE username=$1;`\n\trow := db.QueryRow(sqlUserQuery, username)\n\tswitch err := row.Scan(&userID, &userStatus); err {\n\tcase sql.ErrNoRows:\n\t\tfmt.Println(\"User not found, attempting insert\")\n\t\tuserID = sqlUserInsert(db, username)\n\t\tuserStatus = 3\n\t\treturn\n\tcase nil:\n\t\tfmt.Println(\"User found, checking hash\")\n\t\treturn\n\tdefault:\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n}", "func (u *User) Create(ctx context.Context, log *log.Logger, w http.ResponseWriter, r *http.Request, params map[string]string) error {\n\tvar newU NewUser\n\n\tif err := web.Unmarshal(r.Body, &newU); err != nil 
{\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tid, err := uuid.NewV4()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tuserDetails := UserDetails{\n\t\tUserName: newU.UserName,\n\t\tEmail: newU.Email,\n\t\tID: id.String(),\n\t}\n\n\tencodedData, err := json.Marshal(userDetails)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdbConn := u.MasterDB\n\n\t// Save User by ID\n\tfID := func(db *leveldb.DB) error {\n\t\t// TODO: while inserting validate if email already exists by checking with email index\n\t\treturn db.Put([]byte(userDetails.ID), encodedData, nil)\n\t}\n\n\t// Save User by Email\n\tfEmail := func(db *leveldb.DB) error {\n\t\t// TODO: while inserting validate if already exists or not\n\t\treturn db.Put([]byte(userDetails.Email), []byte(userDetails.ID), nil)\n\t}\n\n\tif err := dbConn.Execute(fID); err != nil {\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tif err := dbConn.Execute(fEmail); err != nil {\n\t\t// TODO: if error remove the data from ID as well and throw error\n\t\treturn errors.Wrap(err, \"\")\n\t}\n\n\tstatus := struct {\n\t\tID string `json:\"id\"`\n\t}{\n\t\tID: userDetails.ID,\n\t}\n\tweb.Respond(ctx, log, w, status, http.StatusCreated)\n\treturn nil\n}", "func Create(user User) error {\n\t\n}", "func (i *UsersInteractor) Create(user User) (int, error) {\n\tif len(user.Name) == 0 {\n\t\treturn 0, errors.New(\"username can't be empty\")\n\t}\n\tid, err := i.users.Store(user)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn id, nil\n}", "func CreateUser(db *gorm.DB) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\t// Get the mandatory query parameters.\n\t\tname, ok := c.GetPostForm(\"name\")\n\t\tif !ok {\n\t\t\terrors.Apply(c, errors.MissingParameters)\n\t\t\treturn\n\t\t}\n\t\tusername, ok := c.GetPostForm(\"username\")\n\t\tif !ok {\n\t\t\terrors.Apply(c, errors.MissingParameters)\n\t\t\treturn\n\t\t}\n\t\tif !usernameRegexp.MatchString(username) {\n\t\t\terrors.Apply(c, errors.BadParameters)\n\t\t\treturn\n\t\t}\n\t\tpassword, ok := c.GetPostForm(\"password\")\n\t\tif !ok {\n\t\t\terrors.Apply(c, errors.MissingParameters)\n\t\t\treturn\n\t\t}\n\n\t\t// Try getting type.\n\t\tuserType, ok := c.GetPostForm(\"type\")\n\t\tif !ok {\n\t\t\tuserType = models.General\n\t\t}\n\t\tif userType != models.Admin && userType != models.Writer && userType != models.General {\n\t\t\terrors.Apply(c, errors.BadParameters)\n\t\t\treturn\n\t\t}\n\t\tif _, ok := c.Get(\"user\"); userType != models.General && !ok {\n\t\t\terrors.Apply(c, errors.NoPermission)\n\t\t\treturn\n\t\t}\n\n\t\t// Check if any users have the same username.\n\t\tvar checkUsers []models.User\n\t\terr := db.Where(\"user_name = ?\", username).\n\t\t\tFind(&checkUsers).\n\t\t\tError\n\t\tif err != nil && err != gorm.ErrRecordNotFound {\n\t\t\terrors.Apply(c, err)\n\t\t\treturn\n\t\t}\n\t\tif len(checkUsers) != 0 {\n\t\t\terrors.Apply(c, errors.UserExists)\n\t\t\treturn\n\t\t}\n\n\t\t// Create the user.\n\t\tuser := &models.User{\n\t\t\tType: userType,\n\t\t\tName: name,\n\t\t\tUserName: username,\n\t\t}\n\t\tif err := user.SetPassword(password); err != nil {\n\t\t\terrors.Apply(c, err)\n\t\t\treturn\n\t\t}\n\t\tif err := db.Create(user).Error; err != nil {\n\t\t\terrors.Apply(c, err)\n\t\t\treturn\n\t\t}\n\n\t\t// Respond with the user's JSON.\n\t\tc.JSON(200, user)\n\t}\n}", "func (u *UsersController) Create(ctx *gin.Context) {\n\tvar userJSON tat.UserCreateJSON\n\tctx.Bind(&userJSON)\n\tvar userIn tat.User\n\tuserIn.Username = u.computeUsername(userJSON)\n\tuserIn.Fullname = 
strings.TrimSpace(userJSON.Fullname)\n\tuserIn.Email = strings.TrimSpace(userJSON.Email)\n\tcallback := strings.TrimSpace(userJSON.Callback)\n\n\tif len(userIn.Username) < 3 || len(userIn.Fullname) < 3 || len(userIn.Email) < 7 {\n\t\terr := fmt.Errorf(\"Invalid username (%s) or fullname (%s) or email (%s)\", userIn.Username, userIn.Fullname, userIn.Email)\n\t\tAbortWithReturnError(ctx, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tif err := u.checkAllowedDomains(userJSON); err != nil {\n\t\tctx.JSON(http.StatusForbidden, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tuser := tat.User{}\n\tfoundEmail, errEmail := userDB.FindByEmail(&user, userJSON.Email)\n\tfoundUsername, errUsername := userDB.FindByUsername(&user, userJSON.Username)\n\tfoundFullname, errFullname := userDB.FindByFullname(&user, userJSON.Fullname)\n\n\tif foundEmail || foundUsername || foundFullname || errEmail != nil || errUsername != nil || errFullname != nil {\n\t\te := fmt.Errorf(\"Please check your username, email or fullname. If you are already registered, please reset your password\")\n\t\tAbortWithReturnError(ctx, http.StatusBadRequest, e)\n\t\treturn\n\t}\n\n\ttokenVerify, err := userDB.Insert(&userIn)\n\tif err != nil {\n\t\tlog.Errorf(\"Error while InsertUser %s\", err)\n\t\tctx.AbortWithError(http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tgo userDB.SendVerifyEmail(userIn.Username, userIn.Email, tokenVerify, callback)\n\n\tinfo := \"\"\n\tif viper.GetBool(\"username_from_email\") {\n\t\tinfo = fmt.Sprintf(\" Note that configuration of Tat forced your username to %s\", userIn.Username)\n\t}\n\tctx.JSON(http.StatusCreated, gin.H{\"info\": fmt.Sprintf(\"please check your mail to validate your account.%s\", info)})\n}", "func CreateUser(username string, password string, email string) (*UserEntry, error) {\n if users, nerr := getUser(username); nerr != nil || users.Next() {\n if nerr != nil {\n return nil, nerr\n }\n return nil, UserExistsError\n }\n\n // Encrypt password\n encryptedPassword, nerr := bcrypt.GenerateFromPassword([]byte(password), passwordEncryptCost)\n if nerr != nil {\n return nil, nerr\n }\n \n // Send DB query to create the user.\n newUser := UserEntry{Username: username, Password: encryptedPassword, IsAdmin: false, Email: email}\n retRow, nerr := model.Database.Exec(\"INSERT INTO users (username, password, isadmin, email) VALUES (?, ?, ?, ?)\", newUser.Username, newUser.Password, newUser.IsAdmin, newUser.Email)\n if nerr != nil {\n return nil, nerr\n }\n newUser.UserId, nerr = retRow.LastInsertId()\n if nerr != nil {\n return nil, nerr\n }\n return &newUser, nil\n}", "func UserExist(db *gorm.DB, tel string) bool {\n\tvar user model.User\n\tdb.First(&user, \"telephone = ?\", tel)\n\tif user.ID != 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func getOrRegisterUser(provider string, user *structs.User) models.User {\n\tvar userData models.User\n\n\tconfig.DB.Where(\"provider = ? 
AND social_id = ?\", provider, user.ID).First(&userData)\n\n\tif userData.ID == 0 {\n\t\ttoken, _ := RandomToken()\n\n\t\tnewUser := models.User{\n\t\t\tFullName: user.FullName,\n\t\t\tUserName: user.Username,\n\t\t\tEmail: user.Email,\n\t\t\tSocialID: user.ID,\n\t\t\tProvider: provider,\n\t\t\tAvatar: user.Avatar,\n\t\t\tVerificationToken: token,\n\t\t}\n\n\t\tconfig.DB.Create(&newUser)\n\n\t\treturn newUser\n\t}\n\n\treturn userData\n}", "func (db *Database) CreateUser(name string, login UserLogin, ip, ua string) (*DBUser, error) {\n\tuser := &DBUser{\n\t\tName: name,\n\t\tLogin: login,\n\t\tSignupIP: ip,\n\t\tLastIP: ip,\n\t\tUserAgent: ua,\n\t}\n\n\tvar c uint\n\tdb.sql.Model(user).Where(\"login = ?\", user.Login.String()).Count(&c)\n\tif c > 0 {\n\t\treturn nil, fmt.Errorf(\"user with same login (%s) already in database\", user.Login.String())\n\t}\n\tdb.sql.Create(user)\n\n\treturn user, nil\n}", "func (m *MultiDB) ExistingUser(username string) bool {\n\tresult := m.isExisting(\"Username\", username)\n\treturn result\n}", "func (s *Storage) AnotherUserExists(userID int64, username string) bool {\n\tvar result bool\n\ts.db.QueryRow(`SELECT true FROM users WHERE id != $1 AND username=LOWER($2)`, userID, username).Scan(&result)\n\treturn result\n}", "func (call *UserUsecaseImpl) Create(user *models.User) (*models.User, error) {\n\tuser, err := common.Encrypt(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tstatus := call.userRepo.CheckMail(user)\n\tif !status {\n\t\treturn nil, errors.New(\"Opps.. sorry email already use other account\")\n\t}\n\n\treturn call.userRepo.Create(user)\n}", "func (serv *Server) RegisterUser(creds Credentials) (err error) {\n row := serv.db.QueryRow(\"select uid from users where username = ?;\", creds.Username)\n\n var uid int\n if row.Scan(&uid) == sql.ErrNoRows {\n salt := make([]byte, SaltLength)\n rand.Read(salt)\n\n saltedHash, err := HashAndSaltPassword([]byte(creds.Password), salt)\n if err != nil {\n return err\n }\n\n _, err = serv.db.Exec(\n `insert into users (username, salt, saltedhash) values (?, ?, ?);`,\n creds.Username, salt, saltedHash)\n\n if err != nil {\n err = ErrRegistrationFailed\n }\n } else {\n err = ErrUsernameTaken\n }\n\n return\n}", "func (a MySQLAdapter) Create(user entity.User) (uid int, err error) {\n\tinsertStmt := fmt.Sprintf(\"INSERT INTO %s VALUES (DEFAULT, ?, ?, ?, ?)\", a.table)\n\thashedPwd, err := passgen.HashPassword([]byte(user.Password))\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tres, err := a.db.Exec(insertStmt, user.Username, hashedPwd, user.Email, time.Now())\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tlastInsertID, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn uid, err\n\t}\n\tuid = int(lastInsertID)\n\treturn uid, nil\n}", "func createUser(u *models.User, db *sql.DB) error {\n\tif err := u.CryptPwd(); err != nil {\n\t\treturn fmt.Errorf(\"Cryptage du mot de passe de %s : %v\", u.Name, err)\n\t}\n\tif err := u.Create(db); err != nil {\n\t\treturn fmt.Errorf(\"Création en base de données de %s : %v\", u.Name, err)\n\t}\n\treturn nil\n}", "func insertUser(id int) result {\n\tr := result{\n\t\tid: id,\n\t\top: fmt.Sprintf(\"insert USERS value (%d)\", id),\n\t}\n\n\t// Randomize if the insert fails or not.\n\tif rand.Intn(10) == 0 {\n\t\tr.err = fmt.Errorf(\"Unable to insert %d into USER table\", id)\n\t}\n\n\treturn r\n}", "func (m Users) Register(user User) error {\n\tif !isValidPass(user.Password) {\n\t\treturn ErrInvalidPass\n\t}\n\tif !validEmail.MatchString(user.Email) {\n\t\treturn 
ErrInvalidEmail\n\t}\n\thash, err := hashPassword(user.Password)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsqlStatement := `INSERT INTO users (email, password) VALUES($1, $2) RETURNING id, created_at;`\n\t_, err = m.DB.Exec(sqlStatement, user.Email, hash)\n\tif err, ok := err.(*pq.Error); ok {\n\t\tif err.Code == \"23505\" {\n\t\t\treturn ErrUserAlreadyExist\n\t\t}\n\t}\n\n\treturn err\n}", "func (repository Users) Create(user models.User) (uint64, error) {\n\n\tstatement, error := repository.db.Prepare(\"insert into users (name, nick, email, password) values(?,?,?,?)\")\n\n\tif error != nil {\n\t\treturn 0, error\n\t}\n\n\tdefer statement.Close()\n\n\tresult, error := statement.Exec(user.Name, user.Nick, user.Email, user.Password)\n\n\tif error != nil {\n\t\treturn 0, error\n\t}\n\n\tlastInsertedID, error := result.LastInsertId()\n\n\tif error != nil {\n\t\treturn 0, error\n\t}\n\n\treturn uint64(lastInsertedID), nil\n}", "func (m *Manager) Create(ctx context.Context, tx *sql.Tx, user v0.User) error {\n\t_, err := tx.ExecContext(ctx, `\n\t\t\t\tINSERT INTO users (\n\t\t\t\t\tname, \n\t\t\t\t\temail, \n\t\t\t\t\tprimary_public_key, \n\t\t\t\t\trecovery_public_key, \n\t\t\t\t\tsuper_user, \n\t\t\t\t\tauth_level, \n\t\t\t\t\tweight,\n\t\t\t\t\tuser_set\n\t\t\t\t\t) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tuser.Name,\n\t\tuser.Email,\n\t\tuser.PrimaryPublicKey,\n\t\tuser.RecoveryPublicKey,\n\t\tuser.SuperUser,\n\t\tuser.AuthLevel,\n\t\tuser.Weight,\n\t\tuser.Set,\n\t)\n\treturn err\n}", "func (dbservice *UserDbservice) SaveUser(user model.User) (model.User, *myerror.DBError) {\n\tpass, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)\n\tif err != nil {\n\t\treturn user, &myerror.DBError{Code:500, Clause: \"Cannot Hash\", Message: err.Error()}\n\t} else {\n\t\tuser.Password = string(pass)\n\t}\n\tsaveerr := dbservice.DbConnection.Create(&user).Error\n\tif saveerr != nil {\n\t\tlog.Println(saveerr)\n\t\tdbError := myerror.DBError{Code: 409, Message: saveerr.Error(), Clause: \"DUPLICATE\"}\n\t\treturn user, &dbError\n\t}\n\treturn user, nil\n}", "func isUserExist(usernameQuery string) bool {\n\tvar user User_DB\n\terr := mysql_client.QueryRow(\"SELECT username, password, kind FROM User WHERE username=?\", usernameQuery).Scan(&user.Username, &user.Password, &user.Kind)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false\n\t} else {\n\t\treturn true\n\t}\n}", "func handleSignUp(w http.ResponseWriter, r *http.Request) {\n\tif parseFormErr := r.ParseForm(); parseFormErr != nil {\n\t\thttp.Error(w, \"Sent invalid form\", 400)\n\t}\n\n\tname := r.FormValue(\"name\")\n\tuserHandle := r.FormValue(\"userHandle\")\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\n\tif !verifyUserHandle(userHandle) {\n\t\thttp.Error(w, \"Invalid userHandle\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif !verifyEmail(email) {\n\t\thttp.Error(w, \"Invalid email\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif !verifyPassword(password) {\n\t\thttp.Error(w, \"Password does not meet complexity requirements\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\thashed, _ := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\n\turChannel := make(chan *database.InsertResponse)\n\tgo createUser(\n\t\tmodel.User{Name: name, UserHandle: userHandle, Email: email, Password: hashed},\n\t\turChannel,\n\t)\n\tcreatedUser := <-urChannel\n\n\tif createdUser.Err != nil {\n\t\tlog.Println(createdUser.Err)\n\n\t\tif strings.Contains(createdUser.Err.Error(), 
\"E11000\") {\n\t\t\tif strings.Contains(createdUser.Err.Error(), \"index: userHandle_1\") {\n\t\t\t\thttp.Error(w, \"Userhandle \"+userHandle+\" already registered\", http.StatusConflict)\n\t\t\t} else {\n\t\t\t\thttp.Error(w, \"Email \"+email+\" already registered\", http.StatusConflict)\n\t\t\t}\n\t\t} else {\n\t\t\tcommon.SendInternalServerError(w)\n\t\t}\n\n\t} else {\n\t\tlog.Println(\"Created user with ID \" + createdUser.ID)\n\t\tw.WriteHeader(http.StatusOK)\n\t\t_, wError := w.Write([]byte(\"Created user with ID \" + createdUser.ID))\n\n\t\tif wError != nil {\n\t\t\tlog.Println(\"Error while writing: \" + wError.Error())\n\t\t}\n\t}\n\n}", "func CreateNewUser(db *sql.DB, userName string, password string) string {\n\t_, err := db.Query(\"INSERT INTO user (user_name, password) VALUES (?, ?)\", userName, password)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\treturn \"Success\"\n}", "func (model *UserModel) Insert(name, email, password string) error {\n\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), 12)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tstmt := `INSERT INTO users (name, email, hashed_password, created) VALUES(?, ?, ?, UTC_TIMESTAMP())`\n\n\t_, err = model.DB.Exec(stmt, name, email, string(hashedPassword))\n\tif err != nil {\n\t\tvar mySQLError *mysql.MySQLError\n\t\tif errors.As(err, &mySQLError) {\n\t\t\tif mySQLError.Number == 1062 && strings.Contains(mySQLError.Message, \"users_uc_email\") {\n\t\t\t\treturn models.ErrDuplicateEmail\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}", "func (userservice Userservice) Create(user *(entity.User)) error {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\n\tdb := db.ConfigDB()\n\t// var user entity.User\n\n\tcount, err := db.Collection(\"users\").CountDocuments(ctx, bson.M{\"email\": user.Email})\n\tif err != nil {\n\t\treturn err\n\t}\n\tif count > 0 {\n\t\treturn errors.New(\"email being used\")\n\t}\n\t_, errIns := db.Collection(\"users\").InsertOne(ctx, user)\n\tif errIns != nil {\n\t\treturn errIns\n\t}\n\n\treturn nil\n}", "func (user User) Insert() (User, error) {\n\tdigest, err := GenerateHash(user.Password)\n\tif err != nil {\n\t\treturn user, err\n\t}\n\t_, err = time.LoadLocation(user.Location)\n\tif err != nil {\n\t\treturn user, errors.New(\"user location invalid\")\n\t}\n\tuser.Digest = digest\n\t_, err = db.NamedExec(\"INSERT INTO users (name, digest, email, location) VALUES (:name, :digest, :email, :location)\", user)\n\tif err != nil {\n\t\tif err.Error() == \"UNIQUE constraint failed: users.email\" || err.Error() == `pq: duplicate key value violates unique constraint \"users_email_key\"` {\n\t\t\treturn user, errors.New(\"user email exists\")\n\t\t}\n\t\treturn user, err\n\t}\n\treturn user, nil\n}", "func SignUp(db *gorm.DB, _ *redis.Client, _ http.ResponseWriter, r *http.Request, s *status.Status) (int, error) {\n\ts.Message = status.SignupFailure\n\tcredStatus := status.CredentialStatus{}\n\tcreds, isValidCred := verifyCredentials(r)\n\thashedPass, hashErr := Hash(creds.Password)\n\tif !(isValidCred == nil && hashErr == nil) {\n\t\tcredStatus.Username = status.UsernameAlphaNum\n\t\tcredStatus.Email = status.ValidEmail\n\t\tcredStatus.Password = status.PasswordRequired\n\t\ts.Data = credStatus\n\t\treturn http.StatusUnprocessableEntity, nil\n\t}\n\tcreds.Password = hashedPass\n\tuser := model.NewUser()\n\tunameAvailable := !SingleRecordExists(db, model.UserTable, model.UsernameColumn, creds.Username, user)\n\temailAvailable := !SingleRecordExists(db, 
model.UserTable, model.EmailColumn, creds.Email, user)\n\tif unameAvailable && emailAvailable {\n\t\terr := createUser(db, creds, user)\n\t\tif err != nil {\n\t\t\treturn http.StatusInternalServerError, fmt.Errorf(http.StatusText(http.StatusInternalServerError))\n\t\t}\n\t\ts.Code = status.SuccessCode\n\t\ts.Message = status.SignupSuccess\n\t\treturn http.StatusCreated, nil\n\t}\n\tif !unameAvailable {\n\t\tcredStatus.Username = status.UsernameExists\n\t}\n\tif !emailAvailable {\n\t\tcredStatus.Email = status.EmailExists\n\t}\n\ts.Data = credStatus\n\treturn http.StatusConflict, nil\n}", "func (ua *UserAuth) Insert(ctx context.Context, db XODB) error {\n\tvar err error\n\n\t// if already exist, bail\n\tif ua._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\t// sql insert query, primary key must be provided\n\tconst sqlstr = `INSERT INTO user_auths (` +\n\t\t`user_id, email, password_hash, created_at, updated_at` +\n\t\t`) VALUES (` +\n\t\t`?, ?, ?, ?, ?` +\n\t\t`)`\n\n\t// run query\n\tXOLog(sqlstr, ua.UserID, ua.Email, ua.PasswordHash, ua.CreatedAt, ua.UpdatedAt)\n\t_, err = db.ExecContext(ctx, sqlstr, ua.UserID, ua.Email, ua.PasswordHash, ua.CreatedAt, ua.UpdatedAt)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set existence\n\tua._exists = true\n\n\treturn nil\n}", "func CreateUser(u models.User) (string, bool, error) {\n\n\t//Contexto que controla que no tarde mas de 15 segundos\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//Al final de la funcion se cancela el contexto WithTimeout. Esto se hace para evitar\n\t//dejar contextos vivos que no se están usando\n\tdefer cancel()\n\n\tdb := MongoCN.Database(\"twittor\")\n\tcoll := db.Collection(\"users\")\n\n\t//Encriptando el password\n\tu.Password, _ = EncryptPassword(u.Password)\n\n\tresult, err := coll.InsertOne(ctx, u)\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\tObjID, _ := result.InsertedID.(primitive.ObjectID)\n\treturn ObjID.String(), true, nil\n}", "func IsErrUserAlreadyExist(err error) bool {\n\t_, ok := err.(ErrUserAlreadyExist)\n\treturn ok\n}", "func InsertRegister(object models.User) (string, bool, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//When end instruction remove timeout operation and liberate context\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\t//Set password encrypted\n\tpassWordEncrypted, _ := utils.EcryptPasswordUtil(object.Password)\n\tobject.Password = passWordEncrypted\n\n\tresult, err := collection.InsertOne(ctx, object)\n\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\t//Get id of created object\n\tObjectID, _ := result.InsertedID.(primitive.ObjectID)\n\n\t//Return created object id\n\treturn ObjectID.String(), true, nil\n\n}", "func insertUser(data User) (int64, error) {\n\t// perform a db.Query insert\n\tinsert, err := db.Exec(\"INSERT INTO users (username, email, created_at) VALUES (?, ?, ?)\", data.Username, data.Email, data.CreatedAt)\n\n\t// if there is an error inserting, handle it\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tid, err := insert.LastInsertId()\n\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\treturn id, err\n}", "func CreateUser(w http.ResponseWriter, r *http.Request){\n\n\t\tu := User{}\n\n\t\terr:= json.NewDecoder(r.Body).Decode(&u)\n\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// Checks if name is 
Empty\n\t\tfmt.Printf(\"name: [%+v]\\n\", u.Name)\n\t\tif u.Name == \"\" {\n\t\t\tfmt.Println(\"Empty string\")\n\t\t\tw.Write([]byte(`{\"status\":\"Invalid Name\"}`))\n\t\t\treturn\n\t\t}\n\n\n\t\t//start validation for username\n\t\tvar isStringAlphabetic = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9]*$`).MatchString\n\t\tif !isStringAlphabetic(u.Name){\n\t\t\tfmt.Println(\"is not alphanumeric\")\n\t\t\tw.Write([]byte(`{\"status\":\"Invalid Name\"}`))\n\t\t\treturn\n\t\t}\n\n\t\t//make the Name Uppercase\n\t\tu.Name = strings.ToUpper(u.Name)\n\n\t\t// check if username already exists\n\t\tuser := userExist(u.Name)\n\t\tif user != (User{}) {\n\t\t\tfmt.Println(\"Name already exists\")\n\t\t\tw.Write([]byte(`{\"status\":\"Name Exists\"}`))\n\t\t\treturn\n\t\t}\n\n\t\t//if it does exist create the user with a random ID and score = 0\n\t\tuuid, err := uuid.NewV4()\n\t\tu.ID = uuid.String()\n\t\tu.Score = 0\n\n\t\tquery := \"INSERT INTO users (id, name, score) VALUES ($1, $2, $3);\"\n\t\t_, err = db.Exec(query, u.ID, u.Name, u.Score);\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(201)\n\t\tjson.NewEncoder(w).Encode(u)\n\n}", "func insertUser(db *sql.DB, u *User) {\n\n\t// name := u.name\n\t// rollno := u.rollno\n\tinsertUserSQL := `INSERT INTO User( name, rollno) VALUES (?, ?)`\n\tstatement, err := db.Prepare(insertUserSQL) \n\n\tif err != nil {\n\t\tlog.Fatalln(err.Error())\n\t}\n\t_, err = statement.Exec(u.name, u.rollno)\n\tif err != nil {\n\t\tlog.Fatalln(err.Error())\n\t}\n}", "func CreateUser(c *gin.Context) {}", "func (r *userRepository) Create(u *User) (*User, *dto.Error) {\n\tvar xUser User\n\n\tr.db.Where(&User{Email: u.Email}).Find(&xUser)\n\tif xUser.Email == u.Email {\n\t\treturn nil, dto.NewError(http.StatusConflict, \"email conflict\")\n\t}\n\n\tr.db.Where(&User{Username: u.Username}).Find(&xUser)\n\tif xUser.Username == u.Username {\n\t\treturn nil, dto.NewError(http.StatusConflict, \"username conflict\")\n\t}\n\n\tif err := r.db.Create(&u).Error; err != nil {\n\t\tlogger.Error(\"[DATABASE]::[CREATE_ERROR]\", err)\n\t\treturn nil, dto.NewError(http.StatusBadRequest, err.Error())\n\t}\n\n\treturn u, nil\n}", "func (u *User) Insert(db XODB) error {\n\tvar err error\n\n\t// if already exist, bail\n\tif u._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\t// sql insert query, primary key provided by autoincrement\n\tconst sqlstr = `INSERT INTO test_database.users (` +\n\t\t`username, created_at` +\n\t\t`) VALUES (` +\n\t\t`?, ?` +\n\t\t`)`\n\n\t// run query\n\tXOLog(sqlstr, u.Username, u.CreatedAt)\n\tres, err := db.Exec(sqlstr, u.Username, u.CreatedAt)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// retrieve id\n\tid, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set primary key and existence\n\tu.UserID = uint64(id)\n\tu._exists = true\n\n\treturn nil\n}", "func CheckUserExists(username string, table string, session *r.Session) bool {\n\tvar u interface{}\n\tdb := os.Getenv(\"DB\")\n\t// userTable := os.Getenv(\"USERTABLE\")\n\tcur, _ := r.DB(db).Table(table).GetAllByIndex(\"username\", username).Run(session)\n\t_ = cur.One(&u)\n\tcur.Close()\n\t// fmt.Println(u)\n\tif u == nil {\n\t\t// fmt.Println(\"NO\")\n\t\treturn false\n\t}\n\t// fmt.Println(\"YES\")\n\treturn true\n}", "func (str *impl) CreateUser(\n\tctx context.Context,\n\tcreationTime time.Time,\n\temail string,\n\tdisplayName string,\n\tpasswordHash string,\n) (\n\tresult 
store.User,\n\terr error,\n) {\n\tresult.Creation = creationTime\n\tresult.Email = email\n\tresult.DisplayName = displayName\n\tresult.Password = passwordHash\n\n\t// Prepare\n\tresult.ID = store.NewID()\n\n\t// Begin transaction\n\ttxn, close := str.txn(&err)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer close()\n\n\t// Ensure no users with a similar email already exist\n\tvar qr struct {\n\t\tByID []struct {\n\t\t\tUID string `json:\"uid\"`\n\t\t} `json:\"byId\"`\n\t\tByEmail []struct {\n\t\t\tUID string `json:\"uid\"`\n\t\t} `json:\"byEmail\"`\n\t\tByDisplayName []struct {\n\t\t\tUID string `json:\"uid\"`\n\t\t} `json:\"byDisplayName\"`\n\t}\n\terr = txn.QueryVars(\n\t\tctx,\n\t\t`query User(\n\t\t\t$id: string,\n\t\t\t$email: string,\n\t\t\t$displayName: string\n\t\t) {\n\t\t\tbyId(func: eq(User.id, $id)) { uid }\n\t\t\tbyEmail(func: eq(User.email, $email)) { uid }\n\t\t\tbyDisplayName(func: eq(User.displayName, $displayName)) { uid }\n\t\t}`,\n\t\tmap[string]string{\n\t\t\t\"$email\": email,\n\t\t\t\"$displayName\": displayName,\n\t\t},\n\t\t&qr,\n\t)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif len(qr.ByID) > 0 {\n\t\terr = errors.Errorf(\"duplicate User.id: %s\", result.ID)\n\t\treturn\n\t}\n\tif len(qr.ByEmail) > 0 {\n\t\terr = strerr.Newf(\n\t\t\tstrerr.ErrInvalidInput,\n\t\t\t\"%d users with a similar email already exist\",\n\t\t\tlen(qr.ByEmail),\n\t\t)\n\t\treturn\n\t}\n\tif len(qr.ByDisplayName) > 0 {\n\t\terr = strerr.Newf(\n\t\t\tstrerr.ErrInvalidInput,\n\t\t\t\"%d users with a similar displayName already exist\",\n\t\t\tlen(qr.ByDisplayName),\n\t\t)\n\t\treturn\n\t}\n\n\t// Create user account\n\tvar newUserJSON []byte\n\tnewUserJSON, err = json.Marshal(struct {\n\t\tID string `json:\"User.id\"`\n\t\tEmail string `json:\"User.email\"`\n\t\tDisplayName string `json:\"User.displayName\"`\n\t\tCreation time.Time `json:\"User.creation\"`\n\t\tPassword string `json:\"User.password\"`\n\t}{\n\t\tID: string(result.ID),\n\t\tEmail: email,\n\t\tDisplayName: displayName,\n\t\tCreation: creationTime,\n\t\tPassword: string(passwordHash),\n\t})\n\tif err != nil {\n\t\treturn\n\t}\n\n\tvar userCreationMut map[string]string\n\tuserCreationMut, err = txn.Mutation(ctx, &api.Mutation{\n\t\tSetJson: newUserJSON,\n\t})\n\tif err != nil {\n\t\treturn\n\t}\n\tresult.UID = userCreationMut[\"blank-0\"]\n\n\t// Add the new account to the global Index\n\tvar newUsersIndexJSON []byte\n\tnewUsersIndexJSON, err = json.Marshal(struct {\n\t\tUID UID `json:\"users\"`\n\t}{\n\t\tUID: UID{NodeID: result.UID},\n\t})\n\tif err != nil {\n\t\treturn\n\t}\n\n\t_, err = txn.Mutation(ctx, &api.Mutation{\n\t\tSetJson: newUsersIndexJSON,\n\t\tSet: nil,\n\t})\n\n\treturn\n}", "func (db *DB) CreateUser(u *model.User) error {\n\tu.ID = primitive.NewObjectID().Hex()\n\n\t_, err := db.collections.users.InsertOne(context.Background(), u)\n\tif err != nil {\n\t\tif writeErr, ok := err.(mongo.WriteErrors); ok {\n\t\t\tif len(writeErr) == 1 && writeErr[0].Code == 11000 {\n\t\t\t\treturn fmt.Errorf(\"email_address_already_exists\")\n\t\t\t}\n\t\t}\n\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func UserCreate(w http.ResponseWriter, r *http.Request) {\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tm.Message = fmt.Sprintf(\"Error al leer el usuario a registrarse: %s\", err)\n\t\tm.Code = http.StatusBadRequest\n\t\tcommons.DisplayMessage(w, m)\n\t\treturn\n\t}\n\tif user.Password != user.ConfirmPassword {\n\t\tm.Message = \"Las contraseña no coinciden\"\n\t\tm.Code = 
http.StatusBadRequest\n\t\tcommons.DisplayMessage(w, m)\n\t\treturn\n\t}\n\tuser.Password = password\n\tavatarmd5 := md5.Sum([]byte(user.Password))\n\tavatarstr := fmt.Sprintf(\"%x\", avatarmd5)\n\tuser.Avatar = \"https://gravatar.com/avatar/\" + avatarstr + \"?s=100\"\n\tdatabase := configuration.GetConnection()\n\tdefer database.Close()\n\terr = database.Create(&user).Error\n\tif err != nil {\n\t\tm.Message = fmt.Sprintf(\"Error al crear el registro: %s\", err)\n\t\tm.Code = http.StatusBadRequest\n\t\tcommons.DisplayMessage(w, m)\n\t\treturn\n\t}\n\tm.Message = \"Usuario creado con éxito\"\n\tm.Code = http.StatusCreated\n\tcommons.DisplayMessage(w, m)\n}", "func (dau *DdgAdminUser) Insert(ctx context.Context, key ...interface{}) error {\n\tvar err error\n\tvar dbConn *sql.DB\n\tvar res sql.Result\n\t// if already exist, bail\n\tif dau._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\ttx, err := components.M.GetConnFromCtx(ctx)\n\tif err != nil {\n\t\tdbConn, err = components.M.GetMasterConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttableName, err := GetDdgAdminUserTableName(key...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// sql insert query, primary key provided by autoincrement\n\tsqlstr := `INSERT INTO ` + tableName +\n\t\t` (` +\n\t\t`name, account, password, permission_ids, status` +\n\t\t`) VALUES (` +\n\t\t`?, ?, ?, ?, ?` +\n\t\t`)`\n\n\t// run query\n\tutils.GetTraceLog(ctx).Debug(\"DB\", zap.String(\"SQL\", fmt.Sprint(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, dau.Status)))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif tx != nil {\n\t\tres, err = tx.Exec(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, dau.Status)\n\t} else {\n\t\tres, err = dbConn.Exec(sqlstr, dau.Name, dau.Account, dau.Password, dau.PermissionIds, dau.Status)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// retrieve id\n\tid, err := res.LastInsertId()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set primary key and existence\n\tdau.ID = uint64(id)\n\tdau._exists = true\n\n\treturn nil\n}", "func (db *DB) CreateOrGetUser(ctx context.Context, email string) (string, error) {\n\trow := db.sql.QueryRowContext(ctx, `INSERT INTO users \n\t\t\t\t\t\t\t\t\t\t(email) \n\t\t\t\t\t\t\t\t\t\tVALUES ($1)\n\t\t\t\t\t\t\t\t\t\tON CONFLICT (email)\n\t\t\t\t\t\t\t\t\t\tDO UPDATE SET email = EXCLUDED.email\n\t\t\t\t\t\t\t\t\t\tRETURNING id;`, email)\n\n\tvar userID string\n\terr := row.Scan(&userID)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn userID, nil\n}", "func (us *UserService) Create(u *User) (int, error) {\n\tif us.UserInterceptor != nil {\n\t\tif err := us.UserInterceptor.PreCreate(u); err != nil {\n\t\t\treturn -1, httperror.InternalServerError(fmt.Errorf(\"error while executing user interceptor 'PreCreate' error %v\", err))\n\t\t}\n\t}\n\n\tif err := u.validate(us, us.Config.MinPasswordLength, VDupUsername|VDupEmail|VPassword); err != nil {\n\t\treturn -1, err\n\t}\n\n\tsalt := crypt.GenerateSalt()\n\tsaltedPassword := append(u.PlainPassword[:], salt[:]...)\n\tpassword, err := crypt.CryptPassword([]byte(saltedPassword))\n\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\tu.Salt = salt\n\tu.Password = password\n\n\tuserID, err := us.Datasource.Create(u)\n\n\tif err != nil {\n\t\treturn -1, err\n\t}\n\n\tif us.UserInterceptor != nil {\n\t\terrUserInterceptor := us.UserInterceptor.PostCreate(u)\n\t\tlogger.Log.Errorf(\"error while executing PostCreate user interceptor method %v\", errUserInterceptor)\n\t}\n\n\tsalt = 
nil\n\tsaltedPassword = nil\n\tu.PlainPassword = nil\n\n\treturn userID, nil\n}", "func InsertUser(w http.ResponseWriter, r *http.Request) string {\n\tdb, err_open := sql.Open(\"mysql\", \"doubly_app:doubly_user1@tcp(doublydb.ct2fpvea2u25.us-west-2.rds.amazonaws.com:3306)/Doubly\")\n if err_open != nil {\n log.Fatal(err_open)\n }\n\tvar rStrings = strings.Split(r.URL.Path, \"/\")\n\tvar rUserName = rStrings[2]\n\tvar rUserEmail = rStrings[3]\n\tvar rPassword = rStrings[4]\n\tvar rDOB = rStrings[5]\n\tvar rGender = rStrings[6]\n rows, err_query := db.Query(\"SELECT * FROM Users WHERE Users.Email = '\" + rUserEmail + \"'\")\n\tdefer rows.Close()\n if err_query != nil {\n panic(err_query.Error())\n }\n\tvar count = 0\n\tfor rows.Next() {\n\t\tcount++\n\t}\n\tif count > 0 {\n\t\treturn \"{\\\"Error\\\":\\\"UserExists\\\"}\"\n\t}\n\tresults_insert, err_insert := db.Exec(\"INSERT INTO Users(UserName, Email, Password, DOB, Gender) VALUES ('\" + rUserName + \"', '\" + rUserEmail + \"', '\" + rPassword + \"', \" + rDOB + \", '\" + rGender + \"')\")\n\tif err_insert != nil {\n\t\tpanic(err_insert.Error())\n\t}\n\tlastInsertedID, err_last_id := results_insert.LastInsertId()\n\tif err_last_id != nil {\n\t\tprintln(\"Error: UserID not found\")\n\t\tpanic(err_last_id.Error())\n\t\treturn \"{\\\"Error\\\":\\\"UserID Not Found\\\"}\"\n\t}\n\treturn \"{\\\"UserID\\\":\\\"\" + strconv.FormatInt(lastInsertedID, 10) + \"\\\"}\"\n}", "func (d *PostgresDatabase) Store(u model.User) error {\n\topMetric.WithLabelValues(\"store-user\").Inc()\n\n\tostart := time.Now()\n\tdefer opDuration.WithLabelValues(\"store-user\").Observe(time.Since(ostart).Seconds())\n\n\tdob, err := time.Parse(\"2006-01-02\", u.DOB)\n\tif err != nil {\n\t\topErrMetric.WithLabelValues(\"store-user\").Inc()\n\t\treturn err\n\t}\n\t_, err = d.Get(u.Username)\n\t// if the user is not present we insert\n\tif err == ErrUserNotFound {\n\t\t_, err := d.db.Query(queryInsertUser, u.Username, dob)\n\t\tif err != nil {\n\t\t\topErrMetric.WithLabelValues(\"store-user\").Inc()\n\t\t\treturn err\n\t\t}\n\t\topMetric.WithLabelValues(\"store-user\").Inc()\n\t\treturn nil\n\t} else if err != nil {\n\t\topErrMetric.WithLabelValues(\"store-user\").Inc()\n\t\treturn err\n\t}\n\n\t// if we got here, the user is already present and we should do an update\n\tstmt, err := d.db.Prepare(queryUpdateUser)\n\t_, err = stmt.Exec(dob, u.Username)\n\tif err != nil {\n\t\topErrMetric.WithLabelValues(\"store-user\").Inc()\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func CreateUser(username, password string) error{\n\t// before creating user check if the user name exits\n\tif _, err := db.Query(\"INSERT INTO avelival (username, password) VALUES ($1,$2)\", username, password); err != nil {\n\t\treturn err\n\t}\n\t\n\treturn nil\n}", "func (h *User) Create(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tuser, err := validator.UserCreate(body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON400(w)\n\t\treturn\n\t}\n\n\terr = h.Storage.CreateUser(user)\n\t// @todo this might be also 400 response since email can be a duplicate\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tR.JSON200OK(w)\n}", "func (db *DataBase) Register(user *models.UserPrivateInfo) (userID int, err error) {\n\n\tvar (\n\t\ttx *sql.Tx\n\t)\n\n\tif tx, err = db.Db.Begin(); err != nil {\n\t\treturn\n\t}\n\tdefer tx.Rollback()\n\n\tif userID, 
err = db.createPlayer(tx, user); err != nil {\n\t\treturn\n\t}\n\n\tif err = db.createRecords(tx, userID); err != nil {\n\t\treturn\n\t}\n\n\terr = tx.Commit()\n\treturn\n}", "func TestEmailAlreadyInUse(t *testing.T) {\n handle1 := \"test_handleinuse1\"\n handle2 := \"test_handleinuse2\"\n email := \"[email protected]\"\n\n var account UserAccount\n if err := Create(handle1, email, \"timisadork\", &account); err != nil {\n t.Error(err)\n }\n\n if err := Create(handle2, email, \"timisadork\", &account); err == nil {\n t.Errorf(\"Used the same email twice\")\n }\n\n if err := Delete(account.Key); err != nil {\n t.Error(err)\n }\n}", "func insertNick(user *tgbotapi.User, nick string) error {\n\tinsert := fmt.Sprintf(`INSERT INTO %s (nick, tg_id, tg_username) VALUES ($1, $2, $3)`, UsersTable)\n\t_, err := pool.Exec(context.Background(), insert, nick, user.ID, user.UserName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not add user: %s\", err)\n\t}\n\n\tlog.Printf(\"Añadido %s como %s\", user.String(), nick)\n\n\treturn nil\n}", "func createDefaultUser() error {\n\tdb, err := gorm.Open(\"sqlite3\", dbPath)\n\tdefer db.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\tregisteredValues := 0\n\tdb.Find(&user{}).Count(&registeredValues)\n\tif registeredValues == 0 {\n\t\thashedPassword, _ := bcrypt.GenerateFromPassword([]byte(\"admin\"), 14)\n\t\tdb.Create(&user{\n\t\t\tUsername: \"admin\",\n\t\t\tHashedPassword: hashedPassword,\n\t\t})\n\t}\n\treturn nil\n}", "func sign_up(conn net.Conn, args []string, db *sql.DB, newMsgCh *chan message) serverError {\n login := args[0]\n // check this login\n // login must be free\n _, err := getUserID(login, db)\n if err == sql.ErrNoRows {\n // add new user if everything ok\n // check args\n\n // RSA key.module must be big.Int()\n n := big.NewInt(0)\n if _, ok := n.SetString(args[1], 10); !ok {\n return serverError{ errors.New(\"Expected public key module. Got \" + args[1]),\n WRONG_PUBLIC_KEY_MODULE}\n }\n // size of n must be RSA_KEY_LEN bit\n if n.BitLen() != RSA_KEY_LEN {\n return serverError{ errors.New(\"Wrong public key module size. Expected \" +\n strconv.Itoa(RSA_KEY_LEN) + \" bit, got \" +\n strconv.Itoa(n.BitLen())), WRONG_PUBLIC_KEY_SIZE}\n }\n\n //check that args[2] is RSA exponent\n e, err := strconv.Atoi(args[2])\n if err != nil {\n return serverError{ errors.New(\"error convert \" + args[2] + \" to key exponent\"),\n WRONG_KEY_EXPONENT }\n }\n\n //register new user\n _, err = db.Exec(`INSERT INTO users(login, pubKey_n, pubKey_e)\n VALUES ($1, $2, $3)`, login, args[1], e)\n return serverError{err, SERVER_INNER_ERR}\n } else {\n return serverError{ errors.New(\"login \" + login + \" is used\"),\n LOGIN_IS_USED }\n }\n\n return NoErrors\n}", "func CreateUser(db *sql.DB, name, email string) (err error) {\n\ttx, err := db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"unable to begin tx for createUser\")\n\t}\n\n\tdefer func() {\n\t\tif err != nil {\n\t\t\t// attempt a rollback and return the original error\n\t\t\ttx.Rollback()\n\t\t\treturn\n\t\t}\n\t\terr = tx.Commit()\n\t\tif err != nil {\n\t\t\terr = errors.Wrap(err, \"error committing tx on createUser\")\n\t\t}\n\t}()\n\n\trow := tx.QueryRow(\"select id from users where email=? 
limit 1\", email)\n\tvar id int\n\terr = row.Scan(&id)\n\tif err == sql.ErrNoRows || id == 0 {\n\t\t_, err := tx.Exec(\"insert into users (name, email) values (?,?)\", name, email)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"unable to create user\")\n\t\t}\n\t} else if err != nil {\n\t\treturn errors.Wrap(err, \"unexpected error in createUser\")\n\t}\n\treturn nil\n}", "func CreateUser(datastore core.ApplicationUser, accountID, applicationID int64, user *entity.ApplicationUser) (errs []errors.Error) {\n\tif user.FirstName != \"\" {\n\t\tif !StringLengthBetween(user.FirstName, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserFirstNameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif user.LastName != \"\" {\n\t\tif !StringLengthBetween(user.LastName, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserLastNameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif user.Username != \"\" {\n\t\tif !StringLengthBetween(user.Username, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserUsernameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif user.Username == \"\" && user.Email == \"\" {\n\t\terrs = append(errs, errmsg.ErrApplicationUsernameAndEmailAreEmpty.SetCurrentLocation())\n\t}\n\n\tif user.Password == \"\" {\n\t\terrs = append(errs, errmsg.ErrAuthPasswordEmpty.SetCurrentLocation())\n\t}\n\n\tif user.Email != \"\" && !IsValidEmail(user.Email) {\n\t\terrs = append(errs, errmsg.ErrApplicationUserEmailInvalid.SetCurrentLocation())\n\t}\n\n\tif user.Email != \"\" {\n\t\tif isDuplicate, err := DuplicateApplicationUserEmail(datastore, accountID, applicationID, user.Email); isDuplicate || err != nil {\n\t\t\tif isDuplicate {\n\t\t\t\terrs = append(errs, errmsg.ErrApplicationUserEmailAlreadyExists.SetCurrentLocation())\n\t\t\t} else {\n\t\t\t\terrs = append(errs, err...)\n\t\t\t}\n\t\t}\n\t}\n\n\tif user.Username != \"\" {\n\t\tif isDuplicate, err := DuplicateApplicationUserUsername(datastore, accountID, applicationID, user.Username); isDuplicate || err != nil {\n\t\t\tif isDuplicate {\n\t\t\t\terrs = append(errs, errmsg.ErrApplicationUserUsernameInUse.SetCurrentLocation())\n\t\t\t} else {\n\t\t\t\terrs = append(errs, err...)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn\n}", "func IsUserExist(qr db.Queryer, email string) bool {\n\tstr := \"SELECT count(*) as cnt FROM users WHERE email = ?\"\n\tuid := int64(0)\n\terr := qr.Get(&uid, str, email)\n\tif err != nil {\n\t\tlog.Println(\"err\", err)\n\t\treturn false\n\t}\n\tlog.Println(\"uid\", err)\n\tif uid > 0 {\n\t\treturn true\n\t}\n\n\treturn false\n\n}", "func (wad *WriteUsersDatabase) Create(ctx context.Context, user *pb.User) error {\n\twad.RLock()\n\tdefer wad.RUnlock()\n\n\treturn nil\n}", "func (pg *PGUser) Add(in *user.User) (err error) {\n\tfmt.Printf(\"\\nPGUser in: %+v\\n\", in)\n\tif err := pg.DB.Create(in).Scan(&in); err != nil {\n\t\treturn oops.Err(err.Error)\n\t}\n\treturn nil\n}", "func createUser(c *gin.Context) {\n password,_ := HashPassword(c.PostForm(\"password\"))\n\tuser := user{Login: c.PostForm(\"login\"), Password: password}\n\tdb.Save(&user)\n\tc.JSON(http.StatusCreated, gin.H{\"status\": http.StatusCreated, \"message\": \"User item created successfully!\"})\n}", "func CreateDefaultUser(username, password string) (err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n // Check if the default user exists\n if len(users) > 0 {\n err = createError(001)\n return\n }\n\n var defaults = 
defaultsForNewUser(username, password)\n users[defaults[\"_id\"].(string)] = defaults\n saveDatabase(data)\n\n return\n}", "func createUser(firstName string, MI string, lastName string, username string, password string, privLevel int, courseName string) error {\n\n\tdb, err := sql.Open(\"mysql\", DB_USER_NAME+\":\"+DB_PASSWORD+\"@unix(/var/run/mysql/mysql.sock)/\"+DB_NAME)\n\n\tif err != nil {\n\t\treturn errors.New(\"No connection\")\n\t}\n\n\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)\n\n\tif err != nil {\n\t\treturn errors.New(\"Error\")\n\t}\n\n\t_, err = db.Exec(\"INSERT INTO Users(FirstName, MiddleInitial, LastName, Username, Password, PrivLevel) VALUES(?, ?, ?, ?, ?, ?)\", firstName, MI, lastName, username, hashedPassword, privLevel)\n\n\tif err != nil {\n\t\treturn errors.New(\"User creation failed.\")\n\t}\n\n\tsendRandomPassword(username)\n\n\t_, err = db.Exec(\"INSERT INTO StudentCourses(Student, CourseName) VALUES ((select UserID from Users where Username=?), ?)\", username, courseName)\n\n\tif err != nil {\n\t\treturn errors.New(\"User unable to be added to student courses.\")\n\t}\n\n\t/*_, err = db.Exec(\"INSERT INTO GradeReport\" + courseName + \"(Student) VALUES(select UserID from users where Username=\" + username + \")\")\n\n\tif err != nil {\n\t\treturn errors.New(\"User unable to be added to GradeReport table.\")\n\t}*/\n\n\treturn nil\n}", "func RegisterNewUser(c *soso.Context) {\n\treq := c.RequestMap\n\trequest := &auth_protocol.NewUserRequest{}\n\n\tif value, ok := req[\"source\"].(string); ok {\n\t\trequest.Source = value\n\t}\n\n\tif value, ok := req[\"phone\"].(string); ok {\n\t\trequest.PhoneNumber = value\n\t}\n\n\tif value, ok := req[\"instagram_username\"].(string); ok {\n\t\tvalue = strings.Trim(value, \" \\r\\n\\t\")\n\t\tif !nameValidator.MatchString(value) {\n\t\t\tlog.Debug(\"name '%v' isn't valid\", value)\n\t\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"Invalid instagram name\"))\n\t\t\treturn\n\t\t}\n\t\trequest.InstagramUsername = value\n\t}\n\n\tif value, ok := req[\"username\"].(string); ok {\n\t\tvalue = strings.Trim(value, \" \\r\\n\\t\")\n\t\tif !nameValidator.MatchString(value) {\n\t\t\tlog.Debug(\"name '%v' isn't valid\", value)\n\t\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"Invalid user name\"))\n\t\t\treturn\n\t\t}\n\t\trequest.Username = value\n\t}\n\n\tif request.InstagramUsername == \"\" && request.Username == \"\" {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"User name or instagram name is required\"))\n\t\treturn\n\t}\n\n\tif request.PhoneNumber == \"\" {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"User phone number is required\"))\n\t\treturn\n\t}\n\n\tctx, cancel := rpc.DefaultContext()\n\tdefer cancel()\n\tresp, err := authClient.RegisterNewUser(ctx, request)\n\n\tif err != nil {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, err)\n\t\treturn\n\t}\n\n\tc.SuccessResponse(map[string]interface{}{\n\t\t\"ErrorCode\": resp.ErrorCode,\n\t\t\"ErrorMessage\": resp.ErrorMessage,\n\t})\n}", "func (r *mutationResolver) CreateUserss(ctx context.Context, input *model.RegisterInput) (*model.Register, error) {\n\tuser := &model.Register{\n\t\tID: fmt.Sprintf(\"T%d\", rand.Int()),\n\t\tName: input.Name,\n\t\tLastname: input.Lastname,\n\t\tPassword: input.Password,\n\t}\n\tr.user = append(r.user, user)\n\treturn user, nil\n}", "func (s *userService) Duplicated(email string) 
error {\n\tuser, err := s.repo.FindByEmail(email)\n\tif user != nil {\n\t\treturn fmt.Errorf(\"%v already exists\", email)\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (db *Database) UserExists(name string) (bool, error) {\n\trow := db.db.QueryRow(`\n\t\tSELECT id FROM melodious.accounts WHERE username=$1 LIMIT 1;\n\t`, name)\n\n\tvar id int // this is unused though\n\terr := row.Scan(&id)\n\tif err == sql.ErrNoRows {\n\t\treturn false, nil\n\t} else if err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func (userRepository UserRepository) Create(user models.User) (uint64, error) {\n\tstatement, err := userRepository.db.Prepare(\n\t\t\"insert into users (name, nick, email, password) values (?, ?, ?, ?)\",\n\t)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tdefer statement.Close()\n\n\tresult, err := statement.Exec(user.Name, user.Nick, user.Email, user.Password)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tlastInsertId, err := result.LastInsertId()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn uint64(lastInsertId), nil\n}", "func (r Repository) Signup(user shared.User) Message {\n\tsession, dbMessage := NewMongoSession(address, DBNAME, DBUsername, DBPassword)\n\tif dbMessage.Status != 200 {\n\t\treturn dbMessage\n\t}\n\tdefer session.CloseSession()\n\tvar userCheck shared.User\n\tfindQuery := session.FindUser(DOCNAME, user.UserID, &userCheck)\n\tif findQuery != nil {\n\t\tif findQuery.Error() == \"not found\" {\n\t\t\terr := session.InsertUser(DOCNAME, user)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t\treturn internalServerErrorMessage()\n\t\t\t}\n\t\t\treturnMessage := Message{\n\t\t\t\tStatus: 201,\n\t\t\t\tMessage: \"Signup successfull\",\n\t\t\t\tUserID: user.UserID,\n\t\t\t}\n\t\t\treturn returnMessage\n\t\t}\n\t\treturn internalServerErrorMessage()\n\t}\n\tif userCheck != (shared.User{}) {\n\t\treturnMessage := Message{\n\t\t\tStatus: 409,\n\t\t\tMessage: \"UserID already exists\",\n\t\t}\n\t\treturn returnMessage\n\t}\n\treturn internalServerErrorMessage()\n\n}", "func RegisterUser(user models.User)(string,bool,error) {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\tdb := MongoC.Database(\"twitter\")\n\tcol := db.Collection(\"users\")\n\tuser.Password,_ = EncriptPassw(user.Password)\n\tresult, err := col.InsertOne(ctx,user)\n\tif err != nil{\n\t\treturn \"\",false,err\n\t}\n\tObjID,_ := result.InsertedID.(primitive.ObjectID)\n\treturn ObjID.String(),true,nil\n}", "func (u *User)IsNicknameExist(nickname *string,exist *bool)(err error) {\n rows,err := u.DB.Query(SQL_CHECK_NICKNAME_LOWER,nickname)\n if !rows.Next() {\n *exist = false\n return \n }\n *exist = true\n return\n}", "func createUser(user *User) {\n\tvar dummy string\n\n\t// create user\n\tdb.QueryRow(\"INSERT INTO users (gh_id, username, realname, email, token, \"+\n\t\t\"worker_token, admin) VALUES ($1, $2, $3, $4, $5, $6, $7)\", user.GH_Id,\n\t\tuser.User_name, user.Real_name, user.Email, user.Token,\n\t\tuser.Worker_token, user.Admin).Scan(&dummy)\n}" ]
[ "0.71197665", "0.67626286", "0.6727451", "0.6609683", "0.6563545", "0.65301263", "0.6466772", "0.6456422", "0.6431449", "0.6404142", "0.63980633", "0.639349", "0.6377807", "0.6352397", "0.63426137", "0.63360345", "0.6325099", "0.6308572", "0.63084495", "0.63005865", "0.6291556", "0.6281891", "0.62722874", "0.62649083", "0.6264029", "0.6253047", "0.62306863", "0.6227082", "0.6225584", "0.62209046", "0.6207234", "0.61952895", "0.6191825", "0.6175866", "0.6167954", "0.6165509", "0.6137294", "0.611962", "0.61128825", "0.6103039", "0.60916334", "0.60911155", "0.60894746", "0.6083962", "0.6060138", "0.60583436", "0.6058179", "0.6036375", "0.6036223", "0.60281193", "0.6026648", "0.60251343", "0.602361", "0.6022592", "0.6017918", "0.6014218", "0.60121775", "0.60041285", "0.60037893", "0.60029054", "0.6002661", "0.59992474", "0.5988458", "0.59855294", "0.5977671", "0.597341", "0.5972809", "0.5972596", "0.59619737", "0.5959727", "0.594712", "0.59451526", "0.59387195", "0.5938703", "0.59328246", "0.5932581", "0.593138", "0.5927445", "0.59191096", "0.59183604", "0.59077084", "0.59057605", "0.5904716", "0.59012616", "0.5900349", "0.58998317", "0.58981925", "0.589676", "0.5896627", "0.58915657", "0.5884096", "0.5879773", "0.58793676", "0.58776325", "0.587565", "0.5872221", "0.5868461", "0.58623785", "0.58580786", "0.5857628" ]
0.66131705
3
3 Get and validate all 4 users
func TestGetUserService (t *testing.T){
	user1, err := GetUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
	assert.Equal(t, user_01, user1)

	user2, err := GetUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
	assert.Equal(t, user_01, user2)

	user3, err := GetUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
	assert.Equal(t, user_01, user3)

	user4, err := GetUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
	assert.Equal(t, user_01, user4)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetUsers(c *gin.Context) {\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_users()\")\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\n\tuser := User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, &accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func getUsers(types int) {\n\treq, _ := http.NewRequest(\"GET\", cfg.Main.Server+\"users\", nil)\n\treq.Header.Set(\"Content-Type\", \"application/xml\")\n\treq.Header.Set(\"Authorization\", cfg.Main.Key)\n\tclient := &http.Client{}\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\tp(\"Couldn't connect to Openfire server: %s\", err.Error())\n\t\treturn\n\t}\n\tdefer res.Body.Close()\n\tif res.StatusCode != 200 {\n\t\tp(\"Error requesting userlist from the server.\")\n\t\treturn\n\t}\n\tbody, _ := ioutil.ReadAll(res.Body)\n\tvar users XMLUsers\n\txml.Unmarshal(body, &users)\n\tfor _, e := range users.User {\n\t\tn := e.Username + \",\"\n\t\tif e.Name != \"\" {\n\t\t\tn = e.Username + \",\" + e.Name\n\t\t}\n\t\tswitch types {\n\t\tcase 0:\n\t\t\tm := \"<missing e-mail>\"\n\t\t\tif e.Email != \"\" {\n\t\t\t\tm = e.Email\n\t\t\t}\n\t\t\tp(\"%s,%s\", n, m)\n\t\tcase 1:\n\t\t\tif e.Email != \"\" {\n\t\t\t\tp(\"%s,%s\", n, e.Email)\n\t\t\t}\n\t\tcase 2:\n\t\t\tif e.Email == \"\" {\n\t\t\t\tp(\"%s\", n)\n\t\t\t}\n\t\t}\n\t}\n}", "func GetUser(c *gin.Context) {\n\tnID := c.Param(\"user_id\")\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_user(?)\", nID)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\n\tuser := 
User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, &accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func (*UserValidator) Validate(rest *models.User) []ValidationError {\n\n\tvalidationErrors := make([]ValidationError, 0)\n\n\tvalidateNameField(rest.FirstName, firstNameField, &validationErrors)\n\tvalidateNameField(rest.LastName, lastNameField, &validationErrors)\n\tvalidateEmail(rest.Email, &validationErrors)\n\tvalidateCountry(rest.Country, &validationErrors)\n\n\treturn validationErrors\n}", "func UserVerify(w http.ResponseWriter, r *http.Request) {\n\n\tw.Header().Set(\"Content-Type\", \"text/javascript\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\n\tif r.Method != \"POST\" {\n\t\tfmt.Fprintln(w, \"bad request\")\n\n\t} else {\n\n\t\tr.ParseForm()\n\t\tdata := r.Form[\"ID\"][0] //mail or phone\n\t\tvc := r.Form[\"vc\"][0]\n\t\tcollection := session.DB(\"bkbfbtpiza46rc3\").C(\"loginRequests\")\n\t\trecordTemp := new(structs.VcTable)\n\t\tvar FindErr error\n\t\tvar result bool\n\t\tvar objid bson.ObjectId\n\n\t\tFindErr = collection.Find(bson.M{\"userid\": data}).One(&recordTemp)\n\n\t\tif FindErr == nil {\n\t\t\tif recordTemp.VC == vc {\n\n\t\t\t\tcollection = session.DB(\"bkbfbtpiza46rc3\").C(\"users\")\n\t\t\t\tvar usrTemp structs.User\n\n\t\t\t\tif strings.Contains(data, \"@\") {\n\t\t\t\t\tFindErr = collection.Find(bson.M{\"email\": data}).One(&usrTemp)\n\t\t\t\t} else {\n\t\t\t\t\tFindErr = collection.Find(bson.M{\"phone\": 
data}).One(&usrTemp)\n\t\t\t\t}\n\n\t\t\t\t///if user doesnt exist then init new one\n\t\t\t\tif FindErr == mgo.ErrNotFound {\n\t\t\t\t\tobjid, result = services.InitUser(data, vc, session, redisClient)\n\t\t\t\t}\n\n\t\t\t\t//if exist then login it\n\t\t\t\tif FindErr == nil {\n\t\t\t\t\tresult = services.LoginUser(data, vc, session)\n\t\t\t\t\tobjid = usrTemp.ID\n\t\t\t\t\tinit := structs.UserCache{Geo: \"0,0\", Vc: vc, FriendList: usrTemp.FriendList, Visibility: true}\n\t\t\t\t\tif !services.SendToCache(objid.Hex(), init, redisClient) {\n\t\t\t\t\t\tlog.Println(\"redis init failed,trying again\")\n\t\t\t\t\t\tservices.SendToCache(objid.Hex(), init, redisClient)\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\t// if error\n\t\t\t\tif FindErr != nil && FindErr != mgo.ErrNotFound {\n\t\t\t\t\tlog.Println(\"user verification failed due to DB query failur\")\n\t\t\t\t\tlog.Println(FindErr)\n\t\t\t\t\tlog.Println(\"user ID:\")\n\t\t\t\t\tlog.Println(data)\n\t\t\t\t\tlog.Println(\"<=End\")\n\t\t\t\t\tfmt.Fprintln(w, \"0\")\n\t\t\t\t}\n\n\t\t\t\tif result == true {\n\t\t\t\t\tfmt.Fprintln(w, objid.Hex()+\"<>\"+usrTemp.Name+\"<>\"+usrTemp.Avatar)\n\t\t\t\t} else {\n\t\t\t\t\tlog.Println(\"user verification failed due to inituser/loginuser service failur:\")\n\t\t\t\t\tlog.Println(data)\n\t\t\t\t\tlog.Println(\"<=End\")\n\t\t\t\t\tfmt.Fprintln(w, \"0\")\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\tfmt.Fprintln(w, \"-1\")\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Println(\"user verification failed due to VC table failur\")\n\t\t\tlog.Println(FindErr)\n\t\t\tlog.Println(\"<=End\")\n\t\t\tfmt.Fprintln(w, \"0\")\n\t\t}\n\n\t}\n\n}", "func validate(user *customer_api.DbUser, allowEmpty bool) error {\n\tconst minNameLength, maxNameLength = 3, 20\n\tconst emailRegexString = \"^[a-zA-Z0-9+_.-]+@[a-zA-Z0-9.-]+$\"\n\tvar emailRegex = regexp.MustCompile(emailRegexString)\n\n\tif !(allowEmpty && user.Email == \"\") {\n\t\tif len(user.Email) < 5 || !emailRegex.MatchString(user.Email) {\n\t\t\treturn errors.New(\"invalid email\")\n\t\t}\n\t}\n\n\tif !(allowEmpty && user.FirstName == \"\") {\n\t\tif len(user.FirstName) < minNameLength || len(user.FirstName) > maxNameLength {\n\t\t\treturn errors.New(\"first_name should be between 3 and 20 characters\")\n\t\t}\n\t}\n\n\tif !(allowEmpty && user.LastName == \"\") {\n\t\tif len(user.LastName) < minNameLength || len(user.LastName) > maxNameLength {\n\t\t\treturn errors.New(\"last_name should be between 3 and 20 characters\")\n\t\t}\n\t}\n\n\tif !(allowEmpty && user.Phone == 0) {\n\t\tif user.Phone < 1000000000 || user.Phone > 9999999999 {\n\t\t\treturn errors.New(\"invalid phone no\")\n\t\t}\n\t}\n\n\tif !(allowEmpty && user.Id == \"\") {\n\t\tif user.Id == \"\" {\n\t\t\treturn errors.New(\"id cannot be empty\")\n\t\t}\n\t}\n\treturn nil\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.AccountName(); ok {\n\t\tif err := user.AccountNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"account_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"account_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.StaffType(); ok {\n\t\tif err := user.StaffTypeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_type\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_type\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.FamilyName(); ok {\n\t\tif err := user.FamilyNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"family_name\", err: fmt.Errorf(\"ent: validator failed for field 
\\\"family_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.GivenName(); ok {\n\t\tif err := user.GivenNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"given_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"given_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.DisplayName(); ok {\n\t\tif err := user.DisplayNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"display_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"display_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.IDNumber(); ok {\n\t\tif err := user.IDNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"id_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Sex(); ok {\n\t\tif err := user.SexValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"sex\", err: fmt.Errorf(\"ent: validator failed for field \\\"sex\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.PhoneNumber(); ok {\n\t\tif err := user.PhoneNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Address(); ok {\n\t\tif err := user.AddressValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"address\", err: fmt.Errorf(\"ent: validator failed for field \\\"address\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.StaffID(); ok {\n\t\tif err := user.StaffIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_id\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_id\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.PersonalEmail(); ok {\n\t\tif err := user.PersonalEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"personal_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"personal_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.IntranetWorkEmail(); ok {\n\t\tif err := user.IntranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"intranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.ExtranetWorkEmail(); ok {\n\t\tif err := user.ExtranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"extranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"extranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func getUsersHandler(c *gin.Context) {\n\tuser, _ := c.Get(JwtIdentityKey)\n\n\t// Role check.\n\tif !isAdmin(user) {\n\t\tc.JSON(http.StatusUnauthorized, gin.H{\"message\": \"unauthorized\"})\n\t\treturn\n\t}\n\n\tpage := c.DefaultQuery(\"page\", \"1\")\n\tcount := c.DefaultQuery(\"count\", \"10\")\n\tpageInt, _ := strconv.Atoi(page)\n\tcountInt, _ := strconv.Atoi(count)\n\n\tif page == \"0\" {\n\t\tpageInt = 1\n\t}\n\n\tvar wg sync.WaitGroup\n\tvar users *[]types.User\n\tvar usersCount int\n\n\tdb := data.New()\n\twg.Add(1)\n\tgo func() {\n\t\tusers = db.Users.GetUsers((pageInt-1)*countInt, countInt)\n\t\twg.Done()\n\t}()\n\n\twg.Add(1)\n\tgo func() {\n\t\tusersCount = db.Users.GetUsersCount()\n\t\twg.Done()\n\t}()\n\twg.Wait()\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"code\": http.StatusOK,\n\t\t\"users\": users,\n\t\t\"count\": usersCount,\n\t})\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.AccountName(); ok 
{\n\t\tif err := user.AccountNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"account_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"account_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffType(); ok {\n\t\tif err := user.StaffTypeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_type\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_type\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.FamilyName(); ok {\n\t\tif err := user.FamilyNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"family_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"family_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GivenName(); ok {\n\t\tif err := user.GivenNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"given_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"given_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.DisplayName(); ok {\n\t\tif err := user.DisplayNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"display_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"display_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IDNumber(); ok {\n\t\tif err := user.IDNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"id_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Sex(); ok {\n\t\tif err := user.SexValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"sex\", err: fmt.Errorf(\"ent: validator failed for field \\\"sex\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PhoneNumber(); ok {\n\t\tif err := user.PhoneNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Address(); ok {\n\t\tif err := user.AddressValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"address\", err: fmt.Errorf(\"ent: validator failed for field \\\"address\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffID(); ok {\n\t\tif err := user.StaffIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_id\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_id\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PersonalEmail(); ok {\n\t\tif err := user.PersonalEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"personal_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"personal_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IntranetWorkEmail(); ok {\n\t\tif err := user.IntranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"intranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ExtranetWorkEmail(); ok {\n\t\tif err := user.ExtranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"extranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"extranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", 
err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func ViewManagerAndUsers(w http.ResponseWriter, r *http.Request) {\n AuthorizePages(w,r) // Restrict Unauthorized User \n tmpl, err := template.ParseFiles(\"templates/viewManagersAndUsers.html\")\n if err != nil {\n fmt.Println(err)\n }\n\n var managerList []helpers.User\n var userList []helpers.User\n\n managerList = dbquery.GetManagerList()\n\n userId := UserIds{\n ManagerId: r.FormValue(\"managerId\"),\n }\n \n var isShow bool = false\n var noDataMessage string\n var listLen int\n \n if userId.ManagerId != \"Select\" && userId.ManagerId != \"\" {\n userList = dbquery.GetUserByMngrList(userId.ManagerId)\n listLen = len(userList);\n } else {\n isShow = true\n noDataMessage = \"Please select Manager\"\n }\n\n if (listLen == 0 && (userId.ManagerId != \"Select\" && userId.ManagerId != \"\")) {\n isShow = true\n noDataMessage = \"There are no users for this Manager\"\n }\n\n AuthorizePages(w,r) // Restrict Unauthorized User\n \n tmpl.Execute(w, AllUsersResponse{ListLen: listLen, Managers: managerList, Users: userList, IsShow: isShow, FailedMessage: noDataMessage})\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field 
\\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func runUserValFuncs(user *User, fns ...userValidatorFunc) error {\n\tfor _, fn := range fns {\n\t\tif err := fn(user); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (p *Provider) checkUser(userId bson.ObjectId, users []models.MachineUser) error {\n\t// check if the incoming user is in the list of permitted user list\n\tfor _, u := range users {\n\t\tif userId == u.Id && (u.Owner || (u.Permanent && u.Approved)) {\n\t\t\treturn nil // ok he/she is good to go!\n\t\t}\n\t}\n\n\treturn fmt.Errorf(\"permission denied. 
user not in the list of permitted users\")\n}", "func GetUsers(req *http.Request, render render.Render, account services.Account) {\n qs := req.URL.Query()\n userIDs := qs[\"userId\"]\n var users []models.User\n for _, userID := range userIDs {\n if user, err := account.GetUser(userID); err != nil {\n render.JSON(err.HttpCode, err)\n return\n } else {\n users = append(users, *user)\n }\n }\n render.JSON(http.StatusOK, users)\n}", "func UserListAll(w http.ResponseWriter, r *http.Request) {\n\n\tvar err error\n\tvar pageSize int\n\tvar paginatedUsers auth.PaginatedUsers\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\trefRoles := gorillaContext.Get(r, \"auth_roles\").([]string)\n\n\t// Grab url path variables\n\turlValues := r.URL.Query()\n\tpageToken := urlValues.Get(\"pageToken\")\n\tstrPageSize := urlValues.Get(\"pageSize\")\n\tprojectName := urlValues.Get(\"project\")\n\tprojectUUID := \"\"\n\n\tif projectName != \"\" {\n\t\tprojectUUID = projects.GetUUIDByName(projectName, refStr)\n\t\tif projectUUID == \"\" {\n\t\t\terr := APIErrorNotFound(\"ProjectUUID\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif strPageSize != \"\" {\n\t\tif pageSize, err = strconv.Atoi(strPageSize); err != nil {\n\t\t\tlog.Errorf(\"Pagesize %v produced an error while being converted to int: %v\", strPageSize, err.Error())\n\t\t\terr := APIErrorInvalidData(\"Invalid page size\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\t// check that user is indeed a service admin in order to be priviledged to see full user info\n\tpriviledged := auth.IsServiceAdmin(refRoles)\n\n\t// Get Results Object - call is always priviledged because this handler is only accessible by service admins\n\tif paginatedUsers, err = auth.PaginatedFindUsers(pageToken, int32(pageSize), projectUUID, priviledged, refStr); err != nil {\n\t\terr := APIErrorInvalidData(\"Invalid page token\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Output result to JSON\n\tresJSON, err := paginatedUsers.ExportJSON()\n\n\tif err != nil {\n\t\terr := APIErrExportJSON()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write response\n\toutput = []byte(resJSON)\n\trespondOK(w, output)\n\n}", "func getAllUsers(c *fiber.Ctx) error {\n\tcollection := mg.Db.Collection(\"users\")\n\tquery := bson.D{{}}\n\tcursor, err := collection.Find(c.Context(), &query)\n\tif err != nil {\n\t\treturn c.Status(500).SendString(err.Error())\n\t}\n\tvar records []User = make([]User, 0)\n\t// iterate the cursor and decode the values\n\tif err := cursor.All(c.Context(), &records); err != nil {\n\t\treturn c.Status(404).SendString(\"There isnt any\")\n\t}\n\tvar users []User = make([]User, 0)\n\tfor i, s := range records {\n\t\ts.Password = \"\"\n\t\ts.TaskCode = \"\"\n\t\tusers = append(users, s)\n\t\tfmt.Println(i)\n\t}\n\n\treturn c.JSON(users)\n}", "func postUser(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar user User\n\terr := json.NewDecoder(r.Body).Decode(&user)\n\tlog.ErrorHandler(err)\n\tvar (\n\t\temail = strings.ToLower(user.Email)\n\t\talias = user.Alias\n\t\tuserName = user.UserName\n\t\tpassword = user.Password\n\t\tfullName = user.FullName\n\t\tsafeNames bool\n\t\tsafeEmail = 
emailValidator(email)\n\t\tsafePassword = passwordValidator(password)\n\t\tsimilarToUser = similarToUser(fullName, alias, userName, password)\n\t)\n\n\tduplicateEmail := DuplicateCheck(email)\n\n\tif duplicateEmail {\n\t\tw.WriteHeader(http.StatusConflict)\n\t\terr := json.NewEncoder(w).Encode(core.FourONine)\n\t\tlog.ErrorHandler(err)\n\t\tlog.AccessHandler(r, 409)\n\t\treturn\n\t}\n\n\tsafeNames = userDetails(fullName, alias, userName)\n\n\tif safeNames {\n\t\t// Some or all of the details in the body are empty\n\t\t//\tAll fields are required\n\t\tw.WriteHeader(http.StatusUnprocessableEntity)\n\t\terr := json.NewEncoder(w).Encode(core.FourTwoTwo)\n\t\tlog.ErrorHandler(err)\n\t\tlog.AccessHandler(r, 422)\n\t\treturn\n\t}\n\n\tif !safeEmail {\n\t\t// Issue with Email\n\t\t//Email couldn't be verified or invalid email\n\t\tw.WriteHeader(http.StatusUnprocessableEntity)\n\t\terr := json.NewEncoder(w).Encode(core.FourTwoTwo)\n\t\tlog.ErrorHandler(err)\n\t\tlog.AccessHandler(r, 422)\n\t\treturn\n\t}\n\n\tif similarToUser {\n\t\t// Issue with Password\n\t\t// Password is similar to user information\n\t\tw.WriteHeader(http.StatusUnprocessableEntity)\n\t\terr := json.NewEncoder(w).Encode(core.FourTwoTwo)\n\t\tlog.ErrorHandler(err)\n\t\tlog.AccessHandler(r, 422)\n\t\treturn\n\t}\n\n\tif !safePassword {\n\t\t// Issue with Password\n\t\t//\tPassword doesn't go through the validator successfully\n\t\tw.WriteHeader(http.StatusUnprocessableEntity)\n\t\terr := json.NewEncoder(w).Encode(core.FourTwoTwo)\n\t\tlog.ErrorHandler(err)\n\t\tlog.AccessHandler(r, 422)\n\t\treturn\n\t}\n\n\tpasswordHash, err := generatePasswordHash(password)\n\tlog.ErrorHandler(err)\n\n\tuser = User{\n\t\tUserName: userName,\n\t\tFullName: fullName,\n\t\tAlias: alias,\n\t\tEmail: email,\n\t\tIsAdmin: false,\n\t\tPassword: passwordHash,\n\t\tLastLogin: time.Time{},\n\t\tIsActive: false,\n\t\tIsEmailVerified: false,\n\t}\n\n\t//\tfmt.Println(\"Create The Fucking User Here\")\n\n\tdb.Create(&user)\n\terr = json.NewEncoder(w).Encode(user)\n\tlog.ErrorHandler(err)\n\n\t// Create OTP to verify email by\n\t// OTP expires in 30 minutes\n\t// Stored in Redis with key new_user_otp_email\n\tverifiableToken := generateOTP()\n\terr = redisClient.Set(ctx, \"new_user_otp_\"+email, verifiableToken, 30*time.Minute).Err()\n\tlog.ErrorHandler(err)\n\n\t//payload := struct {\n\t//\tToken string\n\t//}{\n\t//\tToken: verifiableToken,\n\t//}\n\t//\n\t//var status bool\n\t//\n\t////status, err = core.SendEmailNoAttachment(email, \"OTP for Verification\", payload, \"token.txt\")\n\t//if !status {\n\t//\tw.WriteHeader(http.StatusInternalServerError)\n\t//\terr = json.NewEncoder(w).Encode(core.FiveHundred)\n\t//\tlog.ErrorHandler(err)\n\t//\tlog.AccessHandler(r, 500)\n\t//\treturn\n\t//}\n\tlog.ErrorHandler(err)\n\tlog.AccessHandler(r, 200)\n\treturn\n}", "func checkUserResponse(user, resp User) (err error) {\n\tif user.Name != resp.Name {\n\t\terr = errors.New(\"Name isn't equal\")\n\t\treturn\n\t}\n\tif user.Username != resp.Username {\n\t\terr = errors.New(\"Username isn't equal\")\n\t\treturn\n\t}\n\tif user.Phone != resp.Phone {\n\t\terr = errors.New(\"Phone isn't equal\")\n\t\treturn\n\t}\n\tif user.Password != \"\" {\n\t\terr = errors.New(\"Password isn't empty\")\n\t\treturn\n\t}\n\treturn\n}", "func ValidateUserView(result *UserView) (err error) {\n\tif result.ID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"id\", \"result\"))\n\t}\n\tif result.Username == nil {\n\t\terr = goa.MergeErrors(err, 
goa.MissingFieldError(\"username\", \"result\"))\n\t}\n\tif result.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"name\", \"result\"))\n\t}\n\tif result.Mobile == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"mobile\", \"result\"))\n\t}\n\tif result.Email == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"email\", \"result\"))\n\t}\n\tif result.Jobs == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"jobs\", \"result\"))\n\t}\n\tif result.IsAdmin == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"is_admin\", \"result\"))\n\t}\n\tif result.Jobs != nil {\n\t\tif !(*result.Jobs == 1 || *result.Jobs == 2 || *result.Jobs == 3) {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(\"result.jobs\", *result.Jobs, []interface{}{1, 2, 3}))\n\t\t}\n\t}\n\tif result.Superior != nil {\n\t\tif err2 := ValidateSuperiorView(result.Superior); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tif result.Group != nil {\n\t\tif err2 := ValidateGroupView(result.Group); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\treturn\n}", "func GetAllUsers(res http.ResponseWriter, req *http.Request) {\n\tres.Header().Set(\"Content-Type\", \"application/json\")\n\tquery := bson.M{}\n\tselector := bson.M{\n\t\t\"_id\": 1,\n\t\t\"name\": 1,\n\t\t\"email\": 1,\n\t}\n\n\tusers, err := db.GetAllUsers(query, selector)\n\tif err != nil {\n\t\tif err.Error() == mgo.ErrNotFound.Error() {\n\t\t\tmsg := \"User not found\"\n\n\t\t\tutils.ReturnErrorResponse(http.StatusNotFound, msg, \"\", nil, nil, res)\n\t\t\treturn\n\t\t}\n\n\t\tmsg := \"Error occurred while getting user details\"\n\t\tutils.ReturnErrorResponse(http.StatusBadRequest, msg, \"\", nil, nil, res)\n\t\treturn\n\t}\n\n\tmsg := \"Your request processed successfully\"\n\tutils.ReturnSuccessReponse(http.StatusOK, msg, users, res)\n}", "func ViewUsers(w http.ResponseWriter, r *http.Request) { \n AuthorizePages(w,r) \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/viewUsers.html\")\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n userId := UserIds{\n UserId: r.FormValue(\"userId\"),\n }\n\n var successMessage string\n var isShow bool \n\n if (userId.UserId != \"\" ) {\n if (dbquery.DeleteManagerUser(\"User\",userId.UserId)){\n isShow = true\n successMessage = \"User Deleted Successfully\"\n }\n }\n\n var userList []helpers.User \n userList = dbquery.GetUserByRole(\"\",\"'User'\")\n t.Execute(w, AllUsersResponse{Users: userList, SuccessMessage: successMessage, IsShow: isShow}) \n}", "func (h *HTTPClientHandler) getAllUsersHandler(w http.ResponseWriter, r *http.Request) {\n\n\tuserid, _ := r.URL.Query()[\"q\"]\n\t// looking for specific user\n\tif len(userid) > 0 {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"userid\": userid[0],\n\t\t}).Info(\"Looking for user..\")\n\n\t\tuser, err := h.db.getUser(userid[0])\n\n\t\tif err == nil {\n\t\t\t// Marshal provided interface into JSON structure\n\t\t\tresponse := UserResource{Data: user}\n\t\t\tuj, _ := json.Marshal(response)\n\n\t\t\t// Write content-type, statuscode, payload\n\t\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\tw.WriteHeader(200)\n\t\t\tfmt.Fprintf(w, \"%s\", uj)\n\t\t\treturn\n\t\t} else {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t}).Warn(\"Failed to insert..\")\n\n\t\t\tcontent, code := 
responseDetailsFromMongoError(err)\n\n\t\t\t// Marshal provided interface into JSON structure\n\t\t\tuj, _ := json.Marshal(content)\n\n\t\t\t// Write content-type, statuscode, payload\n\t\t\twriteJsonResponse(w, &uj, code)\n\t\t\treturn\n\n\t\t}\n\t}\n\n\tlog.Warn(len(userid))\n\t// displaying all users\n\tresults, err := h.db.getUsers()\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"error\": err.Error(),\n\t\t}).Error(\"Got error when tried to get all users\")\n\t}\n\n\tlog.WithFields(log.Fields{\n\t\t\"count\": len(results),\n\t}).Info(\"number of users\")\n\n\t// Marshal provided interface into JSON structure\n\tresponse := UsersResource{Data: results}\n\tuj, _ := json.Marshal(response)\n\n\t// Write content-type, statuscode, payload\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(200)\n\tfmt.Fprintf(w, \"%s\", uj)\n}", "func (u *User) Validate() ([]app.Invalid, error) {\n\tvar inv []app.Invalid\n\n\tif u.UserType == 0 {\n\t\tinv = append(inv, app.Invalid{Fld: \"UserType\", Err: \"The value of UserType cannot be 0.\"})\n\t}\n\n\tif u.FirstName == \"\" {\n\t\tinv = append(inv, app.Invalid{Fld: \"FirstName\", Err: \"A value of FirstName cannot be empty.\"})\n\t}\n\n\tif u.LastName == \"\" {\n\t\tinv = append(inv, app.Invalid{Fld: \"LastName\", Err: \"A value of LastName cannot be empty.\"})\n\t}\n\n\tif u.Email == \"\" {\n\t\tinv = append(inv, app.Invalid{Fld: \"Email\", Err: \"A value of Email cannot be empty.\"})\n\t}\n\n\tif u.Company == \"\" {\n\t\tinv = append(inv, app.Invalid{Fld: \"Company\", Err: \"A value of Company cannot be empty.\"})\n\t}\n\n\tif len(u.Addresses) == 0 {\n\t\tinv = append(inv, app.Invalid{Fld: \"Addresses\", Err: \"There must be at least one address.\"})\n\t} else {\n\t\tfor _, ua := range u.Addresses {\n\t\t\tif va, err := ua.Validate(); err != nil {\n\t\t\t\tinv = append(inv, va...)\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(inv) > 0 {\n\t\treturn inv, errors.New(\"Validation failures identified\")\n\t}\n\n\treturn nil, nil\n}", "func (mt EasypostUserCollection) Validate() (err error) {\n\tfor _, e := range mt {\n\t\tif e.ID == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response[*]`, \"id\"))\n\t\t}\n\t\tif e.Object == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response[*]`, \"object\"))\n\t\t}\n\t\tif e.Name == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response[*]`, \"name\"))\n\t\t}\n\t\tif e.Email == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response[*]`, \"email\"))\n\t\t}\n\n\t\tif err2 := e.Children.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t\tif ok := goa.ValidatePattern(`^user_`, e.ID); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response[*].id`, e.ID, `^user_`))\n\t\t}\n\t\tif ok := goa.ValidatePattern(`^User$`, e.Object); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response[*].object`, e.Object, `^User$`))\n\t\t}\n\t}\n\treturn\n}", "func (p *Plugin) getSkilledUsers(userQuery string, selfID string) map[string]string {\n\n\tpreprocessedString := preprocessQuery(strings.TrimSpace(userQuery))\n\n\tsetOfWords := mapset.NewSet()\n\n\tfor _, word := range strings.Split(preprocessedString, \" \") {\n\t\tsetOfWords.Add(strings.ToUpper(strings.TrimSpace(word)))\n\t}\n\n\t// Get skills required to solve the issue (userQuery)\n\tcommon := setOfWords.Intersect(p.allSkills)\n\tcommonSkills := common.String()\n\tcommonSkills = commonSkills[4 : 
len(commonSkills)-1]\n\n\t// atleast one skill is required in the issue to search for the user\n\tatleastOneReq := false\n\n\tif commonSkills != \"\" {\n\t\tatleastOneReq = true\n\t}\n\n\tskillsForQuery := strings.Split(strings.TrimSpace(commonSkills), \",\")\n\n\t// Get list of a few users. Currently considering 500.\n\tusers, err := p.API.KVList(0, 500)\n\n\tif err != nil {\n\t\tp.API.LogError(\"Failed to fetch user's list\", \"err\", err.Error())\n\t\treturn map[string]string{\n\t\t\t\"Found\": \"false\",\n\t\t\t\"Error\": \"true\",\n\t\t\t\"Message\": \"Error\",\n\t\t}\n\t}\n\n\t// Find user who has all the required skills to resolve the issue\n\tfor _, user := range users {\n\t\tif user != selfID {\n\t\t\tskills, err := p.API.KVGet(user)\n\t\t\tif err != nil {\n\t\t\t\tp.API.LogError(\"Failed to get user's skills\", \"err\", err.Error())\n\t\t\t\treturn map[string]string{\n\t\t\t\t\t\"Found\": \"false\",\n\t\t\t\t\t\"Error\": \"true\",\n\t\t\t\t\t\"Message\": \"Error\",\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif skills != nil {\n\t\t\t\tskillsOfUser := strings.Split(string(skills), \",\")\n\t\t\t\tfoundAll := true\n\n\t\t\t\tfor _, skillReq := range skillsForQuery {\n\n\t\t\t\t\tfoundThisSkill := false\n\t\t\t\t\tfor _, skill := range skillsOfUser {\n\t\t\t\t\t\tif strings.ToUpper(strings.TrimSpace(skillReq)) == skill {\n\t\t\t\t\t\t\tfoundThisSkill = true\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif !foundThisSkill {\n\t\t\t\t\t\tfoundAll = false\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif foundAll && atleastOneReq {\n\t\t\t\t\tuserInfo, err := p.API.GetUser(user)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tp.API.LogError(\"Failed to get user information\", \"err\", err.Error())\n\t\t\t\t\t\treturn map[string]string{\n\t\t\t\t\t\t\t\"Found\": \"false\",\n\t\t\t\t\t\t\t\"Error\": \"true\",\n\t\t\t\t\t\t\t\"Message\": \"Error\",\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// Skilled user (in the domain of the issue) found.\n\t\t\t\t\t// Suggest username to the user who wants to resolve the issue.\n\t\t\t\t\tp.API.LogInfo(\"Skilled user found\", map[string]string{\"Info\": \"Skilled user found\"})\n\t\t\t\t\treturn map[string]string{\n\t\t\t\t\t\t\"Found\": \"true\",\n\t\t\t\t\t\t\"Error\": \"false\",\n\t\t\t\t\t\t\"Message\": \"You may contact @\" + userInfo.Username + \" who is skilled in [ \" + string(skills) + \"]\\n\",\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// No skilled user found. 
Report user not found.\n\tp.API.LogInfo(\"Skilled user not found\", map[string]string{\"Info\": \"Skilled user not found\"})\n\treturn map[string]string{\n\t\t\"Found\": \"false\",\n\t\t\"Error\": \"false\",\n\t\t\"Message\": \"User not found \",\n\t}\n}", "func (u User) Validate() []string {\n\tvar errs []string\n\tfor _, n := range u.Name {\n\t\tif !unicode.IsLetter(n) && !unicode.IsDigit(n) && !unicode.IsPunct(n) && !unicode.IsSpace(n) {\n\t\t\terrs = append(errs, \"name::is_not_word\")\n\t\t\tbreak\n\t\t}\n\t}\n\tif utf8.RuneCountInString(u.Password) < 8 {\n\t\terrs = append(errs, \"password::min_length_is::8\")\n\t}\n\tif len(u.Password) > 0 {\n\t\tvar PasswordUpper, PasswordLetter, PasswordNumber, PasswordSpaces, PasswordInvalidChar bool\n\t\tfor _, c := range u.Password {\n\t\t\tif c < 33 || c > 126 {\n\t\t\t\tPasswordInvalidChar = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tswitch {\n\t\t\tcase unicode.IsNumber(c):\n\t\t\t\tPasswordNumber = true\n\t\t\tcase unicode.IsUpper(c):\n\t\t\t\tPasswordUpper = true\n\t\t\tcase unicode.IsLetter(c):\n\t\t\t\tPasswordLetter = true\n\t\t\tcase c == ' ':\n\t\t\t\tPasswordSpaces = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !PasswordUpper || !PasswordLetter || !PasswordNumber || PasswordSpaces || PasswordInvalidChar {\n\t\t\terrs = append(errs, \"password::invalid_password_format\")\n\t\t}\n\t}\n\tfor _, n := range u.Desc {\n\t\tif !unicode.IsLetter(n) && !unicode.IsDigit(n) && !unicode.IsPunct(n) && !unicode.IsSpace(n) {\n\t\t\terrs = append(errs, \"desc::is_not_word\")\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn errs\n}", "func (u User) IsValid() []error{\n\tvar errs []error\n\tfirstname := strings.Trim(u.FirstName, \" \")\n\tlastname := strings.Trim(u.LastName, \" \")\n\n\tif firstname != \"\" {\n\t\tif strings.Contains(firstname, \" \"){\n\t\t\terrs = append(errs, errors.New(\"FirstName can't have spaces\"))\n\t\t}\n\t\tif len(firstname) < 2 {\n\t\t\terrs = append(errs, errors.New(\"FirstName must be at least 2 characters\"))\n\t\t}\n\t\tif !helper.IsLetter(firstname) {\n\t\t\terrs = append(errs, errors.New(\"Firstname contains a number\"))\n\t\t}\n\t}\n\n\tif lastname != \"\"{\n\t\tif strings.Contains(lastname, \" \"){\n\t\t\terrs = append(errs, errors.New(\"LastName can't have spaces\"))\n\t\t}\n\n\t\tif len(lastname) < 2 {\n\t\t\terrs = append(errs, errors.New(\"LastName must be at least 2 characters\"))\n\t\t}\n\n\t\tif !helper.IsLetter(lastname) {\n\t\t\terrs = append(errs, errors.New(\"Lastname contains a number\"))\n\t\t}\n\t}\n\n\tif u.Email != \"\" {\n\t\tre := regexp.MustCompile(\"^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$\")\n\n\t\tif !re.MatchString(u.Email) {\n\t\t\terrs = append(errs, errors.New(\"Email address is not valid\"))\n\t\t}\n\t}\n\n\n\tyear, _, _, _, _, _ := helper.DateDiff(u.DateOfBirth, time.Now())\n\tif year < 18 {\n\t\terrs = append(errs, errors.New(\"You must be 18 or more\"))\n\t}\n\tif len(errs) > 0 {\n\t\treturn errs\n\t}\n\treturn nil\n}", "func GetAllUser(c *gin.Context) {\n\tuserList := FindAll()\n\n\tif len(userList) <= 0 {\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"status\": http.StatusOK,\n\t\t\t\"message\": \"user is not found\",\n\t\t})\n\t} else {\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"status\": http.StatusOK,\n\t\t\t\"data\": userList,\n\t\t})\n\t}\n}", "func (u *UserCtr) GetUserAll(c *gin.Context) {\n\tusers, err := model.UserAll(u.DB)\n\tif err != nil {\n\t\tresp := 
errors.New(err.Error())\n\t\tc.JSON(http.StatusInternalServerError, resp)\n\t\treturn\n\t}\n\n\tif len(users) == 0 {\n\t\tc.JSON(http.StatusOK, make([]*model.User, 0))\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"result\": users,\n\t})\n\treturn\n}", "func isUserValid(username, password string) bool {\n\tfor _, u := range userList {\n\t\tif u.Username == username && u.Password == password {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (p *politeiawww) processUsers(users *www.Users, isAdmin bool) (*www.UsersReply, error) {\n\tlog.Tracef(\"processUsers: %v\", isAdmin)\n\n\temailQuery := strings.ToLower(users.Email)\n\tusernameQuery := formatUsername(users.Username)\n\tpubkeyQuery := users.PublicKey\n\n\tvar u *user.User\n\tvar totalUsers uint64\n\tvar totalMatches uint64\n\tvar pubkeyMatchID string\n\tmatchedUsers := make([]www.AbridgedUser, 0, www.UserListPageSize)\n\n\tif pubkeyQuery != \"\" {\n\t\t// Search by pubkey. Only exact matches are returned.\n\t\t// Validate pubkey\n\t\terr := validatePubKey(pubkeyQuery)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tu, err := p.db.UserGetByPubKey(pubkeyQuery)\n\t\tif err != nil {\n\t\t\tif err == user.ErrUserNotFound {\n\t\t\t\t// Pubkey searches require an exact match. If no\n\t\t\t\t// match was found, we can go ahead and return.\n\t\t\t\treturn &www.UsersReply{}, nil\n\t\t\t}\n\t\t\treturn nil, err\n\t\t}\n\n\t\tpubkeyMatchID = u.ID.String()\n\t}\n\n\tswitch {\n\tcase isAdmin:\n\t\t// Admins can search by username and/or email with\n\t\t// partial matches being returned.\n\t\terr := p.db.AllUsers(func(user *user.User) {\n\t\t\ttotalUsers++\n\t\t\tuserMatches := true\n\n\t\t\t// If both emailQuery and usernameQuery are non-empty, the\n\t\t\t// user must match both to be included in the results.\n\t\t\tif emailQuery != \"\" {\n\t\t\t\tif !strings.Contains(strings.ToLower(user.Email),\n\t\t\t\t\temailQuery) {\n\t\t\t\t\tuserMatches = false\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif usernameQuery != \"\" && userMatches {\n\t\t\t\tif !strings.Contains(strings.ToLower(user.Username),\n\t\t\t\t\tusernameQuery) {\n\t\t\t\t\tuserMatches = false\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif pubkeyQuery != \"\" && userMatches {\n\t\t\t\tif user.ID.String() != pubkeyMatchID {\n\t\t\t\t\tuserMatches = false\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif userMatches {\n\t\t\t\ttotalMatches++\n\t\t\t\tif totalMatches < www.UserListPageSize {\n\t\t\t\t\tmatchedUsers = append(matchedUsers, www.AbridgedUser{\n\t\t\t\t\t\tID: user.ID.String(),\n\t\t\t\t\t\tEmail: user.Email,\n\t\t\t\t\t\tUsername: user.Username,\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Sort results alphabetically.\n\t\tsort.Slice(matchedUsers, func(i, j int) bool {\n\t\t\treturn matchedUsers[i].Username < matchedUsers[j].Username\n\t\t})\n\n\tdefault:\n\t\t// Non-admins can search by username and the search\n\t\t// must be an exact match.\n\t\tif usernameQuery != \"\" {\n\t\t\t// Validate username\n\t\t\terr := validateUsername(usernameQuery)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tu, err = p.db.UserGetByUsername(usernameQuery)\n\t\t\tif err != nil {\n\t\t\t\t// ErrUserNotFound is ok. 
Empty search results\n\t\t\t\t// will be returned.\n\t\t\t\tif err != user.ErrUserNotFound {\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// If both pubkeyQuery and usernameQuery are non-empty, the\n\t\t\t// user must match both to be included in the results.\n\t\t\tif (u != nil) && (pubkeyQuery != \"\") &&\n\t\t\t\t(u.ID.String() != pubkeyMatchID) {\n\t\t\t\t// User doesn't match both\n\t\t\t\tu = nil\n\t\t\t}\n\t\t}\n\n\t\tif u != nil {\n\t\t\ttotalMatches++\n\t\t\tmatchedUsers = append(matchedUsers, www.AbridgedUser{\n\t\t\t\tID: u.ID.String(),\n\t\t\t\tUsername: u.Username})\n\t\t}\n\t}\n\n\treturn &www.UsersReply{\n\t\tTotalUsers: totalUsers,\n\t\tTotalMatches: totalMatches,\n\t\tUsers: matchedUsers,\n\t}, nil\n}", "func HandleUserGetAll(c *gin.Context) {\n\tvar u User\n\tusers, err := u.GetAll()\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\n\t\t\t\"code\": 1,\n\t\t\t\"msg\": err.Error(),\n\t\t\t\"data\": gin.H{},\n\t\t})\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"code\": 0,\n\t\t\"msg\": \"ok\",\n\t\t\"data\": users,\n\t})\n}", "func (u User2) Validate() []string {\n\tvar errs []string\n\tfor _, n := range u.Name {\n\t\tif !unicode.IsLetter(n) && !unicode.IsDigit(n) && !unicode.IsPunct(n) && !unicode.IsSpace(n) {\n\t\t\terrs = append(errs, \"name::is_not_word\")\n\t\t\tbreak\n\t\t}\n\t}\n\tif u.PasswordRef != nil && utf8.RuneCountInString(*u.PasswordRef) < 8 {\n\t\terrs = append(errs, \"password_ref::min_length_is::8\")\n\t}\n\tif u.PasswordRef != nil && len(*u.PasswordRef) > 0 {\n\t\tvar PasswordRefUpper, PasswordRefLetter, PasswordRefNumber, PasswordRefSpaces, PasswordRefInvalidChar bool\n\t\tfor _, c := range *u.PasswordRef {\n\t\t\tif c < 33 || c > 126 {\n\t\t\t\tPasswordRefInvalidChar = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tswitch {\n\t\t\tcase unicode.IsNumber(c):\n\t\t\t\tPasswordRefNumber = true\n\t\t\tcase unicode.IsUpper(c):\n\t\t\t\tPasswordRefUpper = true\n\t\t\tcase unicode.IsLetter(c):\n\t\t\t\tPasswordRefLetter = true\n\t\t\tcase c == ' ':\n\t\t\t\tPasswordRefSpaces = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !PasswordRefUpper || !PasswordRefLetter || !PasswordRefNumber || PasswordRefSpaces || PasswordRefInvalidChar {\n\t\t\terrs = append(errs, \"password_ref::invalid_password_format\")\n\t\t}\n\t}\n\tfor _, n := range u.Desc {\n\t\tif !unicode.IsLetter(n) && !unicode.IsDigit(n) && !unicode.IsPunct(n) && !unicode.IsSpace(n) {\n\t\t\terrs = append(errs, \"desc::is_not_word\")\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn errs\n}", "func (mt *EasypostUser) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Object == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"object\"))\n\t}\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\tif mt.Email == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"email\"))\n\t}\n\n\tif err2 := mt.Children.Validate(); err2 != nil {\n\t\terr = goa.MergeErrors(err, err2)\n\t}\n\tif ok := goa.ValidatePattern(`^user_`, mt.ID); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.id`, mt.ID, `^user_`))\n\t}\n\tif ok := goa.ValidatePattern(`^User$`, mt.Object); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.object`, mt.Object, `^User$`))\n\t}\n\treturn\n}", "func (u *User) Validate(tx *pop.Connection) (*validate.Errors, error) {\n\tvar err error\n\t//validate 
based on is agent\n\tif !u.IsAgent { //is not an agent\n\t\treturn validate.Validate(\n\t\t\t&validators.StringIsPresent{Field: u.Email, Name: \"Email\"},\n\t\t\t&validators.StringIsPresent{Field: u.PasswordHash, Name: \"PasswordHash\"},\n\t\t\t&validators.StringIsPresent{Field: u.LastName, Name: \"LastName\"},\n\t\t\t&validators.StringIsPresent{Field: u.FirstName, Name: \"FirstName\"},\n\t\t\t&validators.StringIsPresent{Field: u.Phone, Name: \"Phone\"},\n\t\t\t// check to see if the email address is already taken:\n\t\t\t&validators.FuncValidator{\n\t\t\t\tField: u.Email,\n\t\t\t\tName: \"Email\",\n\t\t\t\tMessage: \"%s is already taken\",\n\t\t\t\tFn: func() bool {\n\t\t\t\t\tvar b bool\n\t\t\t\t\tq := tx.Where(\"email = ?\", u.Email)\n\t\t\t\t\tif u.ID != uuid.Nil {\n\t\t\t\t\t\tq = q.Where(\"id != ?\", u.ID)\n\t\t\t\t\t}\n\t\t\t\t\tb, err = q.Exists(u)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t\treturn !b\n\t\t\t\t},\n\t\t\t},\n\t\t), err\n\n\t} else { // trying to save agent\n\t\treturn validate.Validate(\n\t\t\t&validators.StringIsPresent{Field: u.Email, Name: \"Email\"},\n\t\t\t&validators.StringIsPresent{Field: u.PasswordHash, Name: \"PasswordHash\"},\n\t\t\t&validators.StringIsPresent{Field: u.LastName, Name: \"LastName\"},\n\t\t\t&validators.StringIsPresent{Field: u.FirstName, Name: \"FirstName\"},\n\t\t\t&validators.StringIsPresent{Field: u.Phone, Name: \"Phone\"},\n\t\t\t&validators.StringIsPresent{Field: u.PublicEmail.String, Name: \"PublicEmail\"},\n\t\t\t&validators.StringIsPresent{Field: u.Company.String, Name: \"Company\"},\n\t\t\t&validators.StringIsPresent{Field: u.Address1.String, Name: \"Address1\"},\n\t\t\t&validators.StringIsPresent{Field: u.City.String, Name: \"City\"},\n\t\t\t&validators.StringIsPresent{Field: u.State.String, Name: \"State\"},\n\t\t\t&validators.StringIsPresent{Field: u.Zipcode.String, Name: \"Zipcode\"},\n\t\t\t// check to see if the email address is already taken:\n\t\t\t&validators.FuncValidator{\n\t\t\t\tField: u.Email,\n\t\t\t\tName: \"Email\",\n\t\t\t\tMessage: \"%s is already taken\",\n\t\t\t\tFn: func() bool {\n\t\t\t\t\tvar b bool\n\t\t\t\t\tq := tx.Where(\"email = ?\", u.Email)\n\t\t\t\t\tif u.ID != uuid.Nil {\n\t\t\t\t\t\tq = q.Where(\"id != ?\", u.ID)\n\t\t\t\t\t}\n\t\t\t\t\tb, err = q.Exists(u)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false\n\t\t\t\t\t}\n\t\t\t\t\treturn !b\n\t\t\t\t},\n\t\t\t},\n\t\t), err\n\n\t}\n}", "func (u *User) Validate(values ...interface{}) (bool, []error) {\n\treturn true, nil\n}", "func allUsers(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar users []User\n\tdb.Scopes(Paginate(r)).Find(&users)\n\terr := json.NewEncoder(w).Encode(users)\n\tlog.ErrorHandler(err)\n\tlog.AccessHandler(r, 200)\n\treturn\n}", "func (user *User) Validate(clients []*Client) error {\n\t// convert pager slice to generic interface slice\n\tclientIDs := make([]interface{}, len(clients))\n\tfor i, client := range clients {\n\t\tclientIDs[i] = client.ID\n\t}\n\n\tif err := validation.ValidateStruct(user,\n\t\tvalidation.Field(&user.Username, validation.Required, validation.Match(regexp.MustCompile(\"[[:word:]]+$\"))),\n\t\tvalidation.Field(&user.Password, validation.Required, validation.Length(1, 100)),\n\t\tvalidation.Field(&user.ClientID, validation.In(clientIDs...)),\n\t); err != nil {\n\t\tif e, ok := err.(validation.InternalError); ok {\n\t\t\treturn errors.Wrap(e, \"internal validation error occured\")\n\t\t}\n\n\t\treturn 
&modelValidationErr{err.Error()}\n\t}\n\n\treturn nil\n}", "func Validate(config *Main) error {\n\tfor user, userData := range config.Users {\n\t\tif len(userData.Entrypoint) == 0 {\n\t\t\treturn fmt.Errorf(\"The field `entrypoint` is missing for user '%s'\", user)\n\t\t}\n\n\t\tif len(userData.Sitemaps.Default) == 0 {\n\t\t\treturn fmt.Errorf(\"The field `sitemaps.default` is missing for user '%s'\", user)\n\t\t}\n\n\t\tif len(userData.Sitemaps.Allowed) == 0 {\n\t\t\treturn fmt.Errorf(\"The field `sitemaps.allowed` is missing for user '%s'\", user)\n\t\t}\n\t}\n\n\treturn nil\n}", "func ListAllUsers(w http.ResponseWriter, r *http.Request) {\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusBadRequest, \"Mohomaaf ...dsb\", nil, nil)\n\t\t}\n\t}()\n\n\tfLog := userMgmtLogger.WithField(\"func\", \"ListAllUsers\").WithField(\"RequestID\", r.Context().Value(constants.RequestID)).WithField(\"path\", r.URL.Path).WithField(\"method\", r.Method)\n\n\tiauthctx := r.Context().Value(constants.HansipAuthentication)\n\tif iauthctx == nil {\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusUnauthorized, \"You are not authorized to access this resource\", nil, nil)\n\t\treturn\n\t}\n\n\tfLog.Trace(\"Listing Users\")\n\tpageRequest, err := helper.NewPageRequestFromRequest(r)\n\tif err != nil {\n\t\tfLog.Errorf(\"helper.NewPageRequestFromRequest got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusBadRequest, err.Error(), nil, nil)\n\t\treturn\n\t}\n\tusers, page, err := UserRepo.ListUser(r.Context(), pageRequest)\n\tif err != nil {\n\t\tfLog.Errorf(\"UserRepo.ListUser got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusInternalServerError, err.Error(), nil, nil)\n\t\treturn\n\t}\n\tsusers := make([]*SimpleUser, len(users))\n\tfor i, v := range users {\n\t\tsusers[i] = &SimpleUser{\n\t\t\tRecID: v.RecID,\n\t\t\tEmail: v.Email,\n\t\t\tEnabled: v.Enabled,\n\t\t\tSuspended: v.Suspended,\n\t\t}\n\t}\n\tret := make(map[string]interface{})\n\tret[\"users\"] = susers\n\tret[\"page\"] = page\n\thelper.WriteHTTPResponse(r.Context(), w, http.StatusOK, \"List of all user paginated\", nil, ret)\n}", "func LitUsersUnderHim(w http.ResponseWriter, r *http.Request) { \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/listUsersUnderHim.html\")\n\n userDetails := getSession(r)\n\n AuthorizePages(w,r)\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if err != nil {\n fmt.Println(err)\n }\n var userList []helpers.User\n var listLen int\n var failedMessage string\n var isShow bool = false\n\n userList = dbquery.GetUserByMngrList(userDetails.UserId)\n listLen = len(userList);\n\n if listLen == 0 {\n isShow = true\n failedMessage = \"Currently you are not assigned for any User\"\n } \n\n t.Execute(w, AllUsersResponse{Users: userList, ListLen: listLen, FailedMessage: failedMessage, IsShow: isShow}) \n}", "func fetchAllUser(c *gin.Context) {\n\tvar users []user\n\n\tdb.Find(&users)\n\n\tif len(users) <= 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No user found!\"})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"data\": users})\n}", "func (a authenticator) Validate(user string) bool {\n\tfor _, u := range users {\n\t\tif user == u {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn 
false\n}", "func UsersBusy(thismeet User) error {\r\n\tcollection := client.Database(\"appointytask\").Collection(\"users\")\r\n\tctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)\r\n\tdefer cancel()\r\n\tvar meet User\r\n\tfor _, thisperson := range thismeet.Users {\r\n\t\tif thisperson.Password == \"Yes\" {\r\n\t\t\tfilter := bson.M{\r\n\t\t\t\t\"users.email\": thisperson.Email,\r\n\t\t\t\t\"users.password\": \"Yes\",\r\n\t\t\t\t\"endtime\": bson.M{\"$gt\": string(time.Now().Format(time.RFC3339))},\r\n\t\t\t}\r\n\t\t\tcursor, _ := collection.Find(ctx, filter)\r\n\t\t\tfor cursor.Next(ctx) {\r\n\t\t\t\tcursor.Decode(&meet)\r\n\t\t\t\tif (thismeet.Starttime >= meet.Starttime && thismeet.Starttime <= meet.Endtime) ||\r\n\t\t\t\t\t(thismeet.Endtime >= meet.Starttime && thismeet.Endtime <= meet.Endtime) {\r\n\t\t\t\t\treturnerror := \"Error 400: User \" + thisperson.Name + \" Password Clash\"\r\n\t\t\t\t\treturn errors.New(returnerror)\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn nil\r\n}", "func (user *User) Validate(action string) map[string]string {\n\tvar errMessages = make(map[string]string)\n\tvar err error\n\n\tswitch strings.ToLower(action) {\n\tcase \"update\":\n\t\tif user.Email == \"\" {\n\t\t\terrMessages[\"email_required\"] = \"email required\"\n\t\t}\n\n\t\tif user.Email != \"\" {\n\t\t\tif err = checkmail.ValidateFormat(user.Email); err != nil {\n\t\t\t\terrMessages[\"invalid_email\"] = \"invalid email\"\n\t\t\t}\n\t\t}\n\tcase \"login\":\n\t\tif user.Password == \"\" {\n\t\t\terrMessages[\"password_required\"] = \"password is required\"\n\t\t}\n\n\t\tif user.Email == \"\" {\n\t\t\terrMessages[\"email_required\"] = \"email required\"\n\t\t}\n\n\t\tif user.Email != \"\" {\n\t\t\tif err = checkmail.ValidateFormat(user.Email); err != nil {\n\t\t\t\terrMessages[\"invalid_email\"] = \"invalid email\"\n\t\t\t}\n\t\t}\n\tcase \"forgotpassword\":\n\t\tif user.Email == \"\" {\n\t\t\terrMessages[\"email_required\"] = \"email required\"\n\t\t}\n\t\tif user.Email != \"\" {\n\t\t\tif err = checkmail.ValidateFormat(user.Email); err != nil {\n\t\t\t\terrMessages[\"invalid_email\"] = \"please provide a valid email\"\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tif user.FirstName == \"\" {\n\t\t\terrMessages[\"firstname_required\"] = \"first name is required\"\n\t\t}\n\n\t\tif user.LastName == \"\" {\n\t\t\terrMessages[\"lastname_required\"] = \"last name is required\"\n\t\t}\n\n\t\tif user.Password == \"\" {\n\t\t\terrMessages[\"password_required\"] = \"password is required\"\n\t\t}\n\n\t\tif user.Password != \"\" && len(user.Password) < 6 {\n\t\t\terrMessages[\"invalid_password\"] = \"password should be at least 6 characters\"\n\t\t}\n\n\t\tif user.Email == \"\" {\n\t\t\terrMessages[\"email_required\"] = \"email is required\"\n\t\t}\n\n\t\tif user.Email != \"\" {\n\t\t\tif err = checkmail.ValidateFormat(user.Email); err != nil {\n\t\t\t\terrMessages[\"invalid_email\"] = \"please provide a valid email\"\n\t\t\t}\n\t\t}\n\t}\n\n\treturn errMessages\n}", "func checkUser(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tparams := mux.Vars(r)\n\tresult := get.CheckTheUser(params[\"email\"])\n\tjson.NewEncoder(w).Encode(map[string]string{\n\t\t\"result\": result,\n\t})\n}", "func (s *UserSet) Require(query string) ([]User, bool) {\n\treqSet := make([]User, 0)\n\tfor _, u := range s.Users {\n\t\tif strings.Contains(u.FirstName, query) ||\n\t\t\tstrings.Contains(u.LastName, query) ||\n\t\t\tstrings.Contains(u.About, query) 
{\n\n\t\t\treqSet = append(reqSet, User{\n\t\t\t\tId: u.Id,\n\t\t\t\tName: u.FirstName + \" \" + u.LastName,\n\t\t\t\tAge: u.Age,\n\t\t\t\tAbout: u.About,\n\t\t\t\tGender: u.Gender,\n\t\t\t})\n\t\t}\n\t}\n\tisMatch := len(reqSet) > 0\n\treturn reqSet, isMatch\n}", "func ReadAllUsers(ID string, page int64, search string, stype string) ([]*models.Users, bool) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\tdefer cancel()\n\t//Aqui apunto a la base de datos y la collection\n\tbd := MongoCN.Database(\"twittgo\")\n\tcol := bd.Collection(\"users\")\n\n\tvar results []*models.Users\n\n\tfindOptions := options.Find()\n\tfindOptions.SetLimit(20)\n\tfindOptions.SetSkip((page - 1) * 20)\n\n\tquery := bson.M{\n\t\t\"nombre\": bson.M{\"$regex\": `(?i)` + search},\n\t}\n\n\tcur, err := col.Find(ctx, query, findOptions)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn results, false\n\t}\n\n\tvar find, include bool\n\n\tfor cur.Next(ctx) {\n\t\tvar s models.Users\n\t\terr := cur.Decode(&s)\n\t\tif err != nil {\n\t\t\tfmt.Println(err.Error())\n\t\t\treturn results, false\n\t\t}\n\n\t\tvar r models.Relation\n\t\tr.UserID = ID\n\t\tr.UserRelationID = s.ID.Hex()\n\n\t\tinclude = false\n\n\t\tfind, err = CheckRelation(r)\n\t\tif stype == \"new\" && find == false {\n\t\t\tinclude = true\n\t\t}\n\t\tif stype == \"follow\" && find == true {\n\t\t\tinclude = true\n\t\t}\n\n\t\tif r.UserRelationID == ID {\n\t\t\tinclude = false\n\t\t}\n\t\tif include == true {\n\t\t\ts.Password = \"\"\n\t\t\ts.Biografia = \"\"\n\t\t\ts.Banner = \"\"\n\t\t\ts.Email = \"\"\n\t\t\ts.SitioWeb = \"\"\n\t\t\ts.Ubicacion = \"\"\n\n\t\t\tresults = append(results, &s)\n\t\t}\n\n\t}\n\n\terr = cur.Err()\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn results, false\n\t}\n\n\tcur.Close(ctx)\n\treturn results, true\n}", "func (s *UserService) GetAll() ([]model.User , error){\n\t// variable for all user\n\tvar users []model.User\n\t// get all data in user based on filter if err print error\n\tcursor, err := conn.Collection(\"user\").Find(context.Background(), bson.D{})\n\tif err != nil {\n\t\tprintln(err)\n\t}\n\t// iterate all cursor and append it to users slice\n\tfor cursor.Next(context.TODO()) {\n elem := model.User{}\n if err := cursor.Decode(&elem); err != nil {\n log.Fatal(err)\n }\n users = append(users, elem)\n\t}\n\t// return user\n\treturn users, nil\n}", "func userList(w http.ResponseWriter, r *http.Request) {}", "func ViewListOtherManagers(w http.ResponseWriter, r *http.Request) { \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/viewListOtherManagers.html\")\n\n userDetails := getSession(r)\n\n AuthorizePages(w,r)\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if err != nil {\n fmt.Println(err)\n }\n var managerList []helpers.User\n var listLen int\n var failedMessage string\n var isShow bool = false\n\n managerList = dbquery.GetManagerList()\n listLen = len(managerList);\n\n var managerList1 []helpers.User\n\n for i := 0; i < listLen; i++ {\n if managerList[i].UserId != userDetails.UserId {\n managerList1 = append(managerList1, helpers.User{\n FirstName: managerList[i].FirstName,\n LastName: managerList[i].LastName,\n UserId: managerList[i].UserId,\n })\n }\n }\n if listLen == 0 {\n isShow = true\n failedMessage = \"Currently you are not assigned for any User\"\n } \n\n t.Execute(w, AllUsersResponse{Users: 
managerList1, ListLen: listLen, FailedMessage: failedMessage, IsShow: isShow}) \n}", "func checkUserIsRegistered(email string, password string) string {\n\tdb, err := config.GetMongoDB()\n\n\tif err != nil {\n\t\tfmt.Println(\"Gagal menghubungkan ke database!\")\n\t\tos.Exit(2)\n\t}\n\n\tvar userRepository repository.UserRepository\n\n\tuserRepository = repository.NewUserRepositoryMongo(db, \"pengguna\")\n\n\tuserData, err1 := userRepository.FindAll()\n\n\tif err1 != nil {\n\t\treturn \"invalid_email_login\"\n\t} else {\n\t\tfor _, user := range userData {\n\t\t\tif email == user.Email {\n\t\t\t\tif components.CheckPasswordHash(password, user.Password) == true {\n\t\t\t\t\treturn \"login_success\"\n\t\t\t\t} else {\n\t\t\t\t\treturn \"invalid_password_login\"\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn \"invalid_email_login\"\n\t\t\t}\n\t\t}\n\t}\n\n\treturn \"invalid_email_login\"\n}", "func (config ConfigUser) Validate() (err error) {\n\terr = config.User.Validate()\n\tif err != nil {\n\t\treturn\n\t}\n\tif config.Groups != nil {\n\t\tif len(*config.Groups) != 0 {\n\t\t\tfor _, e := range *config.Groups {\n\t\t\t\terr = e.Validate()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn config.Password.Validate()\n}", "func (ctx *Upgrader) getUsers(notReturn ...*Socket) []*Socket {\n\tvar users []*Socket\n\t//\n\tcontain := func(user *Socket) bool {\n\t\tfor indc := range notReturn {\n\t\t\tif user == notReturn[indc] {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\t//\n\tfor _, val := range ctx.usersSk {\n\t\tif !contain(val) {\n\t\t\tusers = append(users, val)\n\t\t}\n\t}\n\treturn users\n}", "func GetUsers(w http.ResponseWriter, r *http.Request) {\n\tvar users []UsersData\n\terr := model.FindAll(nil, &users)\n\tif err != nil {\n\t\tfmt.Println(\"err\", err)\n\t\tw.Write([]byte(\"Something wen't wrong!!\"))\n\t} else {\n\t\trender.JSON(w, 200, &users)\n\t}\n}", "func getUser(c *fiber.Ctx) error {\n\tUserscollection := mg.Db.Collection(\"users\")\n\tListscollection := mg.Db.Collection(\"lists\")\n\tusername := c.Params(\"name\")\n\tuserQuery := bson.D{{Key: \"username\", Value: username}}\n\n\tuserRecord := Userscollection.FindOne(c.Context(), &userQuery)\n\tuser := &User{}\n\tuserRecord.Decode(&user)\n\tif len(user.ID) < 1 {\n\t\treturn c.Status(404).SendString(\"cant find user\")\n\t}\n\tlistQuery := bson.D{{Key: \"userid\", Value: user.Username}}\n\tcursor, err := Listscollection.Find(c.Context(), &listQuery)\n\tif err != nil {\n\t\treturn c.Status(500).SendString(err.Error())\n\t}\n\tvar lists []List = make([]List, 0)\n\tif err := cursor.All(c.Context(), &lists); err != nil {\n\t\treturn c.Status(500).SendString(\"internal err\")\n\t}\n\tuser.Password = \"\"\n\tuser.TaskCode = \"\"\n\treturn c.Status(200).JSON(&fiber.Map{\n\t\t\"user\": user,\n\t\t\"lists\": lists,\n\t})\n}", "func (handler *UserHandler) GetAllUsers(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\tvar users []*User\n\tul, err := handler.UserService.GetAllUsers()\n\n\tfor _, user := range ul {\n\t\tusers = append(users, user.hidePassword())\n\t}\n\n\tif err != nil {\n\t\thandler.Formatter.JSON(w, http.StatusBadRequest, util.NewError(\"1008\", \"Missing user privileges\", err.Error()))\n\t} else {\n\t\thandler.Formatter.JSON(w, http.StatusOK, users)\n\t}\n\n}", "func checkValidUser(userN, pass string, lock *Lock) bool {\r\n\t\r\n\t//acquires shared lock to read file\r\n\tacquireLock(\"read\", lock)\r\n\r\n\tuserFile, err := 
os.Open(usersFileName)\r\n\t\r\n\tif err != nil {\r\n\t\tlog.Println(\"Could not open file properly.\")\r\n\t\tlog.Fatal(err)\r\n\t}\r\n\t\r\n\tuserScanner := bufio.NewScanner(userFile)\r\n\r\n\tdefer releaseLock(\"read\",lock)\r\n\tdefer userFile.Close()\r\n\r\n\t//goes through user.txt file, checks each user and password, if match\r\n\t//return true, else false\r\n\tfor userScanner.Scan() {\r\n\t\tcurUser := userScanner.Text()\r\n\t\tuserArr := strings.SplitAfter(curUser,\",\") \r\n\t\tif len(userArr) >= 1 && userArr[0] != \"\" {\r\n\t\t\tcurUsername := string(userArr[0][0:len(userArr[0])-1])\r\n\t\t\tcurPassword := string(userArr[1][0:len(userArr[1])-1])\r\n\t\t\tif (userN == curUsername) && (pass == curPassword) {\r\n\t\t\t\treturn true\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn false\r\n}", "func Validation(a User) error {\n\tfmt.Println(\"user :: \", a)\n\tvar rxEmail = regexp.MustCompile(\"^[a-zA-Z0-9.!#$%&'*+\\\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$\")\n\tswitch {\n\tcase len(strings.TrimSpace(a.Password)) == 0:\n\t\treturn ErrPasswordInvalid\n\tcase len(strings.TrimSpace(a.Email)) == 0 || !rxEmail.MatchString(a.Email):\n\t\treturn ErrEmailInvalid\n\tdefault:\n\t\treturn nil\n\t}\n}", "func users(c *gin.Context) {\n\tvar result []interface{}\n\terr := tweetsColl().Find(bson.M{}).Distinct(\"user\", &result) // returns all the distict users in an array, not just the length which would be optimal\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tc.JSON(500, \"error\")\n\t\treturn\n\t}\n\n\tc.JSON(200, gin.H{\"users\": len(result)})\n}", "func (mt UserCollection) Validate() (err error) {\n\tfor _, e := range mt {\n\t\tif e != nil {\n\t\t\tif err2 := e.Validate(); err2 != nil {\n\t\t\t\terr = goa.MergeErrors(err, err2)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func ValidaUser(user, password, tipoUser string) (retorno bool) {\n\tvar SQLSelect string\n\n\tdb, err := sql.Open(\"mysql\", UserDB+\":\"+PassDB+\"@tcp(\"+HostDB+\":\"+PortDB+\")/\"+DatabaseDB+\"?charset=utf8\")\n\tcheckErr(err)\n\n\tdefer db.Close()\n\n\t// Se tipoUser = 0 quer dizer que nao eh necessario ser admin para efetuar validacao\n\tif tipoUser == \"0\" {\n\t\tSQLSelect = \"SELECT COUNT(nome_usuario) FROM usuarios WHERE ativo=1 AND usuario='\" + user + \"' AND senha='\" + password + \"'\"\n\t} else {\n\t\tSQLSelect = \"SELECT COUNT(nome_usuario) FROM usuarios WHERE ativo=1 AND usuario='\" + user + \"' AND senha='\" + password + \"' AND admin='\" + tipoUser + \"'\"\n\t}\n\n\trows, err := db.Query(SQLSelect)\n\n\tcheckErr(err)\n\n\tfor rows.Next() {\n\t\tvar count int\n\t\terr = rows.Scan(&count)\n\t\tcheckErr(err)\n\n\t\tif count >= 1 {\n\t\t\treturn true\n\t\t} else {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn retorno\n}", "func withMultipleArgs2(name string, age int) (user, error) {\n\tfor _, user := range users {\n\t\tif user.name == name && user.age == age {\n\t\t\treturn user, nil\n\t\t}\n\t}\n\n\treturn user{}, errors.New(\"user is not found\")\n}", "func validate(sessions *session.Sessions, users *user.Users, permissions *user.Permissions, writer http.ResponseWriter, request *http.Request, kind db.PK) (*session.Session, *user.User) {\n\tses, usr := GetSessionAndUser(sessions, users, writer, request)\n\tif usr == nil {\n\t\treturn nil, nil\n\t}\n\n\tallowed, err := permissions.IsAllowed(kind, usr)\n\tif err != nil {\n\t\thttp.Error(writer, err.Error(), http.StatusInternalServerError)\n\t\treturn nil, nil\n\t}\n\n\tif !allowed {\n\t\thttp.Error(writer, 
\"\", http.StatusForbidden)\n\t\treturn nil, nil\n\t}\n\treturn ses, usr\n}", "func users(ds *datastore.Client) ([]User, error) {\n\tdst := make([]User, 0)\n\t_, err := ds.GetAll(context.Background(), datastore.NewQuery(userKind).Limit(limit).Project(usernameField, userIDField).Order(usernameField), &dst)\n\treturn dst, err\n}", "func (uc *UserCreate) check() error {\n\tif _, ok := uc.mutation.Age(); !ok {\n\t\treturn &ValidationError{Name: \"age\", err: errors.New(\"ent: missing required field \\\"age\\\"\")}\n\t}\n\tif v, ok := uc.mutation.Age(); ok {\n\t\tif err := user.AgeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"age\", err: fmt.Errorf(\"ent: validator failed for field \\\"age\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := uc.mutation.Name(); !ok {\n\t\treturn &ValidationError{Name: \"name\", err: errors.New(\"ent: missing required field \\\"name\\\"\")}\n\t}\n\tif v, ok := uc.mutation.ID(); ok {\n\t\tif err := user.IDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id\", err: fmt.Errorf(\"ent: validator failed for field \\\"id\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func getUser(vUserName string, vPassword string) (userobj Users, err error) {\n\t/*\n\t\tDefining the variables\n\t*/\n\tvar vDBId, vName, vEmail, vToken, vsqlPassword sql.NullString\n\tvar vIsActive sql.NullBool\n\n\t/*\n\t\tcreating a sql query using parameter\n\t*/\n\tsqlStmt := fmt.Sprintf(`SELECT id,Name,Email,Token,Is_Active,Password FROM shard_1.users WHERE LOWER(Email)=lower('%s') and lower(password) = md5('%s')`, strings.ToLower(vUserName), vPassword)\n\n\t/*\n\t\tExecuting the sql query\n\t\tIn case of error, error information will be returned\n\t\tUser object is returned in case credentials are valid\n\t*/\n\terr = db.QueryRow(sqlStmt).Scan(&vDBId, &vName, &vEmail, &vToken, &vIsActive, &vsqlPassword)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\terr = fmt.Errorf(\"unknown email : %s\", err.Error())\n\t\treturn\n\t}\n\tuserobj.DBId = vDBId.String\n\tuserobj.Name = vName.String\n\tuserobj.Email = vEmail.String\n\tuserobj.Token = vToken.String\n\tuserobj.IsActive = vIsActive.Bool\n\tuserobj.Password = \"\"\n\treturn\n}", "func validate(id string) int {\n collection := db.Database(dbName).Collection(\"users\")\n filter := bson.D{{ \"discordid\", id}}\n\n // Check if the api retrieved a discord id\n if (id == \"\") {\n return 500\n }\n\n // Check if there is a db entry for the discord id\n var result User\n err := collection.FindOne(context.TODO(), filter).Decode(&result)\n if err != nil {\n fmt.Println(err)\n return 401\n }\n\n // Check if there is an active membership id\n if (result.ActiveMembership == \"-1\") {\n return 300\n }\n\n return 200\n}", "func UserAuthentication(username, password string) (token string, err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var login = func(username, password string, loginData map[string]interface{}) (err error) {\n err = createError(010)\n\n var salt = loginData[\"_salt\"].(string)\n var loginUsername = loginData[\"_username\"].(string)\n var loginPassword = loginData[\"_password\"].(string)\n\n if SHA256(username, salt) == loginUsername {\n if SHA256(password, salt) == loginPassword {\n err = nil\n }\n }\n\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n for id, loginData := range users {\n err = login(username, password, loginData.(map[string]interface{}))\n if err == nil {\n token = setToken(id, \"-\")\n return\n }\n }\n\n return\n}", "func checkUserFormChanged(uForm storage.User, 
originalUser storage.User) (bool, storage.User) {\n\tfmt.Printf(\"---originalUser = %v, type = %T\\n\", originalUser.FirstName, originalUser.FirstName)\n\tfmt.Printf(\"---user in form = %v, type = %T\\n\", uForm.FirstName, uForm.FirstName)\n\n\tchanged := false\n\tif uForm.FirstName != originalUser.FirstName && uForm.FirstName != \"\" {\n\t\toriginalUser.FirstName = uForm.FirstName\n\t\tchanged = true\n\t}\n\tif uForm.LastName != originalUser.LastName && uForm.LastName != \"\" {\n\t\toriginalUser.LastName = uForm.LastName\n\t\tchanged = true\n\t}\n\tif uForm.Mail != originalUser.Mail && uForm.Mail != \"\" {\n\t\toriginalUser.Mail = uForm.Mail\n\t\tchanged = true\n\t}\n\tif uForm.Address != originalUser.Address && uForm.Address != \"\" {\n\t\toriginalUser.Address = uForm.Address\n\t\tchanged = true\n\t}\n\tif uForm.PostNrAndPlace != originalUser.PostNrAndPlace && uForm.PostNrAndPlace != \"\" {\n\t\toriginalUser.PostNrAndPlace = uForm.PostNrAndPlace\n\t\tchanged = true\n\t}\n\tif uForm.PhoneNr != originalUser.PhoneNr && uForm.PhoneNr != \"\" {\n\t\toriginalUser.PhoneNr = uForm.PhoneNr\n\t\tchanged = true\n\t}\n\tif uForm.OrgNr != originalUser.OrgNr && uForm.OrgNr != \"\" {\n\t\toriginalUser.OrgNr = uForm.OrgNr\n\t\tchanged = true\n\t}\n\tif uForm.CountryID != originalUser.CountryID && uForm.CountryID != \"\" {\n\t\toriginalUser.CountryID = uForm.CountryID\n\t\tchanged = true\n\t}\n\tif uForm.BankAccount != originalUser.BankAccount && uForm.BankAccount != \"\" {\n\t\toriginalUser.BankAccount = uForm.BankAccount\n\t\tchanged = true\n\t}\n\treturn changed, originalUser\n}", "func (h *Handler) getAllUsers(c *gin.Context) handlerResponse {\n\n\tusers, err := h.service.User.GetAll()\n\tif err != nil {\n\t\treturn handleError(err)\n\t}\n\tremovePasswords(users)\n\treturn handleOK(StringMap{\"users\": users})\n}", "func GetUsers(ID string, page int64, search string, tipo string) ([]*models.User, bool) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\tvar results []*models.User\n\n\tfindOptions := options.Find()\n\tfindOptions.SetSkip((page - 1) * 20)\n\tfindOptions.SetLimit(20)\n\n\tquery := bson.M{\n\t\t\"Name\": bson.M{\"$regex\": `(?i)` + search},\n\t}\n\n\tcursor, err := collection.Find(ctx, query, findOptions)\n\tif err != nil {\n\t\treturn results, false\n\t}\n\n\tvar found, flagInclude bool\n\n\tfor cursor.Next(ctx) {\n\t\tvar userObj models.User\n\t\terr := cursor.Decode(&userObj)\n\t\tif err != nil {\n\t\t\treturn results, false\n\t\t}\n\n\t\tvar userFollowerObj models.UsersFollowers\n\t\tuserFollowerObj.UserID = ID\n\n\t\tuserFollowerObj.FollowerID = userObj.ID.Hex()\n\n\t\tflagInclude = false\n\n\t\tfound, err = CheckFollowing(userFollowerObj)\n\n\t\t//New users without relations. 
Users not following\n\t\tif tipo == \"new\" && found == false {\n\t\t\tflagInclude = true\n\t\t}\n\n\t\t//User following\n\t\tif tipo == \"follow\" && found == true {\n\t\t\tflagInclude = true\n\t\t}\n\n\t\t//Validation IDs not sames\n\t\tif userFollowerObj.FollowerID == ID {\n\t\t\tflagInclude = false\n\t\t}\n\n\t\tif flagInclude == true {\n\t\t\t//Clean values that not used\n\t\t\tuserObj.Password = \"\"\n\t\t\tuserObj.Biography = \"\"\n\t\t\tuserObj.WebSite = \"\"\n\t\t\tuserObj.Location = \"\"\n\t\t\tuserObj.Banner = \"\"\n\t\t\tuserObj.Email = \"\"\n\n\t\t\tresults = append(results, &userObj)\n\t\t}\n\t}\n\n\terr = cursor.Err()\n\tif err != nil {\n\t\treturn results, false\n\t}\n\tcursor.Close(ctx)\n\treturn results, true\n}", "func GetAllUser(w http.ResponseWriter, r *http.Request) {\n\temail, err := getEmailFromTokenHeader(r)\n\tif err != nil || email == \"\" {\n\t\thttp.Error(w, \"Invalid Token\", http.StatusUnauthorized)\n\t\treturn\n\t}\n\tw.Header().Set(\"Context-Type\", \"application/x-www-form-urlencoded\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t// get all the users in the db\n\tusers, err := database.GetAllUsers()\n\tif err != nil {\n\t\tlogrus.Errorf(\"Unable to get all user. %v\", err)\n\t\thttp.Error(w, \"\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// send all the users as response\n\terr = json.NewEncoder(w).Encode(&models.UserList{Users: users})\n\tif err != nil {\n\t\tlogrus.Errorf(err.Error())\n\t\treturn\n\t}\n}", "func validateUser(userId int) error {\n\n\t// return an error\n\treturn &UnauthorizedError{userId, 1234}\n}", "func (a *Ares) getBotandAdmin() {\n\tapi := slack.New(a.SlackAppToken)\n\tusers, err := api.GetUsers()\n\n\tif err != nil {\n\t\tlog.Fatal(\"Failed to fetch slack users info\", err.Error())\n\t}\n\n\ta.Users = make(map[string]string)\n\ta.MutedUsers = make(map[string]bool)\n\n\tfor _, user := range users {\n\n\t\tif user.Profile.ApiAppID == a.SlackAppID {\n\t\t\ta.BotUserID = user.ID\n\t\t}\n\n\t\tif user.IsAdmin {\n\t\t\ta.Admins = append(a.Admins, user.ID)\n\t\t} else {\n\t\t\ta.Users[user.Name] = user.ID\n\t\t}\n\t}\n\n\tif a.BotUserID == \"\" {\n\t\tlog.Fatal(\"Unable to find bot user on the Slack\")\n\t}\n}", "func (h *Handler) list() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tentities, err := h.UserDAO.FetchAll(r.Context())\n\t\tswitch {\n\t\tcase errors.Is(err, errorx.ErrNoUser):\n\t\t\tmsg := &errorMessage{\n\t\t\t\tMessage: fmt.Sprintf(\"no users exist\"),\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusNotFound, msg)\n\t\t\treturn\n\t\tcase err != nil:\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user datastore error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusInternalServerError, msg)\n\t\t\treturn\n\t\tdefault:\n\t\t\tresponse.JSON(w, http.StatusOK, entities)\n\t\t}\n\t}\n}", "func (a UsersApi) GetUsers(pageSize int, pageNumber int, id []string, jabberId []string, sortOrder string, expand []string, integrationPresenceSource string, state string) (*Userentitylisting, *APIResponse, error) {\n\tvar httpMethod = \"GET\"\n\t// create path and map variables\n\tpath := a.Configuration.BasePath + \"/api/v2/users\"\n\tdefaultReturn := new(Userentitylisting)\n\tif true == false {\n\t\treturn defaultReturn, nil, errors.New(\"This message brought to you by the laws of physics being broken\")\n\t}\n\n\n\theaderParams := make(map[string]string)\n\tqueryParams := make(map[string]string)\n\tformParams := url.Values{}\n\tvar postBody 
interface{}\n\tvar postFileName string\n\tvar fileBytes []byte\n\t// authentication (PureCloud OAuth) required\n\n\t// oauth required\n\tif a.Configuration.AccessToken != \"\"{\n\t\theaderParams[\"Authorization\"] = \"Bearer \" + a.Configuration.AccessToken\n\t}\n\t// add default headers if any\n\tfor key := range a.Configuration.DefaultHeader {\n\t\theaderParams[key] = a.Configuration.DefaultHeader[key]\n\t}\n\t\n\tqueryParams[\"pageSize\"] = a.Configuration.APIClient.ParameterToString(pageSize, \"\")\n\t\n\tqueryParams[\"pageNumber\"] = a.Configuration.APIClient.ParameterToString(pageNumber, \"\")\n\t\n\tqueryParams[\"id\"] = a.Configuration.APIClient.ParameterToString(id, \"multi\")\n\t\n\tqueryParams[\"jabberId\"] = a.Configuration.APIClient.ParameterToString(jabberId, \"multi\")\n\t\n\tqueryParams[\"sortOrder\"] = a.Configuration.APIClient.ParameterToString(sortOrder, \"\")\n\t\n\tqueryParams[\"expand\"] = a.Configuration.APIClient.ParameterToString(expand, \"multi\")\n\t\n\tqueryParams[\"integrationPresenceSource\"] = a.Configuration.APIClient.ParameterToString(integrationPresenceSource, \"\")\n\t\n\tqueryParams[\"state\"] = a.Configuration.APIClient.ParameterToString(state, \"\")\n\t\n\n\t// Find an replace keys that were altered to avoid clashes with go keywords \n\tcorrectedQueryParams := make(map[string]string)\n\tfor k, v := range queryParams {\n\t\tif k == \"varType\" {\n\t\t\tcorrectedQueryParams[\"type\"] = v\n\t\t\tcontinue\n\t\t}\n\t\tcorrectedQueryParams[k] = v\n\t}\n\tqueryParams = correctedQueryParams\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ \"application/json\", }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := a.Configuration.APIClient.SelectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\theaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := a.Configuration.APIClient.SelectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\theaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tvar successPayload *Userentitylisting\n\tresponse, err := a.Configuration.APIClient.CallAPI(path, httpMethod, postBody, headerParams, queryParams, formParams, postFileName, fileBytes)\n\tif err != nil {\n\t\t// Nothing special to do here, but do avoid processing the response\n\t} else if err == nil && response.Error != nil {\n\t\terr = errors.New(response.ErrorMessage)\n\t} else if response.HasBody {\n\t\tif \"Userentitylisting\" == \"string\" {\n\t\t\tcopy(response.RawBody, &successPayload)\n\t\t} else {\n\t\t\terr = json.Unmarshal(response.RawBody, &successPayload)\n\t\t}\n\t}\n\treturn successPayload, response, err\n}", "func ViewUsers(w http.ResponseWriter, r *http.Request) error {\n if r.Method == \"GET\" {\n myUid, err0 := GetMyUserId(r)\n if err0 != nil {\n return err0\n }\n ctx1, _ := context.WithTimeout(context.Background(), constant.ContextTimeoutDuration)\n response, err := BackendClientIns.FindAllUsers(ctx1, &FindAllUsersRequest{})\n if err != nil {\n return err\n }\n allUsers := response.Users\n newUserList := make([]user, 0)\n for _, value := range allUsers {\n if value.UserId == myUid { // Exclude myself\n continue\n }\n ctx, _ := context.WithTimeout(context.Background(), constant.ContextTimeoutDuration)\n responseFromWhetherFollowing, _ := 
BackendClientIns.UserCheckWhetherFollowing(ctx,\n &UserCheckWhetherFollowingRequest{\n SourceUserId: myUid,\n TargetUserId: value.UserId,\n })\n newUserList = append(newUserList, user{Name: value.UserName,\n Followed: responseFromWhetherFollowing.Ok,\n Id: strconv.Itoa(int(value.UserId))})\n }\n view := viewUserView{\n UserList: newUserList,\n }\n log.Println(view.UserList)\n t, _ := template.ParseFiles(constant.RelativePathForTemplate + \"users.html\")\n w.Header().Set(\"Content-Type\", \"text/html\")\n t.Execute(w, view)\n }\n return nil\n}", "func TestListUser(t *testing.T) {\n\tctx := context.Background()\n\tconn, err := grpc.DialContext(ctx, \"\", grpc.WithInsecure(), grpc.WithContextDialer(bufDialer))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer conn.Close()\n\tclient := api.NewUserServiceClient(conn)\n\tuser1 := createUser(t, ctx, client, 0)\n\tuser2 := createUser(t, ctx, client, 0)\n\tuser3 := createUser(t, ctx, client, 0)\n\tuser4 := createUser(t, ctx, client, 0)\n\n\ttestCases := []struct {\n\t\tcaseName string\n\t\tlistUserRequest api.ListUserRequest\n\t\tresultLen int\n\t\tisPositive bool\n\t\terrCode codes.Code\n\t\terrMsg string\n\t}{\n\t\t{\n\t\t\tcaseName: \"ListUser: limit = 1, page = 1\",\n\t\t\tlistUserRequest: api.ListUserRequest{\n\t\t\t\tPageFilter: &api.PageFilter{\n\t\t\t\t\tLimit: 1,\n\t\t\t\t\tPage: 1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tresultLen: 1,\n\t\t\tisPositive: true,\n\t\t},\n\t\t{\n\t\t\tcaseName: \"ListUser: limit = 2, page = 1\",\n\t\t\tlistUserRequest: api.ListUserRequest{\n\t\t\t\tPageFilter: &api.PageFilter{\n\t\t\t\t\tLimit: 2,\n\t\t\t\t\tPage: 1,\n\t\t\t\t},\n\t\t\t},\n\t\t\tresultLen: 2,\n\t\t\tisPositive: true,\n\t\t},\n\t\t{\n\t\t\tcaseName: \"ListUser: limit = 2, page = 2\",\n\t\t\tlistUserRequest: api.ListUserRequest{\n\t\t\t\tPageFilter: &api.PageFilter{\n\t\t\t\t\tLimit: 2,\n\t\t\t\t\tPage: 2,\n\t\t\t\t},\n\t\t\t},\n\t\t\tresultLen: 2,\n\t\t\tisPositive: true,\n\t\t},\n\t\t{\n\t\t\tcaseName: \"ListUser: limit = 2, page = 0\",\n\t\t\tlistUserRequest: api.ListUserRequest{\n\t\t\t\tPageFilter: &api.PageFilter{\n\t\t\t\t\tLimit: 2,\n\t\t\t\t\tPage: 0,\n\t\t\t\t},\n\t\t\t},\n\t\t\tisPositive: false,\n\t\t\terrMsg: \"page must be > 0, page = 0\",\n\t\t\terrCode: codes.InvalidArgument,\n\t\t},\n\t}\n\n\tfor _, tc := range testCases {\n\t\tt.Run(tc.caseName, func(t *testing.T) {\n\t\t\tusers, err := client.ListUser(ctx, &tc.listUserRequest)\n\t\t\tif tc.isPositive {\n\t\t\t\tassert.Empty(t, err)\n\t\t\t\tassert.Equal(t, tc.resultLen, len(users.Users))\n\t\t\t} else {\n\t\t\t\tassert.NotEmpty(t, err)\n\t\t\t\tfromError, _ := status.FromError(err)\n\t\t\t\tassert.Equal(t, tc.errCode, fromError.Code())\n\t\t\t\tassert.Equal(t, tc.errMsg, fromError.Message())\n\t\t\t}\n\t\t})\n\t}\n\tdeleteUser(t, ctx, client, user1.Id)\n\tdeleteUser(t, ctx, client, user2.Id)\n\tdeleteUser(t, ctx, client, user3.Id)\n\tdeleteUser(t, ctx, client, user4.Id)\n}", "func user(w http.ResponseWriter, r *http.Request) {\n w.Header().Set(\"Content-Type\", \"application/json\")\n\n if r.Method == \"POST\" {\n var id = r.FormValue(\"id\") // ambil data dari client\n var result []byte\n var err error\n\n for _, each := range data {\n if each.ID == id {\n result, err = json.Marshal(each)\n\n if err != nil {\n http.Error(w, err.Error(), http.StatusInternalServerError)\n return\n }\n\n w.Write(result)\n return\n }\n }\n\n http.Error(w, \"User not found\", http.StatusBadRequest)\n return\n }\n\n http.Error(w, \"\", http.StatusBadRequest)\n}", "func GetUsers(c *gin.Context) {\n\n\tlog 
:= logger.WithFields(logrus.Fields{\"tag\": \"GetUsers\"})\n\tlog.Info(\"Fetching users\")\n\n\torganization := auth.GetCurrentOrganization(c.Request)\n\n\tidParam := c.Param(\"id\")\n\tid, err := strconv.ParseUint(idParam, 10, 32)\n\tif idParam != \"\" && err != nil {\n\t\tmessage := fmt.Sprintf(\"error parsing user id: %s\", err)\n\t\tlog.Info(message)\n\t\tc.JSON(http.StatusBadRequest, components.ErrorResponse{\n\t\t\tCode: http.StatusBadRequest,\n\t\t\tMessage: message,\n\t\t\tError: message,\n\t\t})\n\t\treturn\n\t}\n\n\tvar users []auth.User\n\tdb := model.GetDB()\n\terr = db.Model(organization).Related(&users, \"Users\").Error\n\tif err != nil {\n\t\tmessage := \"failed to fetch users\"\n\t\tlog.Info(message + \": \" + err.Error())\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, components.ErrorResponse{\n\t\t\tCode: http.StatusInternalServerError,\n\t\t\tMessage: message,\n\t\t\tError: message,\n\t\t})\n\t} else if id == 0 {\n\t\tc.JSON(http.StatusOK, users)\n\t} else if len(users) == 1 {\n\t\tc.JSON(http.StatusOK, users[0])\n\t} else if len(users) > 1 {\n\t\tmessage := fmt.Sprintf(\"multiple users found with id: %d\", id)\n\t\tlog.Info(message)\n\t\tc.AbortWithStatusJSON(http.StatusConflict, components.ErrorResponse{\n\t\t\tCode: http.StatusConflict,\n\t\t\tMessage: message,\n\t\t\tError: message,\n\t\t})\n\t} else {\n\t\tmessage := fmt.Sprintf(\"user not found with id: %d\", id)\n\t\tlog.Info(message)\n\t\tc.AbortWithStatusJSON(http.StatusNotFound, components.ErrorResponse{\n\t\t\tCode: http.StatusNotFound,\n\t\t\tMessage: message,\n\t\t\tError: message,\n\t\t})\n\t}\n}", "func GetAllUsers(dbmap *gorp.DbMap) func(w http.ResponseWriter, r *http.Request) {\n\n return usersHandler(nil, func(r *http.Request) *[]models.User {\n var users []models.User\n _, dbError := dbmap.Select(&users, \"select * from \\\"user\\\"\")\n if dbError != nil {\n log.Print(dbError)\n }\n\n return &users\n })\n\n}", "func UserStructLevelValidation(sl validator.StructLevel) {\n\n\tuser := sl.Current().Interface().(User)\n\n\tif len(user.FirstName) == 0 && len(user.LastName) == 0 {\n\t\tsl.ReportError(user.FirstName, \"FirstName\", \"fname\", \"fnameorlname\", \"\")\n\t\tsl.ReportError(user.LastName, \"LastName\", \"lname\", \"fnameorlname\", \"\")\n\t}\n\n\t// plus can to more, even with different tag than \"fnameorlname\"\n}", "func (apiContext Context) UsersGet(w http.ResponseWriter, r *http.Request) {\n\n\t// Get context variables.\n\tmodelContext := context.Get(r, \"modelContext\").(model.Context)\n\n\t// Extract query parameters.\n\tquery := r.URL.Query()\n\tidStr := query.Get(\"id\")\n\tstatus := query.Get(\"status\")\n\tcursor := query.Get(\"cursor\")\n\tlimitStr := query.Get(\"limit\")\n\torderBy := query.Get(\"order-by\")\n\torder := query.Get(\"order\")\n\n\t// Parse non-string parametes.\n\tid := []string{}\n\tif idStr != \"\" {\n\t\tid = strings.Split(idStr, \",\")\n\t}\n\tlimit := 20\n\tif limitStr != \"\" {\n\t\tvar err error\n\t\tlimit, err = strconv.Atoi(limitStr)\n\t\tif err != nil {\n\t\t\tresponses.Context(apiContext).RespondWithError(w, r, http.StatusBadRequest, \"The 'limit' parameter is not a valid integer.\", errors.WithStack(err))\n\t\t\treturn\n\t\t}\n\t\tif limit < 1 {\n\t\t\tlimit = 1\n\t\t}\n\t\tif limit > 100 {\n\t\t\tlimit = 100\n\t\t}\n\t}\n\n\t// Build the filters map.\n\tvar filters = map[string]interface{}{}\n\tif len(id) > 0 {\n\t\tfilters[\"id\"] = id\n\t}\n\tif status != \"\" {\n\t\tfilters[\"status\"] = status\n\t}\n\n\t// Access model.\n\tresult, cm, err := 
modelContext.GetUsers(filters, limit, cursor, orderBy, order)\n\tif errors.Cause(err) == model.ErrBadInput {\n\t\tresponses.Context(apiContext).RespondWithError(w, r, http.StatusBadRequest, \"Parameters were wrong.\", errors.WithStack(err))\n\t\treturn\n\t} else if err != nil {\n\t\tresponses.Context(apiContext).RespondWithError(w, r, http.StatusInternalServerError, \"Something went wrong.\", errors.WithStack(err))\n\t\treturn\n\t}\n\n\t// We never return the user's password.\n\tfor i := 0; i < len(result); i++ {\n\t\tresult[i].PasswordHash = \"\"\n\t}\n\n\t// Build the response.\n\tvar response = map[string]interface{}{}\n\tresponse[\"data\"] = result\n\tresponse[\"metadata\"] = cm\n\n\t// Add the next link.\n\tresponse[\"links\"] = map[string]interface{}{\n\t\t\"next\": nil,\n\t}\n\tif cm.NextPageCursor != \"\" {\n\t\tquery = url.Values{}\n\t\tquery.Set(\"id\", strings.Join(id, \",\"))\n\t\tquery.Set(\"status\", status)\n\t\tquery.Set(\"limit\", strconv.Itoa(limit))\n\t\tquery.Set(\"order-by\", orderBy)\n\t\tquery.Set(\"order\", order)\n\t\tquery.Set(\"cursor\", cm.NextPageCursor)\n\t\tfor key := range query {\n\t\t\tif query.Get(key) == \"\" {\n\t\t\t\tquery.Del(key)\n\t\t\t}\n\t\t}\n\t\tnextURL := url.URL{\n\t\t\tScheme: \"http\",\n\t\t\tHost: r.Host,\n\t\t\tPath: r.URL.Path,\n\t\t\tRawQuery: query.Encode(),\n\t\t}\n\t\tresponse[\"links\"].(map[string]interface{})[\"next\"] = nextURL.String()\n\t}\n\n\t// Return response.\n\tresponses.RespondWithJSON(w, http.StatusOK, response)\n}", "func (h UserHTTP) List(w http.ResponseWriter, r *http.Request) {\n\tlistRequest := listRequestDecoder(r)\n\tusers, err := h.svc.ListUsers(r.Context(), listRequest)\n\tif err != nil {\n\t\th.logger.With(r.Context()).Errorf(\"list users error : %s\", err)\n\t\trender.Render(w, r, e.BadRequest(err, \"bad request\"))\n\t\treturn\n\t}\n\trender.Respond(w, r, users)\n}", "func GetUsers(response http.ResponseWriter, request *http.Request) {\n\t//var results TDoc\n\tvar errorResponse = ErrorResponse{\n\t\tCode: http.StatusInternalServerError, Message: \"Internal Server Error.\",\n\t}\n\n\tcollection := Client.Database(\"msdb\").Collection(\"users\")\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tcursor, err := collection.Find(ctx, bson.M{})\n\tvar results []bson.M\n\terr = cursor.All(ctx, &results)\n\n\tdefer cancel()\n\n\tif err != nil {\n\t\terrorResponse.Message = \"Document not found\"\n\t\treturnErrorResponse(response, request, errorResponse)\n\t} else {\n\t\tvar successResponse = SuccessResponse{\n\t\t\tCode: http.StatusOK,\n\t\t\tMessage: \"Success\",\n\t\t\tResponse: results,\n\t\t}\n\n\t\tsuccessJSONResponse, jsonError := json.Marshal(successResponse)\n\n\t\tif jsonError != nil {\n\t\t\treturnErrorResponse(response, request, errorResponse)\n\t\t}\n\t\tresponse.Header().Set(\"Content-Type\", \"application/json\")\n\t\tresponse.Write(successJSONResponse)\n\t}\n\n}", "func (d *webData) modifyUsersWeb(w http.ResponseWriter, r *http.Request) {\n\tr.ParseForm()\n\tip := r.RemoteAddr\n\t//query the userDB for all users and put the returning slice with result in p\n\tallUsers := storage.QueryAllUserInfo(d.PDB)\n\tfmt.Println(\"---ALL USERS FROM DATABASE = \", allUsers)\n\n\t//Execute the web for modify users, range over allUsers to make the select user drop down menu\n\terr := d.tpl.ExecuteTemplate(w, \"modifyUserCompletePage\", allUsers)\n\tif err != nil {\n\t\tfmt.Fprint(w, \"template execution error = \", err)\n\t}\n\n\t//Execute the modifyUserSelection drop down menu 
template\n\terr = d.tpl.ExecuteTemplate(w, \"modifyUserSelection\", allUsers)\n\tif err != nil {\n\t\tfmt.Fprint(w, \"template execution error = \", err)\n\t}\n\n\t//Get the value (number) of the chosen user from form dropdown menu <select name=\"users\">\n\tnum, _ := strconv.Atoi(r.FormValue(\"users\"))\n\tvar singleUser storage.User\n\n\t//Find the selected single user chosen in dropdown in the slice of all users\n\tfor i := range allUsers {\n\t\t//Iterate over the complete struct of users until the chosen user is found\n\t\tif allUsers[i].Number == num {\n\t\t\t//Store the index nr in slice of the chosen user\n\t\t\tsingleUser = allUsers[i]\n\t\t\td.IndexUser = i\n\t\t}\n\t}\n\terr = d.tpl.ExecuteTemplate(w, \"modifyUser\", singleUser) //bruk bare en spesifik slice av struct og send til html template\n\tif err != nil {\n\t\tlog.Println(ip, \"modifyUsersWeb: error = \", err)\n\t}\n\n\tuForm := storage.User{}\n\t//get all the values from the user info fileds of the the\n\tgetFormValuesUserInfo(&uForm, r)\n\n\tchanged := false\n\tchanged, allUsers[d.IndexUser] = checkUserFormChanged(uForm, allUsers[d.IndexUser])\n\n\tfmt.Printf(\"---single user %v, type = %T\\n\", singleUser, singleUser)\n\tfmt.Printf(\"---uallUsers %v, type %T\\n\", allUsers[d.IndexUser], allUsers[d.IndexUser])\n\n\t//if any of the values was changed....update information into database\n\tif changed {\n\t\tstorage.UpdateUser(d.PDB, allUsers[d.IndexUser])\n\t}\n}", "func (s *Server) GetUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\tvar (\n\t\tmeUser = p.ByName(\"userID\") == \"me\"\n\t\tresp = JSON(nil, res)\n\t)\n\n\t// handle other users later!\n\tif !meUser {\n\t\tresp.Error(errors.New(\"kljshadf\"), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tu := User{\n\t\tID: uuid.NewV1(),\n\t\tEmail: \"test\",\n\t}\n\n\t// switch err {\n\t// case nil:\n\tresp.Success(u)\n\t// case api.ErrInvalidUserID:\n\t// \tresp.Error(err, http.StatusNotFound)\n\t// default:\n\t// \tresp.Error(err, http.StatusInternalServerError)\n\t// }\n}", "func GetUserUsername(c *gin.Context) {\n\tuserName := c.Param(\"user_name\")\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_user_username(?)\", userName)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\n\tuser := User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, 
&accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func (app *App) allUsers(w http.ResponseWriter, r *http.Request) {\n\tusers, err := users.GetUsers(app.Db)\n\tif err != nil {\n\t\trespondWithError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\trespondWithJSON(w, http.StatusOK, users)\n}", "func make(res http.ResponseWriter, req *http.Request) {\n if req.Method != \"POST\" {\n http.Error(res, http.StatusText(405), 405)\n return\n }\n //POST request handling\n var newUser newUserPost // make newUser struct to be populated by POST\n decoder := json.NewDecoder(req.Body)\n decoder.Decode(&newUser) //populate struct newUser\n\n //check username field\n if !checkEmail(newUser.Username) {\n res.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n res.WriteHeader(http.StatusOK)\n signupFail := &authFail{Success: false, Message:\"Username must be a valid villanova.edu email address\"}\n if err := json.NewEncoder(res).Encode(signupFail); err != nil {\n log.Fatal(err) //error encoding JSON, should fail\n }\n return\n }\n\n //check the password field\n if !checkPassword(newUser.Password) {\n res.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n res.WriteHeader(http.StatusOK)\n signupFail := &authFail{Success: false, Message:\"Password must be 7+ characters and must include numbers and letters\"}\n if err := json.NewEncoder(res).Encode(signupFail); err != nil {\n log.Fatal(err) //error encoding JSON, should fail\n }\n return\n }\n\n //check names\n if !checkNames(newUser.FirstName, newUser.LastName) {\n res.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n res.WriteHeader(http.StatusOK)\n signupFail := &authFail{Success: false, Message:\"Something went wrong with name input\"}\n if err := json.NewEncoder(res).Encode(signupFail); err != nil {\n log.Fatal(err) //error encoding JSON, should fail\n }\n return\n }\n\n /*\n Form validation passed, check DB for name\n */\n var username string\n err := db.Connection.QueryRow(`\n SELECT username FROM users WHERE username=?\n `, newUser.Username).Scan(&username)\n\n if err != nil {\n if err == sql.ErrNoRows {\n //encrypt and save password, create user\n key := []byte(config.Secret)\n hasher := hmac.New(sha256.New, key)\n hasher.Write([]byte(newUser.Password))\n newPassword := base64.StdEncoding.EncodeToString(hasher.Sum(nil))\n db.Connection.Exec(`\n INSERT INTO users (username,password,firstname,lastname,activated) VALUES (?,?,?,?,?)\n `, newUser.Username, newPassword, newUser.FirstName, newUser.LastName, false)\n\n /*\n Send Email To User\n */\n\n //create token\n tokenizer := jwt.New(jwt.SigningMethodHS256)\n // Set some 
claims -- Data that goes with JWT\n tokenizer.Claims[\"username\"] = newUser.Username\n tokenizer.Claims[\"exp\"] = time.Now().Add(time.Hour * 480).Unix()\n // Sign and get the complete encoded token as a string\n token, err := tokenizer.SignedString([]byte(config.JwtSecret))\n if err != nil {\n log.Fatal(err)\n }\n\n //create new URL\n tokenUrl := &url.URL{\n Scheme : req.URL.Scheme,\n Host : req.URL.Host,\n Path : \"/users/activate\",\n }\n //create query\n query := tokenUrl.Query()\n query.Set(\"token\", token)\n tokenUrl.RawQuery = query.Encode()\n\n //Set Header for Success\n res.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n res.WriteHeader(http.StatusOK)\n signupSuccess := &newUserSuccess{\n Success: true,\n Message:\"Successful Signup! Check your email for confirmation\",\n FirstName: newUser.FirstName,\n }\n //return JSON\n if err := json.NewEncoder(res).Encode(signupSuccess); err != nil {\n log.Fatal(err) //error encoding JSON, should fail\n }\n //send Email\n sendEmail(newUser.Username, tokenUrl.String(), newUser.FirstName, req)\n return;\n } else {\n //handle PostgreSQL error -- internal server error\n http.Error(res, http.StatusText(500), 500)\n return\n }\n } else {\n res.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n res.WriteHeader(http.StatusOK)\n signupFail := &authFail{Success: false, Message:\"That account name already exists\"}\n if err := json.NewEncoder(res).Encode(signupFail); err != nil {\n log.Fatal(err) //error encoding JSON, should fail\n }\n }\n}", "func GetUser(res http.ResponseWriter, req *http.Request) {\n\tres.Header().Set(\"Content-Type\", \"application/json\")\n\tparams := mux.Vars(req)\n\tuserID := bson.ObjectIdHex(params[\"userID\"])\n\n\tquery := bson.M{\n\t\t\"_id\": userID,\n\t}\n\n\tselector := bson.M{\n\t\t\"_id\": 1,\n\t\t\"name\": 1,\n\t\t\"email\": 1,\n\t}\n\n\tuser, err := db.GetUser(query, selector)\n\tif err != nil {\n\t\tif err.Error() == mgo.ErrNotFound.Error() {\n\t\t\tmsg := \"User not found\"\n\n\t\t\tutils.ReturnErrorResponse(http.StatusNotFound, msg, \"\", nil, nil, res)\n\t\t\treturn\n\t\t}\n\n\t\tmsg := \"Error occurred while getting user details\"\n\n\t\tutils.ReturnErrorResponse(http.StatusBadRequest, msg, \"\", nil, nil, res)\n\t\treturn\n\t}\n\n\tmsg := \"Your request processed successfully\"\n\tutils.ReturnSuccessReponse(http.StatusOK, msg, user, res)\n\n}", "func (uc *UserCreate) check() error {\n\tif _, ok := uc.mutation.Firstname(); !ok {\n\t\treturn &ValidationError{Name: \"firstname\", err: errors.New(\"ent: missing required field \\\"firstname\\\"\")}\n\t}\n\tif _, ok := uc.mutation.Lastname(); !ok {\n\t\treturn &ValidationError{Name: \"lastname\", err: errors.New(\"ent: missing required field \\\"lastname\\\"\")}\n\t}\n\tif _, ok := uc.mutation.Username(); !ok {\n\t\treturn &ValidationError{Name: \"username\", err: errors.New(\"ent: missing required field \\\"username\\\"\")}\n\t}\n\tif _, ok := uc.mutation.Password(); !ok {\n\t\treturn &ValidationError{Name: \"password\", err: errors.New(\"ent: missing required field \\\"password\\\"\")}\n\t}\n\treturn nil\n}", "func (u *usecase) GetAll(ctx context.Context) ([]*User, error) {\n\tusers, err := u.repository.GetAll(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"error fetching all users\")\n\t}\n\treturn users, nil\n}", "func (s *ServerState) getUsers(c *gin.Context) {\n\tvar u []User\n\tif err := s.DB.Select(&u, \"select * from users\"); err != nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"status\": 
err})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"user\": u})\n}", "func (u *User) All(ctx context.Context, title, filter string) ([]entity.User, error) {\n\tswitch title {\n\tcase filterCountry:\n\t\treturn u.client.AllByCountry(ctx, filter)\n\tcase filterFirstName:\n\t\tfallthrough\n\tcase filterLastName:\n\t\tfallthrough\n\tcase filterNickname:\n\t\tfallthrough\n\tcase filterEmil:\n\t\treturn u.client.AllWithFilter(ctx, titleMap[title], filter)\n\tdefault:\n\t\treturn u.client.All(ctx)\n\t}\n}", "func AllUsersGet(c *gin.Context) {\n\tmeta := model.TableMetaFromQuery(c)\n\tginutils.WriteGinJSON(c, http.StatusOK, model.AllUsers(meta))\n}", "func (user *User) Validate() (map[string]interface{}, bool) {\n\n\tif !strings.Contains(user.Email, \"@\") {\n\t\treturn u.Message(false, \"Email address is required\"), false\n\t}\n\n\tif len(user.Password) < 6 && user.GoogleUserID == \"\" && user.FacebookUserID == \"\" {\n\t\treturn u.Message(false, \"Password is required\"), false\n\t}\n\n\t//Email must be unique\n\ttemp := &User{}\n\n\t//check for errors and duplicate emails\n\terr := GetDB().Table(\"users\").Where(\"email = ?\", user.Email).First(temp).Error\n\tif err != nil && err != gorm.ErrRecordNotFound {\n\t\tfmt.Printf(\"err = %s\", err)\n\t\treturn u.Message(false, \"Connection error. Please retry\"), false\n\t}\n\tif temp.Email != \"\" {\n\t\treturn u.Message(false, \"Email address already in use by another user.\"), false\n\t}\n\n\treturn u.Message(false, \"Requirement passed\"), true\n}", "func populatePlayers(game *models.Game) (err config.ApiError) {\n\tif game.Player1Id.Valid() {\n\t\tgame.Player1, err = GetUserById(game.Player1Id)\n\t}\n\n\tif game.Player2Id.Valid() {\n\t\tgame.Player2, err = GetUserById(game.Player2Id)\n\t}\n\treturn\n}", "func getAllUsers(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\tusers, err := users.GetAllUsers(ctx)\n\tif err != nil {\n\t\tlog.Error(ctx, \"database problem\", \"error\", err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, \"null\")\n\t\treturn\n\t}\n\tdata, err := json.Marshal(users)\n\tif err != nil {\n\t\tlog.Error(ctx, \"json marshaling problem\", \"error\", err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintf(w, \"null\")\n\t\treturn\n\t}\n\tfmt.Fprintf(w, string(data))\n}" ]
[ "0.6167302", "0.6161984", "0.6040623", "0.60332716", "0.59944403", "0.59804195", "0.5977585", "0.5961505", "0.596062", "0.5959122", "0.5949425", "0.59162706", "0.5908111", "0.58769625", "0.58350945", "0.5828674", "0.5817458", "0.5765263", "0.57598054", "0.575636", "0.5753536", "0.5734337", "0.57271314", "0.5701868", "0.570035", "0.569473", "0.5679482", "0.5660447", "0.5654756", "0.564992", "0.56374407", "0.5628451", "0.5623305", "0.56060314", "0.5598519", "0.5595832", "0.5591714", "0.55870456", "0.55811584", "0.5571748", "0.5556598", "0.55483663", "0.55466646", "0.5546149", "0.5500045", "0.54900587", "0.54725283", "0.547107", "0.54710364", "0.54614604", "0.5459167", "0.54554987", "0.5451225", "0.54478115", "0.5446366", "0.54427886", "0.5439753", "0.54372686", "0.5437188", "0.5432477", "0.54323274", "0.542307", "0.54199237", "0.54199195", "0.54198664", "0.54197484", "0.54197353", "0.54056025", "0.5399484", "0.53921676", "0.5386617", "0.53822684", "0.5381042", "0.53780276", "0.5375804", "0.5375401", "0.5373781", "0.5357091", "0.5354935", "0.5352111", "0.5350508", "0.5345043", "0.5342597", "0.533792", "0.5337363", "0.5323661", "0.5322401", "0.5320279", "0.5317781", "0.53155833", "0.531442", "0.53127223", "0.53124535", "0.53089035", "0.53080755", "0.5306318", "0.53029335", "0.5302387", "0.53004026", "0.52978", "0.52965814" ]
0.0
-1
4 Update a user name
func TestPatchUserService (t *testing.T){ err := PatchUserService(user_01.SocialNumber, mongoDB.User{Name:new_name_user_01}) assert.Equal(t, 200, err.HTTPStatus) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func UserNameChange(w http.ResponseWriter, r *http.Request) {\n\n\tw.Header().Set(\"Content-Type\", \"text/javascript\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\n\tif r.Method != \"POST\" {\n\t\tfmt.Fprintln(w, \"bad request\")\n\t\treturn\n\t}\n\n\tr.ParseForm()\n\tVC := r.Form[\"vc\"][0]\n\tID := r.Form[\"id\"][0]\n\tUserName := r.Form[\"username\"][0]\n\n\tvar temp = new(structs.User)\n\tcollection := session.DB(\"bkbfbtpiza46rc3\").C(\"users\")\n\n\tFindErr := collection.Find(bson.M{\"name\": UserName}).One(&temp)\n\n\tif FindErr == nil {\n\t\tfmt.Fprintln(w, \"reserved\")\n\t\treturn\n\t}\n\n\tif FindErr == mgo.ErrNotFound {\n\n\t\tFindErr = collection.FindId(bson.ObjectIdHex(ID)).One(&temp)\n\n\t\tif temp.Vc == VC {\n\n\t\t\tUpdateErr := collection.UpdateId(temp.ID, bson.M{\"$set\": bson.M{\"name\": UserName}})\n\n\t\t\tif UpdateErr != nil {\n\t\t\t\tfmt.Fprintln(w, \"0\")\n\t\t\t\treturn\n\t\t\t} else {\n\t\t\t\tfmt.Fprintln(w, \"1\")\n\t\t\t\treturn\n\t\t\t}\n\n\t\t} else {\n\t\t\tfmt.Fprintln(w, \"-1\")\n\t\t\treturn\n\t\t}\n\n\t}\n\n}", "func (u *User) changeName(name string) {\n u.name = name\n}", "func updateUser(w http.ResponseWriter, r *http.Request) {\r\n\tparams := mux.Vars(r)\r\n\tstmt, err := db.Prepare(\"UPDATE users SET name = ? WHERE id = ?\")\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tbody, err := ioutil.ReadAll(r.Body)\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tkeyVal := make(map[string]string)\r\n\tjson.Unmarshal(body, &keyVal)\r\n\tnewName := keyVal[\"name\"]\r\n\t_, err = stmt.Exec(newName, params[\"id\"])\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tfmt.Fprintf(w, \"User with id = %s was updated\", params[\"id\"])\r\n}", "func (this *Queries_UServ) UpdateUserName(ctx context.Context, db persist.Runnable) *Query_UServ_UpdateUserName {\n\treturn &Query_UServ_UpdateUserName{\n\t\topts: this.opts,\n\t\tctx: ctx,\n\t\tdb: db,\n\t}\n}", "func (ah *AuthHandler) UpdateUsername(w http.ResponseWriter, r *http.Request) {\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tuser := &data.User{}\n\terr := data.FromJSON(user, r.Body)\n\tif err != nil {\n\t\tah.logger.Error(\"unable to decode user json\", \"error\", err.Error())\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t// data.ToJSON(&GenericError{Error: err.Error()}, w)\n\t\tdata.ToJSON(&GenericResponse{Status: false, Message: err.Error()}, w)\n\t\treturn\n\t}\n\n\tuser.ID = r.Context().Value(UserIDKey{}).(string)\n\tah.logger.Debug(\"udpating username for user : \", user)\n\n\terr = ah.repo.UpdateUsername(context.Background(), user)\n\tif err != nil {\n\t\tah.logger.Error(\"unable to update username\", \"error\", err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t// data.ToJSON(&GenericError{Error: err.Error()}, w)\n\t\tdata.ToJSON(&GenericResponse{Status: false, Message: \"Unable to update username. 
Please try again later\"}, w)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusOK)\n\t// data.ToJSON(&UsernameUpdate{Username: user.Username}, w)\n\tdata.ToJSON(&GenericResponse{\n\t\tStatus: true,\n\t\tMessage: \"Successfully updated username\",\n\t\tData: &UsernameUpdate{Username: user.Username},\n\t}, w)\n}", "func (c Controller) UpdateName(w http.ResponseWriter, r *http.Request) {\n\tuserID := chi.URLParam(r, \"userID\")\n\trequest := &UpdateNameRequest{}\n\tif err := render.Bind(r, request); err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\terr := c.userService.UpdateName(r.Context(), userID, request.Name)\n\tif err != nil {\n\t\thttp.Error(w, \"could not update user\", http.StatusInternalServerError)\n\t\treturn\n\t}\n}", "func (srv *Service) UpdateUserName(id string, name string) (*string, error) {\n\t//check if the email already exists\n\t_, err := srv.mongoRepository.GetUserByID(id)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t//call driven adapter responsible for updating a user's name inside the database\n\t_, err = srv.mongoRepository.UpdateUserName(id, name)\n\n\tif err != nil {\n\t\t//return the error sent by the repository\n\t\treturn nil, err\n\t}\n\n\tmessage := \"Name updated sucessfully\"\n\n\treturn &message, nil\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\n}", "func (*UserSvr) UpdateByName(req *UpdateByNameReq, rsp *UpdateByNameRsp) int {\n\titem := &req.UserItem\n\tif item.Username == \"\" || item.Nickname == \"\" || item.Profile == \"\" {\n\t\treturn common.ErrArg\n\t}\n\tdb := common.GetDb()\n\tresult, err := db.Exec(\"UPDATE users set nickname=? , profile = ? where username=?\",\n\t\treq.UserItem.Nickname, req.UserItem.Profile, req.UserItem.Username)\n\tif err != nil {\n\t\tlog.Printf(\"Update failed,err:%v\", err)\n\t\treturn common.ErrDB\n\t}\n\trowsaffected, err := result.RowsAffected()\n\tif err != nil || rowsaffected != 1 {\n\t\tlog.Printf(\"failed, RowsAffected:%d err:%v\", rowsaffected, err)\n\t\treturn common.ErrArg\n\t}\n\t// here should use redis, but found that this redis lib dose not use conection pool,\n\t// not useful to bench test\n\treturn 0\n}", "func (a *LocalKeyAgent) UpdateUsername(username string) {\n\ta.username = username\n}", "func UpdateUser(c *gin.Context) {}", "func (this *Client) UpdateName() bool {\n\tfmt.Println(\"Please input user name\")\n\tfmt.Scanln(&this.Name)\n\n\tsendMsg := fmt.Sprintf(\"rename|%v\\n\", this.Name)\n\t_, err := this.conn.Write([]byte(sendMsg))\n\tif err != nil {\n\t\tfmt.Println(\"conn.Write error: \", err)\n\t\treturn false\n\t}\n\treturn true\n}", "func editUser(userID int, firstName string, MI string, lastName string, privLevel int) error {\n\n\tdb, err := sql.Open(\"mysql\", DB_USER_NAME+\":\"+DB_PASSWORD+\"@unix(/var/run/mysql/mysql.sock)/\"+DB_NAME)\n\tif err != nil {\n\t\treturn errors.New(\"No connection\")\n\t}\n\n\tres, err := db.Exec(\"update Users set FirstName=?, MiddleInitial=?, LastName=?, PrivLevel=? 
where UserID=?\", firstName, MI, lastName, privLevel, userID)\n\n\tif err != nil {\n\t\treturn errors.New(\"User update failed.\")\n\t}\n\n\trowsAffected, err := res.RowsAffected()\n\n\tif rowsAffected != 1 {\n\t\treturn errors.New(\"Query didn't match any users.\")\n\t}\n\n\treturn nil\n}", "func partialUpdateUsername(providedUser *models.User, username string) error {\n\n\tif username == \"\" {\n\t\treturn nil\n\t}\n\n\tuser := models.User{}\n\n\tdatabase.DB.Where(models.User{Username: username}).First(&user)\n\n\tif user.ID != 0 {\n\t\treturn &utils.UsernameIsTakenError{}\n\t}\n\n\tprovidedUser.Username = username\n\n\treturn nil\n}", "func (ul *usernameList) modifyUsername(id int, name string) error {\n\tul.Lock()\n\tdefer ul.Unlock()\n\n\t_, exists := ul.usernameToID[name]\n\tif exists {\n\t\treturn errors.New(\"username already exists\")\n\t}\n\n\toldName, ok := ul.idToUsername[id]\n\tif !ok {\n\t\treturn errors.New(\"connection does not have a username already\")\n\t}\n\n\tif oldName == name {\n\t\treturn nil\n\t}\n\n\tul.idToUsername[id] = name\n\tul.usernameToID[name] = id\n\tdelete(ul.usernameToID, oldName)\n\n\treturn nil\n}", "func (c *Client) ModifyUserName(name, token string, autoSign bool) (*ModifyUserNameResponse, error) {\n\tvar createSign string\n\tif autoSign {\n\t\tcreateSign = \"1\"\n\t}\n\tp := modifyUserNameParams{\n\t\tUserName: name,\n\t\tCreateSignature: createSign,\n\t}\n\tparamMap, err := toMap(p, map[string]string{\n\t\t\"token\": token,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tret, err := httpRequest(c, p.URI(), paramMap, nil, func() interface{} {\n\t\treturn &ModifyUserNameResponse{}\n\t})\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trsp := ret.(*ModifyUserNameResponse)\n\n\tif err = checkErr(rsp.Code, rsp.SubCode, rsp.Message); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn rsp, nil\n}", "func UpdateUserCodeByName(c *gin.Context) {\n\tname := c.PostForm(\"name\")\n\n\tif name == \"\" {\n\t\tc.JSON(200, gin.H{\"code\": \"-1\", \"msg\": \"name cannot be empty!\"})\n\t}\n\n\tif db.UserCheckIsExist(name) {\n\t\trand := libs.RandString(10)\n\t\tdb.UserUpdateCodeGetByName(name, rand)\n\t\tc.JSON(200, gin.H{\"code\": \"0\", \"co\": rand})\n\t} else {\n\t\tc.JSON(200, gin.H{\"code\": \"-2\", \"msg\": \"name does not exist!\"})\n\t}\n\n}", "func setDetails(username string, name string, email string, pw string) {\n\tvar buf bytes.Buffer\n\t// We're not marshalling to avoid making a bloated payload with empty tags\n\tbuf.WriteString(\"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\" standalone=\\\"yes\\\"?>\")\n\tbuildUserXML(&buf, username, name, email, pw)\n\n\treq, _ := http.NewRequest(\"PUT\", cfg.Main.Server+\"users\"+\"/\"+username, bytes.NewReader(buf.Bytes()))\n\treq.Header.Set(\"Content-Type\", \"application/xml\")\n\treq.Header.Set(\"Authorization\", cfg.Main.Key)\n\n\tclient := &http.Client{}\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\tp(\"Couldn't connect to Openfire server: %s\", err.Error())\n\t\treturn\n\t}\n\tdefer res.Body.Close()\n\tif res.StatusCode == 200 {\n\t\tp(\"Successfully changed details for %s\", username)\n\t}\n}", "func (c *myClient) updateUserPasswordByName(u string, p string) (err error) {\n\tuserRef, err := c.findObjectByNameReturnReference(\"user\", u)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif userRef == nil {\n\t\tlogger.Fatalf(\"%s not found. 
Exiting\", u)\n\t}\n\tlogger.Infof(\"Changing %s's password\\n\", u)\n\tlogger.Infof(\"Using \" + c.username)\n\tpostBody := fmt.Sprintf(`\n\t\t\t{\n\t\t\t\t\"type\": \"CredentialUpdateParameters\",\n\t\t\t\t\"newCredential\": {\n\t\t\t\t\t\"type\": \"PasswordCredential\",\n\t\t\t\t\t\"password\": \"%s\"\n\t\t\t\t}\n\t\t\t}\n\t\t`, p)\n\tc.LoadAndValidate()\n\t_, _, err = c.httpPost(fmt.Sprintf(\"user/%s/updateCredential\", userRef), postBody)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif c.username == u {\n\t\tc.password = p\n\t\tc.LoadAndValidate()\n\t}\n\treturn err\n}", "func (a *Client) UpdateUsername(params *UpdateUsernameParams, authInfo runtime.ClientAuthInfoWriter) (*UpdateUsernameOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewUpdateUsernameParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"updateUsername\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/usernames/{userName}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &UpdateUsernameReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*UpdateUsernameOK), nil\n\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\tw.Write([]byte(\"Updating a user\"))\n}", "func updateUser(user *User) {\n\tvar dummy string\n\n\t// update user information\n\tdb.QueryRow(\"UPDATE users SET username=$1, realname=$2, email=$3, token=$4\"+\n\t\t\" WHERE gh_id=$5\", user.User_name, user.Real_name, user.Email,\n\t\tuser.Token, user.GH_Id).Scan(&dummy)\n}", "func Update(user User) error {\n\n}", "func newUser() string {\n\treturn goherokuname.Haikunate()\n}", "func (*UsersController) Rename(ctx *gin.Context) {\n\tvar renameJSON tat.RenameUserJSON\n\tctx.Bind(&renameJSON)\n\n\tvar userToRename = tat.User{}\n\tfound, err := userDB.FindByUsername(&userToRename, renameJSON.Username)\n\tif !found {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"user with username %s does not exist\", renameJSON.Username)})\n\t\treturn\n\t} else if err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": fmt.Errorf(\"Error while fetching user with username %s\", renameJSON.Username)})\n\t\treturn\n\t}\n\n\tif err := userDB.Rename(&userToRename, renameJSON.NewUsername); err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"Rename %s user to %s failed\", renameJSON.Username, renameJSON.NewUsername)})\n\t\treturn\n\t}\n\n\tif err := messageDB.ChangeUsernameOnMessages(userToRename.Username, renameJSON.NewUsername); err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"Rename %s user to %s failed\", renameJSON.Username, renameJSON.NewUsername)})\n\t\treturn\n\t}\n\tctx.JSON(http.StatusCreated, gin.H{\"info\": \"user is renamed\"})\n}", "func updateName(name string, id int) {\n\tsqlStatement := `\nUPDATE people\nSET Name = $2\nWHERE id = $1;`\n\t_, err := Db.Exec(sqlStatement, id, name)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(\"People Name Updated\", id)\n\n}", "func ChangeNickname(db *sql.DB) func(http.ResponseWriter, *http.Request, httprouter.Params) {\n\treturn func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\n\t\t//Check token\n\t\tif !(checkToken(w, r)) 
{\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t\treturn\n\t\t}\n\n\t\tvar res s.Response\n\t\terr := json.NewDecoder(r.Body).Decode(&res)\n\t\tif err != nil || res.Nickname == \"\" {\n\t\t\t// Error handling\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\t// Nickname change\n\t\tif !(queryChangeNickname(db, res)) {\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t}\n\t\tdb.Close()\n\t}\n}", "func (self Users) Update() {\n\tsqlStatement := `UPDATE users SET username = $2 WHERE id = $1`\n\t_, err := self.DB.Exec(sqlStatement, self.Id, self.UserName)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func (a *Client) ReplaceUsername(params *ReplaceUsernameParams, authInfo runtime.ClientAuthInfoWriter) (*ReplaceUsernameOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewReplaceUsernameParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"replaceUsername\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/usernames/{userName}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &ReplaceUsernameReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*ReplaceUsernameOK), nil\n\n}", "func (m *BasicAuthentication) SetUsername(value *string)() {\n err := m.GetBackingStore().Set(\"username\", value)\n if err != nil {\n panic(err)\n }\n}", "func (c *Client) ChangeUsername(cu *www.ChangeUsername) (*www.ChangeUsernameReply, error) {\n\tresponseBody, err := c.makeRequest(http.MethodPost,\n\t\twww.PoliteiaWWWAPIRoute, www.RouteChangeUsername, cu)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar cur www.ChangeUsernameReply\n\terr = json.Unmarshal(responseBody, &cur)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal ChangeUsernameReply: %v\", err)\n\t}\n\n\tif c.cfg.Verbose {\n\t\terr := prettyPrintJSON(cur)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn &cur, nil\n}", "func (b *Bot) SetName(newName string) {\n\tv := url.Values{\"role\": {b.BotId}, \"type\": {\"profile\"}, \"dataType\": {\"name\"}, \"name\": {newName}}\n\trequest, _ := http.NewRequest(\"POST\", fmt.Sprintf(\"https://admin-official.line.me/%v/account/profile/name\", b.BotId), strings.NewReader(v.Encode()))\n\trequest.Header.Set(\"Content-Type\", \"application/x-www-form-urlencoded; charset=UTF-8\")\n\trequest.Header.Set(\"X-CSRF-Token\", b.xrt)\n\tresponse, _ := b.client.Do(request)\n\tdefer response.Body.Close()\n}", "func updateUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\tvar data map[string]string\n\tvar username = p.ByName(\"username\")\n\n\tresp, err := getBody(req)\n\tif err != nil {\n\t\twriteJSON(res, 500, jsMap{\"status\": \"Server Error\"})\n\t\treturn\n\t}\n\tif err := json.Unmarshal(resp, &data); err != nil {\n\t\tlog.Println(\"updateUser:\", err)\n\t\twriteJSON(res, 400, jsMap{\"status\": \"Invalid Data\"})\n\t\treturn\n\t}\n\n\tuser, err := getUser(username)\n\tif err != nil {\n\t\twriteJSON(res, 404, jsMap{\"status\": \"Not Found\"})\n\t\treturn\n\t}\n\t// if the user has 2fa enabled, verify their totp key\n\tif user.TOTP != \"\" {\n\t\terr = verifyTOTP(user, data[\"passcode\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\n\tswitch 
p.ByName(\"setting\") {\n\tcase \"password\":\n\t\t_, err = authenticateUser(user, username, data[\"password\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Password\"})\n\t\t\treturn\n\t\t}\n\n\t\tv, err := srpEnv.Verifier([]byte(username), []byte(data[\"new_password\"]))\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tih, verif := v.Encode()\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts \n\t\t\tSET ih = $1, verifier = $2\n\t\t\tWHERE username = $3;`, ih, verif, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_enable\":\n\t\tif !totp.Validate(data[\"passcode\"], data[\"secret\"]) {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Passcode\"})\n\t\t\treturn\n\t\t}\n\n\t\tkey, _ := hex.DecodeString(secretKey)\n\t\tplaintext := []byte(data[\"secret\"])\n\t\tblock, err := aes.NewCipher(key)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tciphertext := make([]byte, aes.BlockSize+len(plaintext))\n\t\tiv := ciphertext[:aes.BlockSize]\n\t\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tmode := cipher.NewCBCEncrypter(block, iv)\n\t\tmode.CryptBlocks(ciphertext[aes.BlockSize:], plaintext)\n\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = $1\n\t\t\tWHERE username = $2;`,\n\t\t\thex.EncodeToString(ciphertext),\n\t\t\tusername,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_disable\":\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = ''\n\t\t\tWHERE username = $1;`, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\twriteJSON(res, 200, jsMap{\"status\": \"OK\"})\n}", "func (mapper *Mapper) Update(newName string) {\n\tlog.Printf(\"%s has changed their name to %s\\n\", mapper.Username, newName)\n\tmapper.Username = newName\n\tDB.Save(&mapper)\n}", "func UpdateUser(userId int64, userData *UserEntry) error {\n _ , nerr := model.Database.Exec(\"UPDATE users SET username = ?, isadmin = ?, email = ? WHERE userid = ?\", userData.Username, userData.IsAdmin, userData.Email, userId)\n if nerr != nil {\n return nerr\n }\n return nil\n}", "func (t *SimpleChaincode) set_user(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var err error\n \n // 0 1\n // \"name\", \"bob\"\n if len(args) < 2 {\n return nil, errors.New(\"Incorrect number of arguments. 
Expecting 2\")\n }\n \n fmt.Println(\"- start set user\")\n fmt.Println(args[0] + \" - \" + args[1])\n termAsBytes, err := stub.GetState(args[0])\n if err != nil {\n return nil, errors.New(\"Failed to get thing\")\n }\n res := SearchTerm{}\n json.Unmarshal(termAsBytes, &res) //un stringify it aka JSON.parse()\n res.User = args[1] //change the user\n \n jsonAsBytes, _ := json.Marshal(res)\n err = stub.PutState(args[0], jsonAsBytes) //rewrite the term with id as key\n if err != nil {\n return nil, err\n }\n \n fmt.Println(\"- end set user\")\n return nil, nil\n}", "func (upi *UserPrivateInfo) Update(name string, email string, password string) {\n\tupi.Name = name\n\tupi.Email = email\n\tupi.Password = password\n}", "func UserUpdate(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n}", "func (m *DeviceManagementIntentDeviceState) SetUserName(value *string)() {\n err := m.GetBackingStore().Set(\"userName\", value)\n if err != nil {\n panic(err)\n }\n}", "func (r *NucypherAccountRepository) UpdateName(name string, updatedBy string, accountID int, now time.Time) error {\n\n\t_, err := r.store.db.NamedExec(`UPDATE nucypher_accounts \n\tSET name=:name, updated_by=:updated_by, updated_at=:updated_at\n\tWHERE (created_by=:updated_by AND account_id=:account_id)`,\n\t\tmap[string]interface{}{\n\t\t\t\"name\": name,\n\t\t\t\"updated_by\": updatedBy,\n\t\t\t\"account_id\": accountID,\n\t\t\t\"updated_at\": now,\n\t\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n\n}", "func (b *ServerBackend) UpdateUser(nUser *NewUser) {\n\tuser := b.GetUser(nUser.Id)\n\tif user == nil {\n\t\treturn\n\t}\n\n\tif user.Name == nUser.Name {\n\t\treturn\n\t}\n\n\tupdate := fromilyclient.User{\n\t\tId: user.Id,\n\t\tName: nUser.Name,\n\t}\n\terr := b.Client.UpdateUser(&update)\n\tif err != nil {\n\t\tuser.Name = nUser.Name\n\t}\n}", "func UpdateUserProfileHandler(w http.ResponseWriter, r *http.Request) {\n\n}", "func updateUser(c *gin.Context) {\n\tvar user user\n\tuserID := c.Param(\"id\")\n\n\tdb.First(&user, userID)\n\n\tif user.Id == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No user found!\"})\n\t\treturn\n\t}\n\n\tdb.Model(&user).Update(\"login\", c.PostForm(\"login\"))\n password,_ := HashPassword(c.PostForm(\"password\"))\n\tdb.Model(&user).Update(\"password\", password)\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": \"User updated successfully!\"})\n}", "func UpdateUser(person *Person, id string) (err error) {\n\tfmt.Println(person)\n\tConfig.DB.Save(person)\n\treturn nil\n}", "func patchAPIUserHandler(w http.ResponseWriter, r *http.Request, _ map[string]string) {\n\tuserName := sessionHandler.GetUserName(r)\n\tuserID, err := getUserID(userName)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tdecoder := json.NewDecoder(r.Body)\n\tvar json JSONUser\n\terr = decoder.Decode(&json)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user id is over 0\n\tif json.ID < 1 {\n\t\thttp.Error(w, \"Wrong user id.\", http.StatusInternalServerError)\n\t\treturn\n\t} else if userID != json.ID { // Make sure the authenticated user is only changing his/her own data. 
TODO: Make sure the user is admin when multiple users have been introduced\n\t\thttp.Error(w, \"You don't have permission to change this data.\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Get old user data to compare\n\ttempUser, err := database.RetrieveUser(json.ID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user email is provided\n\tif json.Email == \"\" {\n\t\tjson.Email = string(tempUser.Email)\n\t}\n\t// Make sure user name is provided\n\tif json.Name == \"\" {\n\t\tjson.Name = string(tempUser.Name)\n\t}\n\t// Make sure user slug is provided\n\tif json.Slug == \"\" {\n\t\tjson.Slug = tempUser.Slug\n\t}\n\t// Check if new name is already taken\n\tif json.Name != string(tempUser.Name) {\n\t\t_, err = database.RetrieveUserByName([]byte(json.Name))\n\t\tif err == nil {\n\t\t\t// The new user name is already taken. Assign the old name.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Name = string(tempUser.Name)\n\t\t}\n\t}\n\t// Check if new slug is already taken\n\tif json.Slug != tempUser.Slug {\n\t\t_, err = database.RetrieveUserBySlug(json.Slug)\n\t\tif err == nil {\n\t\t\t// The new user slug is already taken. Assign the old slug.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Slug = tempUser.Slug\n\t\t}\n\t}\n\tuser := structure.User{ID: json.ID, Name: []byte(json.Name), Slug: json.Slug, Email: []byte(json.Email), Image: []byte(json.Image), Cover: []byte(json.Cover), Bio: []byte(json.Bio), Website: []byte(json.Website), Location: []byte(json.Location)}\n\terr = methods.UpdateUser(&user, userID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif json.Password != \"\" && (json.Password == json.PasswordRepeated) { // Update password if a new one was submitted\n\t\tencryptedPassword, err := authentication.EncryptPassword(json.Password)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\terr = database.UpdateUserPassword(user.ID, encryptedPassword, date.GetCurrentTime(), json.ID)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\t// Check if the user name was changed. 
If so, update the session cookie to the new user name.\n\tif json.Name != string(tempUser.Name) {\n\t\tlogInUser(json.Name, w)\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"User settings updated!\"))\n\treturn\n}", "func (conn *Conn) User(ident, name string) {\n\tconn.Raw(USER + \" \" + ident + \" 12 * :\" + name)\n}", "func UpdateAccountByRequest(userName, newPassWord string, db *sql.DB) error {\n\tquery := `\n\tUPDATE \"users\" SET\n\t\tpassword=$1\n\tWHERE \n\t\tusername = $2;`\n\t_, err := db.Exec(query, newPassWord, userName)\n\tif err != nil {\n\t\tlogrus.WithFields(logrus.Fields{}).Errorf(\"[UpdateAccountByRequest] Update DB err %v\", err)\n\t\treturn errors.New(\"Lỗi hệ thống, vui lòng thử lại\")\n\t}\n\treturn nil\n}", "func (sock *Server) user(username, hostname, Servername, realname string) {\n\tsock.Send(\"USER \" + username + \" \" + hostname + \" \" + Servername + \" :\" + realname)\n}", "func (m *defaultUsernamesClient) AccountUpdateUsername(ctx context.Context, in *mtproto.TLAccountUpdateUsername) (*mtproto.User, error) {\n\tclient := mtproto.NewRPCUsernamesClient(m.cli.Conn())\n\treturn client.AccountUpdateUsername(ctx, in)\n}", "func (db *Database) SetUsername(user *models.User) error {\n\tuser.Name = strings.ToLower(user.Name)\n\tdbUser, err := db.GetUserByName(user.Name)\n\tif err != nil && !errors.Is(err, models.ErrNotFound) {\n\t\treturn err\n\t}\n\n\tif dbUser != nil {\n\t\tif dbUser.Name == user.Name {\n\t\t\treturn nil\n\t\t}\n\n\t\treturn errors.New(\"name already in use\")\n\t}\n\n\t_, err = db.Collection(userCol).Doc(user.ID).Update(db.ctx, []firestore.Update{\n\t\t{Path: \"name\", Value: user.Name},\n\t})\n\n\treturn err\n}", "func (p *person) updateName(newFirstName string) {\n\tp.firstName = newFirstName\n}", "func (s Service) ChangeLastName(ctx context.Context, lastname string) error {\n\tspan := s.tracer.MakeSpan(ctx, \"ChangeLastName\")\n\tdefer span.Finish()\n\n\ttoken := s.retriveToken(ctx)\n\tif token == \"\" {\n\t\treturn errors.New(\"token_is_empty\")\n\t}\n\n\ts.passContext(&ctx)\n\n\tuserID, err := s.authRPC.GetUserID(ctx, token)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t\treturn err\n\t}\n\n\t//check for lastname not to be empty or over 32 or contain !alphabets\n\terr = fromTwoToHundredTwentyEight(lastname)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = dashAndSpace(lastname)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdateReg, err := s.repository.Users.GetDateOfRegistration(ctx, userID)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t\t//internal error\n\t\treturn err\n\t}\n\n\tif time.Since(dateReg) > 5*(24*time.Hour) {\n\t\treturn errors.New(\"time_for_this_action_is_passed\")\n\t}\n\n\terr = s.repository.Users.ChangeLastName(ctx, userID, lastname)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t}\n\n\treturn nil\n}", "func (s *AccountService) ChangeUsername(form bindings.ChangeUsername) (bool, error) {\n\t//TODO: fix me\n\treturn false, nil\n}", "func (m *BasicAuthentication) SetUsername(value *string)() {\n m.username = value\n}", "func (u *User)Update()(e error){\n\tst := `update users set\n\t\t\tuser_name = ?,email = ?,password = ?\n\t\t\twhere user_id = ?`;\n\n\t_,e = db.Exec(st,u.Name,u.Email,u.password,u.Id)\n\n\treturn\n}", "func updateUser(username string, attrs User) bool {\n\toutp, _ := exec.Command(\"getent\", \"shadow\", username).CombinedOutput()\n\tcurrentPassword := strings.TrimSpace(strings.Split(string(outp), \":\")[1])\n\touts, _ := exec.Command(\"getent\", \"passwd\", 
username).CombinedOutput()\n\tcurrentShell := strings.TrimSpace(strings.Split(string(outs), \":\")[6])\n\tcurrentHome := strings.TrimSpace(strings.Split(string(outs), \":\")[5])\n\tcurrentComment := strings.TrimSpace(strings.Split(string(outs), \":\")[4])\n\texistingGroups := getUserGroups(username)\n\n\tif attrs.Shell != currentShell {\n\t\tupdateShell(username, attrs.Shell)\n\t}\n\tif attrs.Password != currentPassword {\n\t\tupdatePassword(username, attrs.Password)\n\t}\n\tif attrs.Home != currentHome {\n\t\tupdateHome(username, attrs.Home)\n\t}\n\tif attrs.Comment != currentComment {\n\t\tupdateComment(username, attrs.Comment)\n\t}\n\tif strings.Join(existingGroups, \",\") != strings.Join(attrs.Groups, \",\") {\n\t\tupdateGroups(username, attrs.Groups)\n\t}\n\n\tkeyFile := path.Join(attrs.Home, \".ssh\", \"authorized_keys\")\n\tfileData := []string{}\n\tif buf, err := ioutil.ReadFile(keyFile); err == nil {\n\t\tfileData = strings.Split(string(buf), \"\\n\")\n\t\tsort.Strings(fileData)\n\t}\n\tif strings.Join(attrs.SSHKeys, \",\") != strings.Join(fileData, \",\") {\n\t\tupdateSSHPublicKeys(username, attrs)\n\t}\n\treturn true\n}", "func (pounterToPerson *person) updateName(newlasttname string) {\n\t(*pounterToPerson).lastname = newlasttname // Update the lastname using the actual location\n}", "func (user *User) EditClientPw() (err error) {\n\t_, err = Db.Exec(\"update user set name=$1, email=$2 , password=$3 , image=$4 where id=$5\",user.Name, user.Email, user.Password, user.Image, user.ID)\n\nfmt.Println(err)\nreturn\n}", "func (s Server) ChangeLastname(ctx context.Context, data *userRPC.Lastname) (*userRPC.Empty, error) {\n\terr := s.service.ChangeLastName(ctx, data.GetLastname())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &userRPC.Empty{}, nil\n}", "func (p *Person) updateName(newFirstName string) {\n\tp.firstName = newFirstName\n}", "func UserUpdatePassword(id int, n string) {\n\tvar i int\n\ti = GetIndexOfUser(id)\n\tuserList[i].uPassword = n\n}", "func updateAccountInfo(oldUsername string, newUsername string, newPassword string) bool {\n\n\tbool1 := true\n\n\tdb := connect()\n\n\tif strings.Contains(newUsername, \"'\") || strings.Contains(newPassword, \"'\") {\n\t\tnewUsername = strings.Replace(newUsername, \"'\", \"\\\\'\", -1)\n\t\tnewPassword = strings.Replace(newPassword, \"'\", \"\\\\'\", -1)\n\t}\n\n\t_, _, err := db.Query(\"UPDATE account SET userName = '\" + newUsername + \"', password = '\" + newPassword + \"' WHERE userName = '\" + oldUsername + \"'\")\n\n\tif err != nil {\n\t\tfmt.Println(\"Database Query Error:\", err)\n\t}\n\n\tdisconnect(db)\n\n\treturn bool1\n}", "func (dbh *DBHandler) UpdateUser(user api.User, newNickname string) {\n\t_, err := dbh.Connection.Exec(`UPDATE users SET nickname = ? 
WHERE telegram_id = ?;`, newNickname, user.ID)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func Update(c *gin.Context) {\n\tvar form model.UpdateForm\n\terr := c.BindJSON(&form)\n\tif err != nil {\n\t\tfailMsg(c, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tusername, exist := c.Get(\"username\")\n\tif !exist {\n\t\tfailMsg(c, http.StatusUnauthorized, \"user not found\")\n\t\treturn\n\t}\n\tvalid, errMsg := validInfo(form)\n\tif !valid {\n\t\tfailMsg(c, http.StatusUnauthorized, errMsg)\n\t\treturn\n\t}\n\n\terr = model.UpdateUser(username.(string), form)\n\tif err != nil {\n\t\tfailMsg(c, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tc.JSON(http.StatusCreated, gin.H{\n\t\t\"success\": true,\n\t\t\"error\": \"\",\n\t\t\"data\": \"ok\",\n\t})\n}", "func (f *Factory) UpdateUser(id string,firstname string, lastname string, age int) * domain.User {\n\treturn &domain.User{\n\t\tID:\t\t\tid,\t\t\n\t\tFirstname: firstname,\n\t\tLastname: lastname,\n\t\tAge: age,\n\t}\n\n}", "func updateUser(client *chef.Client, username string, user chef.User) chef.UserResult {\n\tuser_update, err := client.Users.Update(username, user)\n\tif err != nil {\n\t\tfmt.Println(\"Issue updating user:\", err)\n\t}\n\treturn user_update\n}", "func updateUser(user UserID, params map[string]interface{}, client *Client) error {\n\treturn client.Put(params, \"/access/users/\"+user.ToString())\n}", "func (u *user) SetUsername(username string) error {\n length := utf8.RuneCountInString(username)\n if length < UsernameLenMin || length > UsernameLenMax {\n return fmt.Errorf(\"Username must be len characters long, where %d<=len<=%d\",\n UsernameLenMin, UsernameLenMax)\n }\n pattern := \"[^\" + UsernameAllowedRunes + \"]\"\n if matched, _ := regexp.MatchString(pattern, username); matched {\n return fmt.Errorf(\"Username can only contain charaters '%s'\", UsernameAllowedRunes)\n }\n u.username = username\n\n return nil\n}", "func (cc *Client) SetName(name string) (*User, error) {\n\tif !ValidateName(name) {\n\t\treturn nil, ErrNameInvalid\n\t}\n\tu := &User{}\n\tu.Name = name\n\terr := cc.AuthedRequest(\"POST\", cc.config.BioHost, cc.config.BioPort, \"/v1/bio\", u, u)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn u, nil\n}", "func ChangeCredentials(userID, username, password string) (err error) {\n err = checkInit()\n if err != nil {\n return\n }\n\n err = createError(032)\n\n if userData, ok := data[\"users\"].(map[string]interface{})[userID]; ok {\n //var userData = tmp.(map[string]interface{})\n var salt = userData.(map[string]interface{})[\"_salt\"].(string)\n\n if len(username) > 0 {\n userData.(map[string]interface{})[\"_username\"] = SHA256(username, salt)\n }\n\n if len(password) > 0 {\n userData.(map[string]interface{})[\"_password\"] = SHA256(password, salt)\n }\n\n err = saveDatabase(data)\n }\n\n return\n}", "func (*UsersController) Update(ctx *gin.Context) {\n\tvar updateJSON tat.UpdateUserJSON\n\tctx.Bind(&updateJSON)\n\n\tvar userToUpdate = tat.User{}\n\tfound, err := userDB.FindByUsername(&userToUpdate, updateJSON.Username)\n\tif !found {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"user with username %s does not exist\", updateJSON.Username)})\n\t\treturn\n\t} else if err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": fmt.Errorf(\"Error while fetching user with username %s\", updateJSON.Username)})\n\t\treturn\n\t}\n\n\tif strings.TrimSpace(updateJSON.NewFullname) == \"\" || strings.TrimSpace(updateJSON.NewEmail) == \"\" 
{\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"Invalid Fullname %s or Email %s\", updateJSON.NewFullname, updateJSON.NewEmail)})\n\t\treturn\n\t}\n\n\terr2 := userDB.Update(&userToUpdate, strings.TrimSpace(updateJSON.NewFullname), strings.TrimSpace(updateJSON.NewEmail))\n\tif err2 != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Sprintf(\"Update %s user to fullname %s and email %s failed : %s\", updateJSON.Username, updateJSON.NewFullname, updateJSON.NewEmail, err2.Error())})\n\t\treturn\n\t}\n\n\tctx.JSON(http.StatusCreated, gin.H{\"info\": \"user updated\"})\n}", "func UpdateUserInfo() {\n\treq, err := http.NewRequest(\"GET\", discogsURL+userInfo, nil)\n\tif err != nil {\n\t\tlog.Println(\"Error creating GET HTTP request:\", err)\n\t}\n\tquery := req.URL.Query()\n\tquery.Add(token, file.GetToken())\n\treq.URL.RawQuery = query.Encode()\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tlog.Println(\"Error executing GET HTTP request:\", err)\n\t}\n\tbodyBytes, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tlog.Println(\"Error reading response:\", err)\n\t}\n\tvar user = new(model.User)\n\terr = json.Unmarshal(bodyBytes, &user)\n\tif err != nil {\n\t\tlog.Println(\"Error decoding response:\", err)\n\t}\n\tres := db.InsertUserInfo(user)\n\tif !res {\n\t\tlog.Println(\"User info not updated!\")\n\t}\n}", "func SetUsername(ctx iris.Context, username string) {\n\tctx.Values().Set(usernameContextKey, username)\n}", "func (this *UserController) Update() {\n\tflash \t := beego.ReadFromRequest(&this.Controller)\n\n\tid, _ := strconv.Atoi(this.Ctx.Input.Param(\":id\"))\n\tuser := &models.User{Id:id}\n\tuser.GetOne()\n\n\tnamesurname \t\t:= this.GetString(\"name_surname\")\n\tusername \t\t\t:= this.GetString(\"user_name\")\n\temail \t\t\t\t:= this.GetString(\"email\")\n\tpassword\t \t\t:= this.GetString(\"password\")\n\turl\t\t\t \t\t:= this.GetString(\"url\")\n\tinfo\t\t\t\t:= this.GetString(\"info\")\n\n\tvalid := validation.Validation{}\n\n\tvalid.Email(email, \"Email\")\n\n\tvalid.Required(username, \"Username\")\n\tvalid.Required(password, \"Password\")\n\n\tvalid.MaxSize(username, 20, \"Username\")\n\tvalid.MaxSize(password, 16, \"Password\")\n\n\tswitch {\n\tcase valid.HasErrors():\n\t\tfor _, err := range valid.Errors {\n\t\t\tlog.Println(err.Key, err.Message)\n\t\t}\n\t\tvalid.Error(\"Problem creating user!\")\n\t\tflash.Error(\"Problem creating user!\")\n\t\tflash.Store(&this.Controller)\n\tdefault:\n\t\tuser := &models.User{\n\t\t\tNameSurname\t\t:namesurname,\n\t\t\tUserName\t\t:username,\n\t\t\tEmail\t\t\t:email,\n\t\t\tPassword\t\t:Md5(password),\n\t\t\tUrl\t\t\t\t:url,\n\t\t\tInfo\t\t\t:info,\n\t\t\tRegisterTime \t:time.Now(),\n\t\t}\n\t\tswitch {\n\t\t\tcase user.ExistUserName():\n\t\t\t\tvalid.Error(\"This username is in use!\")\n\t\t\t\tflash.Error(\"This username is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tcase user.ExistEmail():\n\t\t\t\tvalid.Error(\"This email is in use!\")\n\t\t\t\tflash.Error(\"This email is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tdefault:\n\t\t\t\terr := user.Update()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tvalid.Error(fmt.Sprintf(\"%v\", err))\n\t\t\t\tflash.Notice(\"User updated successfully!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\t\tthis.Redirect(\"/admin/users\", 302)\n\t\t\t\treturn\n\t\t}\n\n\t}\n\n\tredirectUrl := \"/admin/users/edit/\" + strconv.Itoa(id)\n\tthis.Redirect(redirectUrl, 
302)\n\tthis.Abort(\"302\")\n\treturn\n}", "func db_update_user(username string, sessionid string, follow_username string, post Post){\n file_path := path.Join(\"db/users\", strings.ToLower(username)+\".json\")\n \n if _, err := os.Stat(file_path); os.IsNotExist(err) {\n return\n }\n user := db_JSON_to_user(username)\n \n if sessionid != \"\" {\n user.SessionID = sessionid\n }\n if follow_username != \"\" {\n user.Follows = append(user.Follows, follow_username)\n }\n if post.Content != \"\" {\n user.Posts = append(user.Posts, &post)\n }\n \n updated_user := db_user_to_JSON(user)\n \n writeerr := ioutil.WriteFile(file_path, updated_user, 0644)\n\n if writeerr != nil {\n panic(writeerr)\n }\n}", "func (internet Internet) UserName(v reflect.Value) (interface{}, error) {\n\treturn internet.username()\n}", "func (h *Handler) updateUser(c *gin.Context) handlerResponse {\n\n\tvar updatedUser types.User\n\tif err := c.ShouldBindJSON(&updatedUser); err != nil {\n\t\treturn handleBadRequest(err)\n\t}\n\tif updatedUser.Name != c.Param(userParameter) {\n\t\treturn handleNameMismatch()\n\t}\n\tstoredUser, err := h.service.User.Update(updatedUser, h.who(c))\n\tif err != nil {\n\t\treturn handleError(err)\n\t}\n\t// Remove password so we do not show in response\n\tstoredUser.Password = \"\"\n\treturn handleOK(storedUser)\n}", "func (u *user) changeEmail(email string) {\n u.email = email\n}", "func (s Service) ChangeFirstName(ctx context.Context, firstname string) error {\n\tspan := s.tracer.MakeSpan(ctx, \"ChangeFirstName\")\n\tdefer span.Finish()\n\n\ttoken := s.retriveToken(ctx)\n\tif token == \"\" {\n\t\treturn errors.New(\"token_is_empty\")\n\t}\n\n\ts.passContext(&ctx)\n\n\tuserID, err := s.authRPC.GetUserID(ctx, token)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t\treturn err\n\t}\n\n\t//check for firstname not to be empty or over 32 or contain !alphabets\n\terr = fromTwoToSixtyFour(firstname)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = dashAndSpace(firstname)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdateReg, err := s.repository.Users.GetDateOfRegistration(ctx, userID)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t\t//internal error\n\t\treturn err\n\t}\n\n\tif time.Since(dateReg) > 5*(24*time.Hour) {\n\t\treturn errors.New(\"time_for_this_action_is_passed\")\n\t}\n\n\terr = s.repository.Users.ChangeFirstName(ctx, userID, firstname)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t}\n\n\treturn nil\n}", "func SyncADUser(l *ldap.Conn, s *db.SFView, typ, env string, orm *gorm.DB) {\n\tbaseDN := MapLDAP[env][\"baseDN\"]\n\tusrEntry := SearchADObject(l, baseDN, s.Username, \"sAMAccountName\", []string{\"sAMAccountName\", \"physicalDeliveryOfficeName\", \"comment\", \"department\", \"memberOf\", \"description\", \"userAccountControl\", \"mail\", \"userPrincipalName\"})\n\n\tif typ == \"update\" {\n\t\t//tmp := GenerateADUserMap(l, s, \"new\", env)\n\t\t//db.CreateORM(orm, tmp)\n\t\tif len(usrEntry) == 0 { // If user does not exist, then create it\n\t\t\tif !s.EmailAddress.Valid || strings.ToLower(s.Username) == strings.ToLower(strings.Split(s.EmailAddress.String, \"@\")[0]) || !strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") { // mail = null or usrname = mail or email not like csisolar.com,then create ad user\n\t\t\t\tu := GenerateADUserMap(l, s, \"new\", env)\n\n\t\t\t\tusrEntryByEmpID := SearchADObject(l, baseDN, s.PersonIDExternal, \"employeeID\", []string{\"sAMAccountName\", \"userAccountControl\"})\n\t\t\t\tif len(usrEntryByEmpID) > 0 
{\n\t\t\t\t\t//存在相同工号的用户,可能是SF改名导致,触发工单提醒服务台\n\t\t\t\t\tsam := usrEntryByEmpID[0].GetAttributeValue(\"sAMAccountName\")\n\t\t\t\t\tif s.Username != sam {\n\t\t\t\t\t\tsubj := \"Username may be changed in SF - \" + s.Username\n\t\t\t\t\t\tbody := fmt.Sprintf(\"Username may be changed in SF. Please check the new AD user.\\n\\nCurrent username in SF: %s\\nOld username in AD: %s\\nEmployeeID: %s\\nSite: %s\\nJob level: %s\\nStart date:%s\", s.Username, sam, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp, output.MailOAOp)\n\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"usernamechange\"], s.Username, s.Username, sam, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Create user, then add to groups\n\t\t\t\tusrDN := fmt.Sprintf(\"CN=%s,OU=Standard Users,OU=Users,OU=Resources,OU=%s,%s,%s\", common.ConvertFullname(s.Username), s.Site, mapSiteGroup[s.Site][\"ou\"], baseDN)\n\t\t\t\tif err := CreateADUser(l, u, usrDN); err == nil {\n\t\t\t\t\tdb.CreateORM(orm, u)\n\n\t\t\t\t\ttimeStartDate, _ := time.Parse(\"2006-01-02\", common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\tif strings.Contains(s.Username, \".\") && time.Now().AddDate(0, 0, -5).Before(timeStartDate) { //people started work long ago will not create ticket\n\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"new\"], s.Username, s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t\tsubj := fmt.Sprintf(\"New employee on boarding. 
Please setup PC - %s\", s.Username)\n\t\t\t\t\t\tbody := fmt.Sprintf(\"New employee on boarding, please find details as below:\\n\\nName: %s\\nUser ID: %s\\nSite: %s\\nJob level: %s\\nStart date: %s\", s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp)\n\t\t\t\t\t}\n\t\t\t\t\tsiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[s.Site][\"group\"], disGrpOU, s.Site, mapSiteGroup[s.Site][\"ou\"], baseDN)\n\t\t\t\t\tsLangGrpDN := mapExclaimer[u[\"comment\"][0]] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\tAddADGroupMember(l, baseDN, siteGrpDN, usrDN)\n\t\t\t\t\tAddADGroupMember(l, baseDN, sLangGrpDN, usrDN)\n\n\t\t\t\t\tif s.EmailAddress.Valid && strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") && strings.Contains(strings.Split(s.EmailAddress.String, \"@\")[0], \".\") {\n\t\t\t\t\t\to365Grp := mapO365[\"P1\"]\n\t\t\t\t\t\tif s.JobLevel.Valid && s.JobLevel.String <= \"E\" {\n\t\t\t\t\t\t\to365Grp = mapO365[\"E3\"]\n\t\t\t\t\t\t}\n\t\t\t\t\t\to365GrpDN := o365Grp + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\t\tmfaGrpDN := mapO365[\"MFA\"] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, o365GrpDN, usrDN)\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, mfaGrpDN, usrDN)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\terr := errors.New(\"Username distinct from mail prefix\")\n\t\t\t\toutput.GenerateLog(err, \"AD user creation\", s.Site+\" | \"+s.Username+\" | \"+s.EmailAddress.String, false)\n\t\t\t}\n\t\t} else { // If user exists, then update it\n\t\t\toldUPN := usrEntry[0].GetAttributeValue(\"userPrincipalName\")\n\t\t\tif !strings.Contains(oldUPN, \"canadiansolar.com\") { //not deal with EG user\n\t\t\t\tu := GenerateADUserMap(l, s, \"update\", env)\n\n\t\t\t\tusrOldDN := usrEntry[0].DN\n\t\t\t\tsam := s.Username\n\n\t\t\t\tnewDept := \"\"\n\t\t\t\tif len(u[\"department\"]) > 0 {\n\t\t\t\t\tnewDept = u[\"department\"][0]\n\t\t\t\t}\n\n\t\t\t\tnewSite := u[\"physicalDeliveryOfficeName\"][0]\n\t\t\t\tnewSLang := u[\"comment\"][0] // slang is modified in GenerateADUserMap function\n\n\t\t\t\toldDept := usrEntry[0].GetAttributeValue(\"department\")\n\t\t\t\toldSite := usrEntry[0].GetAttributeValue(\"physicalDeliveryOfficeName\")\n\t\t\t\toldSLang := usrEntry[0].GetAttributeValue(\"comment\")\n\n\t\t\t\toldMail := usrEntry[0].GetAttributeValue(\"mail\")\n\n\t\t\t\tif strings.TrimSpace(oldMail) == \"\" && s.EmailAddress.Valid && strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") && strings.Contains(strings.Split(s.EmailAddress.String, \"@\")[0], \".\") {\n\t\t\t\t\to365Grp := mapO365[\"P1\"]\n\t\t\t\t\tif s.JobLevel.Valid && s.JobLevel.String <= \"E\" {\n\t\t\t\t\t\to365Grp = mapO365[\"E3\"]\n\t\t\t\t\t}\n\t\t\t\t\to365GrpDN := o365Grp + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\tmfaGrpDN := mapO365[\"MFA\"] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\tAddADGroupMember(l, baseDN, o365GrpDN, usrOldDN)\n\t\t\t\t\tAddADGroupMember(l, baseDN, mfaGrpDN, usrOldDN)\n\t\t\t\t}\n\n\t\t\t\tif err := ModifyADUser(l, u, usrOldDN); err == nil {\n\t\t\t\t\tdb.UpdateORM(orm, sam, u)\n\t\t\t\t\t// Adjust sign language and site groups, move user OU\n\t\t\t\t\tif oldSLang != newSLang {\n\t\t\t\t\t\tnewSLangGrpDN := mapExclaimer[newSLang] + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\t\toldSLangGrpDN := mapExclaimer[oldSLang] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\t\tAddADGroupMember(l, 
baseDN, newSLangGrpDN, usrOldDN)\n\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, oldSLangGrpDN, usrOldDN)\n\t\t\t\t\t} else if strings.TrimSpace(oldDept) != \"\" && strings.TrimSpace(newDept) != \"\" && oldDept != newDept {\n\t\t\t\t\t\tif strings.Contains(s.Username, \".\") {\n\t\t\t\t\t\t\tInformADUserGroupManagers(l, baseDN, s.Username, oldDept, newDept, \"dept\")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif oldSite != newSite {\n\t\t\t\t\t\t//newSiteGrpDN := fmt.Sprintf(\"CN=CN_%s_ALL,%s,OU=%s,%s,%s\", newSite, disGrpOU, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\tnewSiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[newSite][\"group\"], disGrpOU, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\t//oldSiteGrpDN := fmt.Sprintf(\"CN=CN_%s_ALL,%s,OU=%s,%s,%s\", oldSite, disGrpOU, oldSite, mapSiteGroup[oldSite][\"ou\"], baseDN)\n\t\t\t\t\t\toldSiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[oldSite][\"group\"], disGrpOU, oldSite, mapSiteGroup[oldSite][\"ou\"], baseDN)\n\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, newSiteGrpDN, usrOldDN)\n\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, oldSiteGrpDN, usrOldDN)\n\n\t\t\t\t\t\tusrNewDN := fmt.Sprintf(\"CN=%s,OU=Standard Users,OU=Users,OU=Resources,OU=%s,%s,%s\", s.Username, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\tMoveADObject(l, usrOldDN, usrNewDN)\n\n\t\t\t\t\t\tif strings.Contains(s.Username, \".\") {\n\t\t\t\t\t\t\tdata := fmt.Sprintf(mapZohoData[env][\"update\"], s.Username, s.Username, s.PersonIDExternal, oldSite, newSite, s.JobLevel.String, common.ConvertDatetimeToDate(s.JobEffectDate))\n\t\t\t\t\t\t\toutput.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\n\t\t\t\t\t\t\tInformADUserGroupManagers(l, baseDN, s.Username, oldSite, newSite, \"site\")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else if typ == \"disable\" {\n\t\t//fmt.Println(\"hello\", s.PersonIDExternal)\n\t\tusrEntryByEmpID := SearchADObject(l, baseDN, s.PersonIDExternal, \"employeeID\", []string{\"sAMAccountName\", \"userAccountControl\", \"description\", \"memberOf\", \"userPrincipalName\"})\n\t\tif len(usrEntryByEmpID) > 0 {\n\t\t\t//fmt.Println(s.PersonIDExternal)\n\t\t\tfor _, e := range usrEntryByEmpID {\n\t\t\t\toldUPN := usrEntryByEmpID[0].GetAttributeValue(\"userPrincipalName\")\n\t\t\t\tif !strings.Contains(oldUPN, \"canadiansolar.com\") { //not deal with EG user\n\t\t\t\t\tuacCode, _ := strconv.Atoi(e.GetAttributeValue(\"userAccountControl\"))\n\t\t\t\t\tusrOldDN := e.DN\n\n\t\t\t\t\tif uacCode != 514 && strings.Contains(usrOldDN, \"OU=Standard Users\") { // not disabled yet, in standard user OU\n\t\t\t\t\t\tu := GenerateADUserMap(l, s, \"disable\", env)\n\t\t\t\t\t\tif err := DisableADUser(l, usrOldDN); err == nil {\n\t\t\t\t\t\t\t//fmt.Println(u)\n\t\t\t\t\t\t\tif err = ModifyADUser(l, u, usrOldDN); err == nil {\n\t\t\t\t\t\t\t\tsam := s.Username\n\n\t\t\t\t\t\t\t\tdb.DeleteORM(orm, sam, u)\n\n\t\t\t\t\t\t\t\tfor _, g := range e.GetAttributeValues(\"memberOf\") {\n\t\t\t\t\t\t\t\t\tif !strings.Contains(g, \"F_O365\") {\n\t\t\t\t\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, g, usrOldDN)\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t//fmt.Println(usrOldDN)\n\t\t\t\t\t\t\t\tusrNewDn := strings.Replace(usrOldDN, \"OU=Standard Users\", \"OU=Resigned Users\", 1)\n\t\t\t\t\t\t\t\t//fmt.Println(usrNewDn)\n\t\t\t\t\t\t\t\tMoveADObject(l, usrOldDN, usrNewDn)\n\n\t\t\t\t\t\t\t\tif common.ConvertDatetimeToDate(s.StartDate) < \"2020-10-01\" && strings.Contains(s.Username, \".\") 
{\n\t\t\t\t\t\t\t\t\tsubj := fmt.Sprintf(\"Employee resigned. Please delete the canadiansolar mailbox - %s\", s.Username)\n\t\t\t\t\t\t\t\t\tbody := fmt.Sprintf(\"There is an employee resigned whose start date is before 2020-10-01:\\n\\nName: %s\\nUser ID: %s\\nSite: %s\\nJob level: %s\\nStart date: %s\\nLast work date: %s\", s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate), common.ConvertDatetimeToDate(s.LastDateWorked))\n\t\t\t\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp)\n\t\t\t\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"disable\"], s.Username, s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate), common.ConvertDatetimeToDate(s.LastDateWorked))\n\t\t\t\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func (m *User) SetPreferredName(value *string)() {\n m.preferredName = value\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\temail, err := getEmailFromTokenHeader(r)\n\tif err != nil || email == \"\" {\n\t\thttp.Error(w, \"Invalid Token\", http.StatusUnauthorized)\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"PUT\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\n\t// create an empty user of type models.User\n\tvar user models.User\n\t// decode the json request to user\n\terr = json.NewDecoder(r.Body).Decode(&user)\n\tif err != nil {\n\t\tlogrus.Debugf(\"Unable to decode the request body. %v\", err)\n\t\thttp.Error(w, \"Invalid Request\", http.StatusBadRequest)\n\t\treturn\n\t}\n\t// call update user to update the user\n\tupdatedRows, err := database.UpdateUser(email, user.FirstName, user.LastName)\n\tif err != nil {\n\t\tlogrus.Errorf(\"failed updating user. %v\", err)\n\t\thttp.Error(w, \"Invalid Request\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tlogrus.Debugf(\"User updated successfully. 
Total rows/record affected %v\", updatedRows)\n}", "func UpdateConfession(w http.ResponseWriter, r *http.Request) {\n\tfmt.Fprintf(w, \"user updated\")\n}", "func (b *bot) rename(m dggchat.Message, s *dggchat.Session) {\n\tif !isMod(m.Sender) || !strings.HasPrefix(m.Message, \"!rename\") {\n\t\treturn\n\t}\n\n\tparts := strings.Split(m.Message, \" \")\n\tif len(parts) < 3 {\n\t\treturn\n\t}\n\n\toldName := parts[1]\n\tnewName := parts[2]\n\terr := b.renameUser(oldName, newName)\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"'%s' to '%s' by %s failed with '%s'\",\n\t\t\toldName, newName, m.Sender.Nick, err.Error())\n\t\tlog.Printf(\"[##] rename: %s\\n\", msg)\n\n\t\ts.SendPrivateMessage(m.Sender.Nick, msg)\n\t\tb.sendMessageDedupe(\"rename error, check logs\", s)\n\t\treturn\n\t}\n\tlog.Printf(\"[##] rename: '%s' to '%s' by '%s' success!\\n\",\n\t\toldName, newName, m.Sender.Nick)\n\tb.sendMessageDedupe(fmt.Sprintf(\"name changed, %s please reconnect\", oldName), s)\n}", "func UpdateHandler(w http.ResponseWriter, r *http.Request) {\n\t_, _, ok := r.BasicAuth()\n\tif !ok {\n\t\tw.Header().Set(\"WWW-Authenticate\", fmt.Sprintf(`Basic realm=\"%s\"`, BasicAuthRealm))\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tw.Write([]byte(http.StatusText(http.StatusUnauthorized) + \"\\n\"))\n\t\treturn\n\t}\n\tif !reqIsAdmin(r) {\n\t\thttp.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tvar bad bool\n\tvar badmin bool\n\tif r.FormValue(\"ad\") != \"\" {\n\t\tvar err error\n\t\tbad, err = strconv.ParseBool(r.FormValue(\"ad\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tif r.FormValue(\"admin\") != \"\" {\n\t\tvar err error\n\t\tbadmin, err = strconv.ParseBool(r.FormValue(\"admin\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tu := &User{\n\t\tUsername: strings.ToLower(r.FormValue(\"username\")),\n\t\tPassword: r.FormValue(\"password\"),\n\t\tAdmin: badmin,\n\t\tAD: bad,\n\t\tNamespaces: strings.Split(r.FormValue(\"namespaces\"), \",\"),\n\t}\n\tu, uerr := u.UpdateUser()\n\tif uerr != nil {\n\t\thttp.Error(w, uerr.Error(), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tfmt.Fprintf(w, \"User updated\\n\")\n}", "func (u *user) changeEmail(email string) {\n u.email = email\n}", "func (s Service) ChangeNickname(ctx context.Context, nickname *string, permission *account.Permission) error {\n\tspan := s.tracer.MakeSpan(ctx, \"ChangeNickname\")\n\tdefer span.Finish()\n\n\ttoken := s.retriveToken(ctx)\n\tif token == \"\" {\n\t\treturn errors.New(\"token_is_empty\")\n\t}\n\n\ts.passContext(&ctx)\n\n\tuserID, err := s.authRPC.GetUserID(ctx, token)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t\treturn err\n\t}\n\n\t//check for nickname. only contain alphabets and not be over 120 characters\n\terr = middlenicknameValidator(nickname)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = s.repository.Users.ChangeNickname(ctx, userID, nickname, permission)\n\tif err != nil {\n\t\ts.tracer.LogError(span, err)\n\t}\n\n\treturn nil\n}", "func (uc UsersController) UpdateUserInfo(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tfmt.Fprint(w, \"User.UpdateUserInfo\")\n}", "func (repository Users) Update(ID uint64, user models.User) error {\n\n\tstatement, error := repository.db.Prepare(\"UPDATE users SET name = ?, nick = ?, email = ? 
where id = ?\")\n\n\tif error != nil {\n\t\treturn error\n\t}\n\n\tdefer statement.Close()\n\n\tif _, error = statement.Exec(user.Name, user.Nick, user.Email, ID); error != nil {\n\t\treturn error\n\t}\n\n\treturn nil\n}", "func (s *Server) setName(c *client.Client, args []string) {\n\t// validate arg input\n\tc.Name = args[1]\n\tc.Msg(fmt.Sprintf(\"name changed to %s\", c.Name))\n}", "func (c *CognitoFlow) Username(w http.ResponseWriter, r *http.Request) {\n\ttype userdata struct {\n\t\tUsername string `json:\"username\"`\n\t}\n\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\thttp.Error(w, \"\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tvar userd userdata\n\terr = json.Unmarshal(body, &userd)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\n\t_, err = c.CognitoClient.AdminGetUser(&cognitoidentityprovider.AdminGetUserInput{\n\t\tUserPoolId: aws.String(c.UserPoolID),\n\t\tUsername: &userd.Username,\n\t})\n\n\tif err != nil {\n\t\tawsErr, ok := err.(awserr.Error)\n\t\tif ok {\n\t\t\tif awsErr.Code() == cognitoidentityprovider.ErrCodeUserNotFoundException {\n\t\t\t\tlog.Info(\"Username %s is free\", &userd.Username)\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Error(err)\n\t\t\thttp.Error(w, \"\", http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\n\tlog.Info(\"Username %s is taken.\", &userd.Username)\n\thttp.Error(w, \"taken\", http.StatusConflict)\n}", "func Update(userId int, nickname, about, image string) int {\n\tif !misc.IsIdValid(userId) {\n\t\tlog.Println(\"user was not updated\", userId)\n\t\treturn misc.NothingUpdated\n\t}\n\n\tnickname, ok := misc.ValidateString(nickname, misc.MaxLenS)\n\tif !ok {\n\t\tlog.Println(\"Nickname is not correct\", nickname)\n\t\treturn misc.WrongName\n\t}\n\n\tabout, ok = misc.ValidateString(about, misc.MaxLenB)\n\tif !ok {\n\t\tlog.Println(\"About is not correct\", about)\n\t\treturn misc.WrongDescr\n\t}\n\n\tif !imager.IsAvatarValid(image) {\n\t\tlog.Println(\"Avatar is not valid\", image)\n\t\treturn misc.WrongImg\n\t}\n\n\tsqlResult, err := psql.Db.Exec(`\n\t\tUPDATE users\n\t\tSET nickname = $1, about = $2, image = $3\n\t\tWHERE id = $4`, nickname, about, image, userId)\n\tif err, code := psql.CheckSpecificDriverErrors(err); err != nil {\n\t\tlog.Println(err)\n\t\treturn code\n\t}\n\n\terr, code := psql.IsAffectedOneRow(sqlResult)\n\treturn code\n}", "func (m *User) SetGivenName(value *string)() {\n m.givenName = value\n}", "func (s *Store) UpdateUser(name, password string) error {\n\t// Hash the password before serializing it.\n\thash, err := HashPassword(password)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Serialize command and send it to the leader.\n\treturn s.exec(internal.Command_UpdateUserCommand, internal.E_UpdateUserCommand_Command,\n\t\t&internal.UpdateUserCommand{\n\t\t\tName: proto.String(name),\n\t\t\tHash: proto.String(string(hash)),\n\t\t},\n\t)\n}", "func (u *user) Name() string {\n\treturn u.data.Name\n}", "func (db *database) UpdatePersonName(\n\tctx context.Context,\n\tpersonID int,\n\tfirstName, lastName string,\n) error {\n\n\tresult, err := db.ExecContext(ctx, `\n\t\tUPDATE person SET\n\t\t\tfirst_name = $1,\n\t\t\tlast_name = $2\n\t\tWHERE person_id = $3\n\t`, firstName, lastName, personID)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to update person name\")\n\t}\n\n\tn, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to check result of person name 
update\")\n\t} else if n != 1 {\n\t\treturn errors.Wrapf(\n\t\t\tapp.ErrNotFound,\n\t\t\t\"no such person by id of %d\", personID,\n\t\t)\n\t}\n\n\treturn nil\n}", "func UserUpdateEmail(id int, n string) {\n\tvar i int\n\ti = GetIndexOfUser(id)\n\tuserList[i].uEmail = n\n}", "func UpdateUserHandler(connection *sql.DB, cnf config.Config) negroni.HandlerFunc {\n\treturn negroni.HandlerFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) {\n\t\tvar queryToken = r.URL.Query().Get(\"token\")\n\n\t\tif len(queryToken) < 1 {\n\t\t\tqueryToken = r.Header.Get(\"token\")\n\t\t}\n\n\t\tif len(queryToken) < 1 {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tw.Write([]byte(string(\"token is mandatory\")))\n\t\t\treturn\n\t\t}\n\n\t\tuser := &models.UserResponse{}\n\t\terr := util.RequestToJSON(r, user)\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, errors.New(\"bad json\"))\n\t\t\treturn\n\t\t}\n\n\t\tsecretKey := cnf.SecretKey\n\t\ttok, err := jwt.Parse(queryToken, func(t *jwt.Token) (interface{}, error) {\n\t\t\treturn []byte(secretKey), nil\n\t\t})\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tclaims := tok.Claims.(jwt.MapClaims)\n\t\tvar ID = claims[\"sub\"].(float64) // gets the ID\n\n\t\tif int64(ID) != user.ID {\n\t\t\tutil.SendBadRequest(w, errors.New(\"you can only change your own user object\"))\n\t\t\treturn\n\t\t}\n\n\t\tif err := user.Validate(); err == nil {\n\n\t\t\tdb.UpdateUser(connection, user)\n\n\t\t\tutil.SendOK(w, user)\n\n\t\t} else {\n\t\t\tutil.SendBadRequest(w, err)\n\t\t}\n\t})\n}", "func (u *User) ChangeName(fullName FullName) Events {\n\tu.Person.FullName = fullName\n\n\treturn Events{EventWithPayload(&PersonNameChanged{\n\t\tTenantID: u.TenantID,\n\t\tUsername: u.Username,\n\t\tFullName: fullName,\n\t})}\n}", "func UpdateUser(data interface{}, token string) error {\n\t// declarations\n\tvalues := makeStringMap(data)\n\tuser := User{\n\t\tGH_Id: makeInt64(values[\"id\"]),\n\t\tUser_name: makeString(values[\"login\"]),\n\t\tReal_name: makeString(values[\"name\"]),\n\t\tEmail: makeString(values[\"email\"]),\n\t\tToken: token,\n\t\tWorker_token: nonExistingRandString(Token_length,\n\t\t\t\"SELECT 42 FROM users WHERE worker_token = $1\"),\n\t\tAdmin: false,\n\t}\n\n\t// update user information\n\tif existsUser(user.GH_Id) {\n\t\tupdateUser(&user)\n\t} else {\n\t\tcreateUser(&user)\n\t}\n\n\treturn nil\n}" ]
[ "0.71727526", "0.7117641", "0.68912226", "0.6796825", "0.6782432", "0.67776346", "0.6765607", "0.6717929", "0.6697031", "0.66757566", "0.6645633", "0.6625669", "0.6601266", "0.65899295", "0.6582045", "0.6462449", "0.6447276", "0.644079", "0.6429267", "0.6413452", "0.6411726", "0.6332362", "0.6205762", "0.61864537", "0.6145022", "0.61141384", "0.61088026", "0.61025065", "0.6100605", "0.61003363", "0.60814244", "0.60625094", "0.605876", "0.6056938", "0.6041548", "0.6033651", "0.60109097", "0.6010565", "0.6000872", "0.59988886", "0.5978446", "0.5976732", "0.59691197", "0.59566706", "0.5949858", "0.59292513", "0.5925671", "0.5916804", "0.59081316", "0.59042305", "0.5904112", "0.5854872", "0.58488053", "0.5845492", "0.58426887", "0.58409375", "0.5840794", "0.5831924", "0.5827546", "0.5814867", "0.5805667", "0.5800242", "0.5794704", "0.5794273", "0.5788895", "0.57673776", "0.576395", "0.5760694", "0.5759136", "0.5744125", "0.57270175", "0.57256687", "0.57203704", "0.57185054", "0.571116", "0.5709907", "0.5706089", "0.57001835", "0.5699321", "0.5672218", "0.56676614", "0.56659263", "0.5665333", "0.5659627", "0.5658891", "0.5657762", "0.5654592", "0.56456107", "0.5638669", "0.5635672", "0.5621115", "0.5614322", "0.55962616", "0.55959696", "0.55915", "0.5590623", "0.55887", "0.5584971", "0.55752", "0.5562809" ]
0.55931133
94
5 Try to update a user that doesn't exist
func TestPatchUserServiceDoesntExist (t *testing.T){\n\terr := PatchUserService(fake_social_number, mongoDB.User{Name:new_name_user_01})\n\tassert.Equal(t, 404, err.HTTPStatus)\n}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func TestUpdateUserNotValid(t *testing.T) {\n\tdb := database.Connect()\n\tu := User{\n\t\tEmail: \"[email protected]\",\n\t}\n\tr := u.Update(db)\n\tif r != false {\n\t\tt.Errorf(\"Expected failed update, got %t\", r)\n\t}\n}", "func UpdateUser(userId int64, userData *UserEntry) error {\n _ , nerr := model.Database.Exec(\"UPDATE users SET username = ?, isadmin = ?, email = ? WHERE userid = ?\", userData.Username, userData.IsAdmin, userData.Email, userId)\n if nerr != nil {\n return nerr\n }\n return nil\n}", "func updateUser(c *gin.Context) {\n\tvar user user\n\tuserID := c.Param(\"id\")\n\n\tdb.First(&user, userID)\n\n\tif user.Id == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No user found!\"})\n\t\treturn\n\t}\n\n\tdb.Model(&user).Update(\"login\", c.PostForm(\"login\"))\n password,_ := HashPassword(c.PostForm(\"password\"))\n\tdb.Model(&user).Update(\"password\", password)\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": \"User updated successfully!\"})\n}", "func (u *UserResource) updateUser(request *restful.Request, response *restful.Response) {\n\tusr := new(User)\n\terr := request.ReadEntity(&usr)\n\tif err == nil {\n\t\tdb.WLock()\n\t\tdefer db.WUnlock() //unlock when exit this method\n\n\t\tif _, err = db.Engine.Id(usr.ID).Update(usr); err != nil {\n\t\t\tresponse.WriteHeaderAndEntity(http.StatusInternalServerError, UsersResponse{Error: err.Error()})\n\t\t} else {\n\t\t\tresponse.WriteEntity(UsersResponse{Success: true})\n\t\t}\n\t} else {\n\t\tresponse.WriteHeaderAndEntity(http.StatusInternalServerError, UsersResponse{Error: err.Error()})\n\t}\n}", "func UserUpdatePost(w http.ResponseWriter, r *http.Request) {\n\tsess := session.Instance(r)\n\tvar updateReq webpojo.UserUpdateReq\n\tvar updateResp = webpojo.UserUpdateResp{}\n\n\t// Prevent brute force login attempts by not hitting MySQL and pretending like it was invalid :-)\n\tif sess.Values[SessLoginAttempt] == nil ||\n\t\t(sess.Values[UserRole] != webpojo.UserSupervisor && sess.Values[UserRole] != webpojo.UserAdmin) {\n\t\tlog.Println(\"Authorized request\")\n\t\tupdateResp.StatusCode = constants.StatusCode_429\n\t\tupdateResp.Message = constants.Msg_429\n\n\t\tReturnJsonResp(w, updateResp)\n\t\treturn\n\t}\n\n\tbody, readErr := ioutil.ReadAll(r.Body)\n\tif readErr != nil {\n\t\tlog.Println(readErr)\n\t\tReturnError(w, readErr)\n\t\treturn\n\t}\n\n\tif len(body) == 0 {\n\t\tlog.Println(\"Empty json payload\")\n\t\tRecordLoginAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tupdateResp.StatusCode = constants.StatusCode_400\n\t\tupdateResp.Message = constants.Msg_400\n\t\tReturnJsonResp(w, updateResp)\n\t\treturn\n\t}\n\n\t//log.Println(\"r.Body\", string(body))\n\tupdateReq = webpojo.UserUpdateReq{}\n\tjsonErr := json.Unmarshal(body, &updateReq)\n\tif jsonErr != nil {\n\t\tlog.Println(jsonErr)\n\t\tReturnError(w, jsonErr)\n\t\treturn\n\t}\n\tlog.Println(fmt.Sprintf(\"%v is updating user: %v\", sess.Values[UserName], updateReq.Email))\n\n\tuser := model.User{}\n\tuser.Email = updateReq.Email\n\tuser.Password, _ = passhash.HashString(updateReq.Password)\n\tuser.FirstName = updateReq.FirstName\n\tuser.LastName = updateReq.LastName\n\tuser.UserRole = updateReq.UserRole\n\tuser.ID = updateReq.UserID\n\tdbErr := model.UserUpdate(user)\n\n\tif dbErr != nil {\n\t\tlog.Println(dbErr)\n\t\tRecordLoginAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tupdateResp.StatusCode = constants.StatusCode_500\n\t\tupdateResp.Message = constants.Msg_500\n\t} else {\n\t\tlog.Println(\"Updated 
successfully\")\n\t\tupdateResp.StatusCode = constants.StatusCode_200\n\t\tupdateResp.Message = constants.Msg_200\n\t}\n\n\tReturnJsonResp(w, updateResp)\n}", "func Update(user User) error {\n\n}", "func UpdateUser(c *gin.Context) {}", "func updateUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\tvar data map[string]string\n\tvar username = p.ByName(\"username\")\n\n\tresp, err := getBody(req)\n\tif err != nil {\n\t\twriteJSON(res, 500, jsMap{\"status\": \"Server Error\"})\n\t\treturn\n\t}\n\tif err := json.Unmarshal(resp, &data); err != nil {\n\t\tlog.Println(\"updateUser:\", err)\n\t\twriteJSON(res, 400, jsMap{\"status\": \"Invalid Data\"})\n\t\treturn\n\t}\n\n\tuser, err := getUser(username)\n\tif err != nil {\n\t\twriteJSON(res, 404, jsMap{\"status\": \"Not Found\"})\n\t\treturn\n\t}\n\t// if the user has 2fa enabled, verify their totp key\n\tif user.TOTP != \"\" {\n\t\terr = verifyTOTP(user, data[\"passcode\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\n\tswitch p.ByName(\"setting\") {\n\tcase \"password\":\n\t\t_, err = authenticateUser(user, username, data[\"password\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Password\"})\n\t\t\treturn\n\t\t}\n\n\t\tv, err := srpEnv.Verifier([]byte(username), []byte(data[\"new_password\"]))\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tih, verif := v.Encode()\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts \n\t\t\tSET ih = $1, verifier = $2\n\t\t\tWHERE username = $3;`, ih, verif, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_enable\":\n\t\tif !totp.Validate(data[\"passcode\"], data[\"secret\"]) {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Passcode\"})\n\t\t\treturn\n\t\t}\n\n\t\tkey, _ := hex.DecodeString(secretKey)\n\t\tplaintext := []byte(data[\"secret\"])\n\t\tblock, err := aes.NewCipher(key)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tciphertext := make([]byte, aes.BlockSize+len(plaintext))\n\t\tiv := ciphertext[:aes.BlockSize]\n\t\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tmode := cipher.NewCBCEncrypter(block, iv)\n\t\tmode.CryptBlocks(ciphertext[aes.BlockSize:], plaintext)\n\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = $1\n\t\t\tWHERE username = $2;`,\n\t\t\thex.EncodeToString(ciphertext),\n\t\t\tusername,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_disable\":\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = ''\n\t\t\tWHERE username = $1;`, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\twriteJSON(res, 200, jsMap{\"status\": \"OK\"})\n}", "func editUser(userID int, firstName string, MI string, lastName string, privLevel int) error {\n\n\tdb, err := sql.Open(\"mysql\", DB_USER_NAME+\":\"+DB_PASSWORD+\"@unix(/var/run/mysql/mysql.sock)/\"+DB_NAME)\n\tif err != nil {\n\t\treturn errors.New(\"No connection\")\n\t}\n\n\tres, err := db.Exec(\"update Users set FirstName=?, MiddleInitial=?, LastName=?, PrivLevel=? 
where UserID=?\", firstName, MI, lastName, privLevel, userID)\n\n\tif err != nil {\n\t\treturn errors.New(\"User update failed.\")\n\t}\n\n\trowsAffected, err := res.RowsAffected()\n\n\tif rowsAffected != 1 {\n\t\treturn errors.New(\"Query didn't match any users.\")\n\t}\n\n\treturn nil\n}", "func (this *UserController) Update() {\n\tflash \t := beego.ReadFromRequest(&this.Controller)\n\n\tid, _ := strconv.Atoi(this.Ctx.Input.Param(\":id\"))\n\tuser := &models.User{Id:id}\n\tuser.GetOne()\n\n\tnamesurname \t\t:= this.GetString(\"name_surname\")\n\tusername \t\t\t:= this.GetString(\"user_name\")\n\temail \t\t\t\t:= this.GetString(\"email\")\n\tpassword\t \t\t:= this.GetString(\"password\")\n\turl\t\t\t \t\t:= this.GetString(\"url\")\n\tinfo\t\t\t\t:= this.GetString(\"info\")\n\n\tvalid := validation.Validation{}\n\n\tvalid.Email(email, \"Email\")\n\n\tvalid.Required(username, \"Username\")\n\tvalid.Required(password, \"Password\")\n\n\tvalid.MaxSize(username, 20, \"Username\")\n\tvalid.MaxSize(password, 16, \"Password\")\n\n\tswitch {\n\tcase valid.HasErrors():\n\t\tfor _, err := range valid.Errors {\n\t\t\tlog.Println(err.Key, err.Message)\n\t\t}\n\t\tvalid.Error(\"Problem creating user!\")\n\t\tflash.Error(\"Problem creating user!\")\n\t\tflash.Store(&this.Controller)\n\tdefault:\n\t\tuser := &models.User{\n\t\t\tNameSurname\t\t:namesurname,\n\t\t\tUserName\t\t:username,\n\t\t\tEmail\t\t\t:email,\n\t\t\tPassword\t\t:Md5(password),\n\t\t\tUrl\t\t\t\t:url,\n\t\t\tInfo\t\t\t:info,\n\t\t\tRegisterTime \t:time.Now(),\n\t\t}\n\t\tswitch {\n\t\t\tcase user.ExistUserName():\n\t\t\t\tvalid.Error(\"This username is in use!\")\n\t\t\t\tflash.Error(\"This username is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tcase user.ExistEmail():\n\t\t\t\tvalid.Error(\"This email is in use!\")\n\t\t\t\tflash.Error(\"This email is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tdefault:\n\t\t\t\terr := user.Update()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tvalid.Error(fmt.Sprintf(\"%v\", err))\n\t\t\t\tflash.Notice(\"User updated successfully!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\t\tthis.Redirect(\"/admin/users\", 302)\n\t\t\t\treturn\n\t\t}\n\n\t}\n\n\tredirectUrl := \"/admin/users/edit/\" + strconv.Itoa(id)\n\tthis.Redirect(redirectUrl, 302)\n\tthis.Abort(\"302\")\n\treturn\n}", "func (r *Repository) update(user *domain.UserInfoModel) error {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tquery := \"UPDATE users SET namee = $1, email = $2, password = $3 WHERE id = $4\"\n\tstmt, err := r.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tqueryStart := time.Now().Nanosecond() / 1000\n\t_, err = stmt.ExecContext(ctx, user.Name, user.Email, user.PassWord, user.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tqueryEnd := time.Now().Nanosecond() / 1000\n\texecutionTime := queryEnd - queryStart\n\tr.insertTimeSpent(\"Update\", executionTime)\n\treturn nil\n}", "func TestUpdateUserNoPassword(t *testing.T) {\n\tdb := database.Connect()\n\tu := User{\n\t\tEmail: \"[email protected]\",\n\t}\n\tr := u.Create(db)\n\tif r != true {\n\t\tt.Errorf(\"Expected successful create, got %t\", r)\n\t}\n\tu.Password = \"\"\n\tu.Email = \"[email protected]\"\n\tr = u.Update(db)\n\tif r != true {\n\t\tt.Errorf(\"Expected successful update, got %t\", r)\n\t}\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\n}", "func Update() error {\n\tuser := 
map[string]interface{}{\n\t\t\"name\": \"viney.chow\",\n\t\t\"created\": time.Now().Format(\"2006-01-02 15:04:05\"),\n\t}\n\n\ti, err := orm.SetTable(\"tb_user\").SetPK(\"uid\").Where(\"uid=$1\", 2).Update(user)\n\tif err == nil {\n\t\tfmt.Println(i)\n\t\treturn nil\n\t}\n\n\treturn err\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\tparams := mux.Vars(r)\n\tuserID, err := strconv.ParseInt(params[\"id\"], 10, 64)\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tuserIDToken, err := authentication.ExtractUserId(r)\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusUnauthorized, err)\n\t\treturn\n\t}\n\n\tif userIDToken != userID {\n\t\tresponses.Error(w, http.StatusForbidden, errors.New(\"não é possível manipular usuário de terceiros\"))\n\t\treturn\n\t}\n\n\tbodyRequest, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tvar user models.User\n\tif err := json.Unmarshal(bodyRequest, &user); err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tif err := user.Prepare(false); err != nil {\n\t\tresponses.Error(w, http.StatusBadRequest, err)\n\t\treturn\n\t}\n\n\tuser.Id = userID\n\tif err := validateUniqueDataUser(user, false); err != nil {\n\t\tresponses.Error(w, http.StatusBadRequest, err)\n\t\treturn\n\t}\n\n\tdb, err := database.Connect()\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\tdefer db.Close()\n\n\trepository := repository.NewRepositoryUser(db)\n\tif err = repository.UpdateUser(userID, user); err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tresponses.JSON(w, http.StatusNoContent, nil)\n\n}", "func UpdateUser(c *gin.Context) {\n\tuuid := c.Param(\"uuid\")\n\tvar user models.User\n\n\tdb := db.GetDB()\n\tif err := db.Where(\"uuid = ?\", uuid).First(&user).Error; err != nil {\n\t\tc.AbortWithStatus(http.StatusNotFound)\n\t\treturn\n\t}\n\tdb.Where(\"uuid = ?\", uuid)\n\n\tif user.ID != 0 {\n\n\t\tjwtClaims := jwt.ExtractClaims(c)\n\t\tauthUserAccessLevel := jwtClaims[\"access_level\"].(float64)\n\t\tauthUserUUID := jwtClaims[\"uuid\"].(string)\n\t\tif authUserAccessLevel != 1 {\n\t\t\tif authUserUUID != uuid {\n\t\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{\n\t\t\t\t\t\"error\": \"Sorry but you can't Update, ONLY admin user can\",\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tvar newUser models.User\n\t\tc.Bind(&newUser)\n\n\t\tif newUser.FirstName != \"\" {\n\t\t\tuser.FirstName = newUser.FirstName\n\t\t}\n\n\t\tif newUser.LastName != \"\" {\n\t\t\tuser.LastName = newUser.LastName\n\t\t}\n\n\t\tif newUser.Email != \"\" {\n\t\t\tuser.Email = newUser.Email\n\t\t}\n\n\t\tif newUser.AccessLevel == 0 || newUser.AccessLevel == 1 {\n\t\t\tuser.AccessLevel = newUser.AccessLevel\n\t\t}\n\n\t\tif !newUser.DateOfBirth.IsZero() {\n\t\t\tuser.DateOfBirth = newUser.DateOfBirth\n\t\t}\n\n\t\t// Update multiple attributes with `struct`, will only update those changed\n\n\t\tif err := db.Save(&user); err != nil {\n\t\t\t// convert array of errors to JSON\n\t\t\terrs := err.GetErrors()\n\n\t\t\tif len(errs) > 0 {\n\t\t\t\tstrErrors := make([]string, len(errs))\n\t\t\t\tfor i, err := range errs {\n\t\t\t\t\tstrErrors[i] = err.Error()\n\t\t\t\t}\n\n\t\t\t\t// return errors\n\t\t\t\tc.JSON(http.StatusUnprocessableEntity, gin.H{\n\t\t\t\t\t\"errors\": 
strErrors,\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\t// Display modified data in JSON message \"success\"\n\t\tc.JSON(http.StatusOK, &user)\n\n\t}\n\n}", "func updateUser(user *User) {\n\tvar dummy string\n\n\t// update user information\n\tdb.QueryRow(\"UPDATE users SET username=$1, realname=$2, email=$3, token=$4\"+\n\t\t\" WHERE gh_id=$5\", user.User_name, user.Real_name, user.Email,\n\t\tuser.Token, user.GH_Id).Scan(&dummy)\n}", "func patchAPIUserHandler(w http.ResponseWriter, r *http.Request, _ map[string]string) {\n\tuserName := sessionHandler.GetUserName(r)\n\tuserID, err := getUserID(userName)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tdecoder := json.NewDecoder(r.Body)\n\tvar json JSONUser\n\terr = decoder.Decode(&json)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user id is over 0\n\tif json.ID < 1 {\n\t\thttp.Error(w, \"Wrong user id.\", http.StatusInternalServerError)\n\t\treturn\n\t} else if userID != json.ID { // Make sure the authenticated user is only changing his/her own data. TODO: Make sure the user is admin when multiple users have been introduced\n\t\thttp.Error(w, \"You don't have permission to change this data.\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Get old user data to compare\n\ttempUser, err := database.RetrieveUser(json.ID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user email is provided\n\tif json.Email == \"\" {\n\t\tjson.Email = string(tempUser.Email)\n\t}\n\t// Make sure user name is provided\n\tif json.Name == \"\" {\n\t\tjson.Name = string(tempUser.Name)\n\t}\n\t// Make sure user slug is provided\n\tif json.Slug == \"\" {\n\t\tjson.Slug = tempUser.Slug\n\t}\n\t// Check if new name is already taken\n\tif json.Name != string(tempUser.Name) {\n\t\t_, err = database.RetrieveUserByName([]byte(json.Name))\n\t\tif err == nil {\n\t\t\t// The new user name is already taken. Assign the old name.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Name = string(tempUser.Name)\n\t\t}\n\t}\n\t// Check if new slug is already taken\n\tif json.Slug != tempUser.Slug {\n\t\t_, err = database.RetrieveUserBySlug(json.Slug)\n\t\tif err == nil {\n\t\t\t// The new user slug is already taken. Assign the old slug.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Slug = tempUser.Slug\n\t\t}\n\t}\n\tuser := structure.User{ID: json.ID, Name: []byte(json.Name), Slug: json.Slug, Email: []byte(json.Email), Image: []byte(json.Image), Cover: []byte(json.Cover), Bio: []byte(json.Bio), Website: []byte(json.Website), Location: []byte(json.Location)}\n\terr = methods.UpdateUser(&user, userID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif json.Password != \"\" && (json.Password == json.PasswordRepeated) { // Update password if a new one was submitted\n\t\tencryptedPassword, err := authentication.EncryptPassword(json.Password)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\terr = database.UpdateUserPassword(user.ID, encryptedPassword, date.GetCurrentTime(), json.ID)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\t// Check if the user name was changed. 
If so, update the session cookie to the new user name.\n\tif json.Name != string(tempUser.Name) {\n\t\tlogInUser(json.Name, w)\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"User settings updated!\"))\n\treturn\n}", "func UpdateUserByID(svc *dynamodb.DynamoDB, user UserUpdate) (User, error) {\n\n\tif user.Email == nil && user.DisplayName == nil && user.Career == nil && user.AvatarURI == nil && user.Message == nil {\n\t\treturn User{}, errors.New(\"required new value\")\n\t}\n\n\texpressionAttributeValues := map[string]*dynamodb.AttributeValue{}\n\tupdateExpression := \"SET \"\n\n\tif user.Email != nil {\n\t\tif *user.Email == \"\" {\n\t\t\t*user.Email = \" \"\n\t\t}\n\t\texpressionAttributeValues[\":email\"] = &dynamodb.AttributeValue{S: aws.String(*user.Email)}\n\t\tupdateExpression += \"email = :email, \"\n\t}\n\n\tif user.DisplayName != nil {\n\t\tif *user.DisplayName == \"\" {\n\t\t\t*user.DisplayName = \" \"\n\t\t}\n\t\texpressionAttributeValues[\":displayName\"] = &dynamodb.AttributeValue{S: aws.String(*user.DisplayName)}\n\t\tupdateExpression += \"displayName = :displayName, \"\n\t}\n\n\tif user.Career != nil {\n\t\tif *user.Career == \"\" {\n\t\t\t*user.Career = \" \"\n\t\t}\n\t\texpressionAttributeValues[\":career\"] = &dynamodb.AttributeValue{S: aws.String(*user.Career)}\n\t\tupdateExpression += \"career = :career, \"\n\t}\n\n\tif user.AvatarURI != nil {\n\t\tif *user.AvatarURI == \"\" {\n\t\t\t*user.AvatarURI = \" \"\n\t\t}\n\t\texpressionAttributeValues[\":avatarUri\"] = &dynamodb.AttributeValue{S: aws.String(*user.AvatarURI)}\n\t\tupdateExpression += \"avatarUri = :avatarUri, \"\n\t}\n\n\tif user.Message != nil {\n\t\tif *user.Message == \"\" {\n\t\t\t*user.Message = \" \"\n\t\t}\n\t\texpressionAttributeValues[\":message\"] = &dynamodb.AttributeValue{S: aws.String(*user.Message)}\n\t\tupdateExpression += \"message = :message, \"\n\t}\n\n\tif user.SkillList != nil && len(*user.SkillList) != 0 {\n\t\texpressionAttributeValues[\":skillList\"] = &dynamodb.AttributeValue{SS: aws.StringSlice(*user.SkillList)}\n\t\tupdateExpression += \"skillList = :skillList, \"\n\t}\n\n\tinput := &dynamodb.UpdateItemInput{\n\t\tTableName: aws.String(UserTableName),\n\t\tExpressionAttributeValues: expressionAttributeValues,\n\t\tKey: map[string]*dynamodb.AttributeValue{\n\t\t\t\"id\": {\n\t\t\t\tS: aws.String(user.ID),\n\t\t\t},\n\t\t},\n\t\tReturnValues: aws.String(\"ALL_NEW\"),\n\t\tUpdateExpression: aws.String(strings.TrimRight(updateExpression, \", \")),\n\t}\n\n\tresult, err := svc.UpdateItem(input)\n\n\tif err != nil {\n\t\treturn User{}, err\n\t}\n\n\titem := User{}\n\n\terr = dynamodbattribute.UnmarshalMap(result.Attributes, &item)\n\n\tif err != nil {\n\t\treturn User{}, err\n\t}\n\n\treturn item, nil\n}", "func (user *User) UpdateUser(id int) error {\n\n\t_, err := db.Model(user).Where(\"id = ?\", id).Returning(\"*\").UpdateNotZero()\n\treturn err\n}", "func updateUser(client *chef.Client, username string, user chef.User) chef.UserResult {\n\tuser_update, err := client.Users.Update(username, user)\n\tif err != nil {\n\t\tfmt.Println(\"Issue updating user:\", err)\n\t}\n\treturn user_update\n}", "func Update(userId int, nickname, about, image string) int {\n\tif !misc.IsIdValid(userId) {\n\t\tlog.Println(\"user was not updated\", userId)\n\t\treturn misc.NothingUpdated\n\t}\n\n\tnickname, ok := misc.ValidateString(nickname, misc.MaxLenS)\n\tif !ok {\n\t\tlog.Println(\"Nickname is not correct\", nickname)\n\t\treturn misc.WrongName\n\t}\n\n\tabout, ok = misc.ValidateString(about, 
misc.MaxLenB)\n\tif !ok {\n\t\tlog.Println(\"About is not correct\", about)\n\t\treturn misc.WrongDescr\n\t}\n\n\tif !imager.IsAvatarValid(image) {\n\t\tlog.Println(\"Avatar is not valid\", image)\n\t\treturn misc.WrongImg\n\t}\n\n\tsqlResult, err := psql.Db.Exec(`\n\t\tUPDATE users\n\t\tSET nickname = $1, about = $2, image = $3\n\t\tWHERE id = $4`, nickname, about, image, userId)\n\tif err, code := psql.CheckSpecificDriverErrors(err); err != nil {\n\t\tlog.Println(err)\n\t\treturn code\n\t}\n\n\terr, code := psql.IsAffectedOneRow(sqlResult)\n\treturn code\n}", "func updateUser(w http.ResponseWriter, r *http.Request) {\r\n\tparams := mux.Vars(r)\r\n\tstmt, err := db.Prepare(\"UPDATE users SET name = ? WHERE id = ?\")\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tbody, err := ioutil.ReadAll(r.Body)\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tkeyVal := make(map[string]string)\r\n\tjson.Unmarshal(body, &keyVal)\r\n\tnewName := keyVal[\"name\"]\r\n\t_, err = stmt.Exec(newName, params[\"id\"])\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tfmt.Fprintf(w, \"User with id = %s was updated\", params[\"id\"])\r\n}", "func (_obj *WebApiAuth) SysUser_Update(id int32, req *SysUser, res *SysUser, _opt ...map[string]string) (err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_int32(id, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = req.WriteBlock(_os, 2)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = (*res).WriteBlock(_os, 3)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SysUser_Update\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = (*res).ReadBlock(_is, 3, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn nil\n}", "func (h *User) Update(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\t// @todo we might want extra check that /users/id equals to user.ID received in body\n\tuser, err := validator.UserCreate(body)\n\tif err != nil || user.ID == 0 {\n\t\tlog.Println(err)\n\t\tR.JSON400(w)\n\t\treturn\n\t}\n\n\terr = h.Storage.UpdateUser(user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tR.JSON200OK(w)\n}", "func (h *Handler) updateUser(c *gin.Context) handlerResponse {\n\n\tvar updatedUser types.User\n\tif err := c.ShouldBindJSON(&updatedUser); err != nil {\n\t\treturn handleBadRequest(err)\n\t}\n\tif updatedUser.Name != c.Param(userParameter) {\n\t\treturn handleNameMismatch()\n\t}\n\tstoredUser, err := 
h.service.User.Update(updatedUser, h.who(c))\n\tif err != nil {\n\t\treturn handleError(err)\n\t}\n\t// Remove password so we do not show in response\n\tstoredUser.Password = \"\"\n\treturn handleOK(storedUser)\n}", "func (call *UserUsecaseImpl) Update(user *models.User) (*models.User, error) {\n\n\tstatus := call.userRepo.CheckMail(user)\n\tif !status {\n\t\treturn nil, errors.New(\"Opps.. sorry email already use other account\")\n\t}\n\n\tuser, err := common.Encrypt(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn call.userRepo.Update(user)\n}", "func (user *User) EditClientPw() (err error) {\n\t_, err = Db.Exec(\"update user set name=$1, email=$2 , password=$3 , image=$4 where id=$5\",user.Name, user.Email, user.Password, user.Image, user.ID)\n\nfmt.Println(err)\nreturn\n}", "func UserUpdate(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n}", "func partialUpdateUsername(providedUser *models.User, username string) error {\n\n\tif username == \"\" {\n\t\treturn nil\n\t}\n\n\tuser := models.User{}\n\n\tdatabase.DB.Where(models.User{Username: username}).First(&user)\n\n\tif user.ID != 0 {\n\t\treturn &utils.UsernameIsTakenError{}\n\t}\n\n\tprovidedUser.Username = username\n\n\treturn nil\n}", "func (as *ActionSuite) TestUpdateUser() {\n\tt := as.T()\n\n\tf := fixturesForUserQuery(as)\n\n\ttype testCase struct {\n\t\tName string\n\t\tPayload string\n\t\tTestUser models.User\n\t\tExpectError string\n\t\tTest func(t *testing.T)\n\t}\n\n\tvar resp UserResponse\n\n\tnewNickname := \"U1 New Nickname\"\n\tlocation := `{description: \"Paris, France\", country: \"FR\", latitude: 48.8588377, longitude: 2.2770202}`\n\n\tpreferences := fmt.Sprintf(`{weightUnit: %s}`, strings.ToUpper(domain.UserPreferenceWeightUnitKGs))\n\n\tupdate := fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"%s\", location: %s,\n\t\t\tpreferences: %s, photoID: \"%s\"}) {%s} }`,\n\t\tf.Users[1].UUID.String(), newNickname, location, preferences, f.Files[0].UUID.String(), allUserFields)\n\n\ttestCases := []testCase{\n\t\t{\n\t\t\tName: \"duplicate nickname\",\n\t\t\tPayload: fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"%s\"}) {nickname} }`,\n\t\t\t\tf.Users[0].UUID, f.Users[1].Nickname),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"That user nickname is already taken\",\n\t\t},\n\t\t{\n\t\t\tName: \"blank nickname\",\n\t\t\tPayload: fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"\"}) {nickname} }`,\n\t\t\t\tf.Users[0].UUID),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"Your user nickname must contain at least one visible character\",\n\t\t},\n\t\t{\n\t\t\tName: \"allowed\",\n\t\t\tPayload: update,\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tif err := as.DB.Load(&(f.Users[1]), \"PhotoFile\"); err != nil {\n\t\t\t\t\tt.Errorf(\"failed to load user fixture, %s\", err)\n\t\t\t\t}\n\t\t\t\tas.Equal(newNickname, resp.User.Nickname, \"incorrect Nickname\")\n\t\t\t\tas.Equal(f.Users[1].PhotoFile.URL, resp.User.AvatarURL, \"incorrect AvatarURL\")\n\t\t\t\tas.Regexp(\"^https?\", resp.User.AvatarURL, \"invalid AvatarURL\")\n\t\t\t\tas.Equal(\"Paris, France\", resp.User.Location.Description, \"incorrect location\")\n\t\t\t\tas.Equal(\"FR\", resp.User.Location.Country, \"incorrect country\")\n\n\t\t\t\tas.Equal(strings.ToUpper(domain.UserPreferenceWeightUnitKGs), *resp.User.Preferences.WeightUnit,\n\t\t\t\t\t\"incorrect 
preference - weightUnit\")\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.Language, \"incorrect preference - language\")\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.TimeZone, \"incorrect preference - timeZone\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"not allowed\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", location: %v}) {%s}}`,\n\t\t\t\tf.Users[0].UUID, location, allUserFields),\n\t\t\tTestUser: f.Users[1],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"not allowed\",\n\t\t},\n\t\t{\n\t\t\tName: \"remove photo\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", location: %v, preferences: %s}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, location, preferences, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Equal(f.Users[1].AuthPhotoURL.String, resp.User.AvatarURL, \"expected photo to be deleted\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"remove location\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", preferences: %s}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, preferences, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Nil(resp.User.Location, \"expected location to be deleted\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"remove preferences\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\"}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.WeightUnit, \"expected preferences to be deleted\")\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, test := range testCases {\n\t\tresp = UserResponse{}\n\t\terr := as.testGqlQuery(test.Payload, test.TestUser.Nickname, &resp)\n\n\t\tif test.ExpectError != \"\" {\n\t\t\tas.Error(err)\n\t\t\tas.Contains(err.Error(), test.ExpectError)\n\t\t} else {\n\t\t\tas.NoError(err)\n\t\t}\n\t\tt.Run(test.Name, test.Test)\n\t}\n}", "func EditUser(u models.User, ID string) (bool, error) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\n\tdb := MongoCN.Database(\"meganeuradb\")\n\tcol := db.Collection(\"users\")\n\n\t//map of interface with string index\n\t//for validation reasons that\n\t//cant be done on json format\n\tuserMap := make(map[string]interface{})\n\tif len(u.Name) > 0 {\n\t\tuserMap[\"name\"] = u.Name\n\t}\n\tif len(u.LastName) > 0 {\n\t\tuserMap[\"lastName\"] = u.LastName\n\t}\n\n\tuserMap[\"birthDate\"] = u.BirthDate\n\n\tif len(u.Bio) > 0 {\n\t\tuserMap[\"bio\"] = u.Bio\n\t}\n\tif len(u.Banner) > 0 {\n\t\tuserMap[\"banner\"] = u.Banner\n\t}\n\tif len(u.Avatar) > 0 {\n\t\tuserMap[\"avatar\"] = u.Avatar\n\t}\n\tif len(u.Location) > 0 {\n\t\tuserMap[\"location\"] = u.Location\n\t}\n\tif len(u.Web) > 0 {\n\t\tuserMap[\"web\"] = u.Web\n\t}\n\n\t//updating string for mongoDB format\n\tupdtString := bson.M{\n\t\t\"$set\": userMap,\n\t}\n\n\t//id of param to ObjID\n\tobjID, _ := primitive.ObjectIDFromHex(ID)\n\n\t//get the doc OF the ID WITH \"EQUAL\"\n\tfilter := bson.M{\"_id\": bson.M{\"$eq\": objID}}\n\n\t//updating on mongodb\n\t_, err := col.UpdateOne(ctx, filter, updtString)\n\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func UserUpdate(w http.ResponseWriter, r *http.Request) {\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; 
charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\turlUser := urlVars[\"user\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\t// Read POST JSON body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\terr := APIErrorInvalidRequestBody()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Parse pull options\n\tpostBody, err := auth.GetUserFromJSON(body)\n\tif err != nil {\n\t\terr := APIErrorInvalidArgument(\"User\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Get Result Object\n\tuserUUID := auth.GetUUIDByName(urlUser, refStr)\n\tmodified := time.Now().UTC()\n\tres, err := auth.UpdateUser(userUUID, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description,\n\t\tpostBody.Name, postBody.Projects, postBody.Email, postBody.ServiceRoles, modified, true, refStr)\n\n\tif err != nil {\n\n\t\t// In case of invalid project or role in post body\n\n\t\tif err.Error() == \"not found\" {\n\t\t\terr := APIErrorNotFound(\"User\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tif strings.HasPrefix(err.Error(), \"invalid\") {\n\t\t\terr := APIErrorInvalidData(err.Error())\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tif strings.HasPrefix(err.Error(), \"duplicate\") {\n\t\t\terr := APIErrorInvalidData(err.Error())\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Output result to JSON\n\tresJSON, err := res.ExportJSON()\n\tif err != nil {\n\t\terr := APIErrExportJSON()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write response\n\toutput = []byte(resJSON)\n\trespondOK(w, output)\n\n}", "func (app *application) EditUser(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tuserID, _ := strconv.Atoi(id)\n\n\tvar user models.User\n\n\terr := app.readJSON(w, r, &user)\n\tif err != nil {\n\t\tapp.badRequest(w, r, err)\n\t\treturn\n\t}\n\n\tif userID > 0 { // For an existing user, update the user record\n\t\terr = app.DB.EditUser(user)\n\t\tif err != nil {\n\t\t\tapp.badRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\n\t\tif user.Password != \"\" {\n\t\t\tnewHash, err := bcrypt.GenerateFromPassword([]byte(user.Password), 12)\n\t\t\tif err != nil {\n\t\t\t\tapp.badRequest(w, r, err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\terr = app.DB.UpdatePasswordForUser(user, string(newHash))\n\t\t\tif err != nil {\n\t\t\t\tapp.badRequest(w, r, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t} else { // For a new user, simply add the user to the users table\n\t\tnewHash, err := bcrypt.GenerateFromPassword([]byte(user.Password), 12)\n\t\tif err != nil {\n\t\t\tapp.badRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\terr = app.DB.AddUser(user, string(newHash))\n\t\tif err != nil {\n\t\t\tapp.badRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tvar resp struct {\n\t\tError bool `json:\"error\"`\n\t\tMessage string `json:\"message\"`\n\t}\n\n\tresp.Error = false\n\tapp.writeJSON(w, http.StatusOK, resp)\n}", "func UpdateLpBrookUserById(m *LpBrookUser) (err error) {\n\to := orm.NewOrm()\n\tv := LpBrookUser{Id: m.Id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Update(m); err == nil {\n\t\t\tfmt.Println(\"Number of records updated in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func db_update_user(username string, sessionid string, follow_username string, post Post){\n file_path := path.Join(\"db/users\", 
strings.ToLower(username)+\".json\")\n \n if _, err := os.Stat(file_path); os.IsNotExist(err) {\n return\n }\n user := db_JSON_to_user(username)\n \n if sessionid != \"\" {\n user.SessionID = sessionid\n }\n if follow_username != \"\" {\n user.Follows = append(user.Follows, follow_username)\n }\n if post.Content != \"\" {\n user.Posts = append(user.Posts, &post)\n }\n \n updated_user := db_user_to_JSON(user)\n \n writeerr := ioutil.WriteFile(file_path, updated_user, 0644)\n\n if writeerr != nil {\n panic(writeerr)\n }\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\tdb, err := gorm.Open(\"mysql\", \"root@tcp(127.0.0.1:3306)/test?charset=utf8&parseTime=True\")\n\tif err != nil {\n\t\tlog.Panic(err, \" Update user error\")\n\t}\n\tdefer db.Close()\n\n}", "func updateUser(username string, attrs User) bool {\n\toutp, _ := exec.Command(\"getent\", \"shadow\", username).CombinedOutput()\n\tcurrentPassword := strings.TrimSpace(strings.Split(string(outp), \":\")[1])\n\touts, _ := exec.Command(\"getent\", \"passwd\", username).CombinedOutput()\n\tcurrentShell := strings.TrimSpace(strings.Split(string(outs), \":\")[6])\n\tcurrentHome := strings.TrimSpace(strings.Split(string(outs), \":\")[5])\n\tcurrentComment := strings.TrimSpace(strings.Split(string(outs), \":\")[4])\n\texistingGroups := getUserGroups(username)\n\n\tif attrs.Shell != currentShell {\n\t\tupdateShell(username, attrs.Shell)\n\t}\n\tif attrs.Password != currentPassword {\n\t\tupdatePassword(username, attrs.Password)\n\t}\n\tif attrs.Home != currentHome {\n\t\tupdateHome(username, attrs.Home)\n\t}\n\tif attrs.Comment != currentComment {\n\t\tupdateComment(username, attrs.Comment)\n\t}\n\tif strings.Join(existingGroups, \",\") != strings.Join(attrs.Groups, \",\") {\n\t\tupdateGroups(username, attrs.Groups)\n\t}\n\n\tkeyFile := path.Join(attrs.Home, \".ssh\", \"authorized_keys\")\n\tfileData := []string{}\n\tif buf, err := ioutil.ReadFile(keyFile); err == nil {\n\t\tfileData = strings.Split(string(buf), \"\\n\")\n\t\tsort.Strings(fileData)\n\t}\n\tif strings.Join(attrs.SSHKeys, \",\") != strings.Join(fileData, \",\") {\n\t\tupdateSSHPublicKeys(username, attrs)\n\t}\n\treturn true\n}", "func (a *Users) Update(w http.ResponseWriter, r *http.Request) {\n\tid := getUserID(r)\n\ta.l.Println(\"[DEBUG] get record id\", id)\n\n\t// fetch the user from the context\n\tacc := r.Context().Value(KeyUser{}).(*models.User)\n\tacc.ID = id\n\ta.l.Println(\"[DEBUG] updating user with id\", acc.ID)\n\n\terr := models.UpdateUser(acc)\n\n\tif err == models.ErrUserNotFound {\n\t\ta.l.Println(\"[ERROR] user not found\", err)\n\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tmodels.ToJSON(&GenericError{Message: \"User not found in database\"}, w)\n\t\treturn\n\t}\n\n\t// write the no content success header\n\tw.WriteHeader(http.StatusNoContent)\n}", "func UserUpdatePassword(id int, n string) {\n\tvar i int\n\ti = GetIndexOfUser(id)\n\tuserList[i].uPassword = n\n}", "func SyncADUser(l *ldap.Conn, s *db.SFView, typ, env string, orm *gorm.DB) {\n\tbaseDN := MapLDAP[env][\"baseDN\"]\n\tusrEntry := SearchADObject(l, baseDN, s.Username, \"sAMAccountName\", []string{\"sAMAccountName\", \"physicalDeliveryOfficeName\", \"comment\", \"department\", \"memberOf\", \"description\", \"userAccountControl\", \"mail\", \"userPrincipalName\"})\n\n\tif typ == \"update\" {\n\t\t//tmp := GenerateADUserMap(l, s, \"new\", env)\n\t\t//db.CreateORM(orm, tmp)\n\t\tif len(usrEntry) == 0 { // If user does not exist, then create it\n\t\t\tif !s.EmailAddress.Valid || 
strings.ToLower(s.Username) == strings.ToLower(strings.Split(s.EmailAddress.String, \"@\")[0]) || !strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") { // mail = null or usrname = mail or email not like csisolar.com,then create ad user\n\t\t\t\tu := GenerateADUserMap(l, s, \"new\", env)\n\n\t\t\t\tusrEntryByEmpID := SearchADObject(l, baseDN, s.PersonIDExternal, \"employeeID\", []string{\"sAMAccountName\", \"userAccountControl\"})\n\t\t\t\tif len(usrEntryByEmpID) > 0 {\n\t\t\t\t\t//存在相同工号的用户,可能是SF改名导致,触发工单提醒服务台\n\t\t\t\t\tsam := usrEntryByEmpID[0].GetAttributeValue(\"sAMAccountName\")\n\t\t\t\t\tif s.Username != sam {\n\t\t\t\t\t\tsubj := \"Username may be changed in SF - \" + s.Username\n\t\t\t\t\t\tbody := fmt.Sprintf(\"Username may be changed in SF. Please check the new AD user.\\n\\nCurrent username in SF: %s\\nOld username in AD: %s\\nEmployeeID: %s\\nSite: %s\\nJob level: %s\\nStart date:%s\", s.Username, sam, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp, output.MailOAOp)\n\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"usernamechange\"], s.Username, s.Username, sam, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Create user, then add to groups\n\t\t\t\tusrDN := fmt.Sprintf(\"CN=%s,OU=Standard Users,OU=Users,OU=Resources,OU=%s,%s,%s\", common.ConvertFullname(s.Username), s.Site, mapSiteGroup[s.Site][\"ou\"], baseDN)\n\t\t\t\tif err := CreateADUser(l, u, usrDN); err == nil {\n\t\t\t\t\tdb.CreateORM(orm, u)\n\n\t\t\t\t\ttimeStartDate, _ := time.Parse(\"2006-01-02\", common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\tif strings.Contains(s.Username, \".\") && time.Now().AddDate(0, 0, -5).Before(timeStartDate) { //people started work long ago will not create ticket\n\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"new\"], s.Username, s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t\tsubj := fmt.Sprintf(\"New employee on boarding. 
Please setup PC - %s\", s.Username)\n\t\t\t\t\t\tbody := fmt.Sprintf(\"New employee on boarding, please find details as below:\\n\\nName: %s\\nUser ID: %s\\nSite: %s\\nJob level: %s\\nStart date: %s\", s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate))\n\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp)\n\t\t\t\t\t}\n\t\t\t\t\tsiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[s.Site][\"group\"], disGrpOU, s.Site, mapSiteGroup[s.Site][\"ou\"], baseDN)\n\t\t\t\t\tsLangGrpDN := mapExclaimer[u[\"comment\"][0]] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\tAddADGroupMember(l, baseDN, siteGrpDN, usrDN)\n\t\t\t\t\tAddADGroupMember(l, baseDN, sLangGrpDN, usrDN)\n\n\t\t\t\t\tif s.EmailAddress.Valid && strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") && strings.Contains(strings.Split(s.EmailAddress.String, \"@\")[0], \".\") {\n\t\t\t\t\t\to365Grp := mapO365[\"P1\"]\n\t\t\t\t\t\tif s.JobLevel.Valid && s.JobLevel.String <= \"E\" {\n\t\t\t\t\t\t\to365Grp = mapO365[\"E3\"]\n\t\t\t\t\t\t}\n\t\t\t\t\t\to365GrpDN := o365Grp + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\t\tmfaGrpDN := mapO365[\"MFA\"] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, o365GrpDN, usrDN)\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, mfaGrpDN, usrDN)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\terr := errors.New(\"Username distinct from mail prefix\")\n\t\t\t\toutput.GenerateLog(err, \"AD user creation\", s.Site+\" | \"+s.Username+\" | \"+s.EmailAddress.String, false)\n\t\t\t}\n\t\t} else { // If user exists, then update it\n\t\t\toldUPN := usrEntry[0].GetAttributeValue(\"userPrincipalName\")\n\t\t\tif !strings.Contains(oldUPN, \"canadiansolar.com\") { //not deal with EG user\n\t\t\t\tu := GenerateADUserMap(l, s, \"update\", env)\n\n\t\t\t\tusrOldDN := usrEntry[0].DN\n\t\t\t\tsam := s.Username\n\n\t\t\t\tnewDept := \"\"\n\t\t\t\tif len(u[\"department\"]) > 0 {\n\t\t\t\t\tnewDept = u[\"department\"][0]\n\t\t\t\t}\n\n\t\t\t\tnewSite := u[\"physicalDeliveryOfficeName\"][0]\n\t\t\t\tnewSLang := u[\"comment\"][0] // slang is modified in GenerateADUserMap function\n\n\t\t\t\toldDept := usrEntry[0].GetAttributeValue(\"department\")\n\t\t\t\toldSite := usrEntry[0].GetAttributeValue(\"physicalDeliveryOfficeName\")\n\t\t\t\toldSLang := usrEntry[0].GetAttributeValue(\"comment\")\n\n\t\t\t\toldMail := usrEntry[0].GetAttributeValue(\"mail\")\n\n\t\t\t\tif strings.TrimSpace(oldMail) == \"\" && s.EmailAddress.Valid && strings.HasSuffix(s.EmailAddress.String, \"csisolar.com\") && strings.Contains(strings.Split(s.EmailAddress.String, \"@\")[0], \".\") {\n\t\t\t\t\to365Grp := mapO365[\"P1\"]\n\t\t\t\t\tif s.JobLevel.Valid && s.JobLevel.String <= \"E\" {\n\t\t\t\t\t\to365Grp = mapO365[\"E3\"]\n\t\t\t\t\t}\n\t\t\t\t\to365GrpDN := o365Grp + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\tmfaGrpDN := mapO365[\"MFA\"] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\tAddADGroupMember(l, baseDN, o365GrpDN, usrOldDN)\n\t\t\t\t\tAddADGroupMember(l, baseDN, mfaGrpDN, usrOldDN)\n\t\t\t\t}\n\n\t\t\t\tif err := ModifyADUser(l, u, usrOldDN); err == nil {\n\t\t\t\t\tdb.UpdateORM(orm, sam, u)\n\t\t\t\t\t// Adjust sign language and site groups, move user OU\n\t\t\t\t\tif oldSLang != newSLang {\n\t\t\t\t\t\tnewSLangGrpDN := mapExclaimer[newSLang] + \",\" + secGrpOU + \",\" + baseDN\n\t\t\t\t\t\toldSLangGrpDN := mapExclaimer[oldSLang] + \",\" + secGrpOU + \",\" + baseDN\n\n\t\t\t\t\t\tAddADGroupMember(l, 
baseDN, newSLangGrpDN, usrOldDN)\n\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, oldSLangGrpDN, usrOldDN)\n\t\t\t\t\t} else if strings.TrimSpace(oldDept) != \"\" && strings.TrimSpace(newDept) != \"\" && oldDept != newDept {\n\t\t\t\t\t\tif strings.Contains(s.Username, \".\") {\n\t\t\t\t\t\t\tInformADUserGroupManagers(l, baseDN, s.Username, oldDept, newDept, \"dept\")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif oldSite != newSite {\n\t\t\t\t\t\t//newSiteGrpDN := fmt.Sprintf(\"CN=CN_%s_ALL,%s,OU=%s,%s,%s\", newSite, disGrpOU, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\tnewSiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[newSite][\"group\"], disGrpOU, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\t//oldSiteGrpDN := fmt.Sprintf(\"CN=CN_%s_ALL,%s,OU=%s,%s,%s\", oldSite, disGrpOU, oldSite, mapSiteGroup[oldSite][\"ou\"], baseDN)\n\t\t\t\t\t\toldSiteGrpDN := fmt.Sprintf(\"CN=%s,%s,OU=%s,%s,%s\", mapSiteGroup[oldSite][\"group\"], disGrpOU, oldSite, mapSiteGroup[oldSite][\"ou\"], baseDN)\n\n\t\t\t\t\t\tAddADGroupMember(l, baseDN, newSiteGrpDN, usrOldDN)\n\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, oldSiteGrpDN, usrOldDN)\n\n\t\t\t\t\t\tusrNewDN := fmt.Sprintf(\"CN=%s,OU=Standard Users,OU=Users,OU=Resources,OU=%s,%s,%s\", s.Username, newSite, mapSiteGroup[newSite][\"ou\"], baseDN)\n\t\t\t\t\t\tMoveADObject(l, usrOldDN, usrNewDN)\n\n\t\t\t\t\t\tif strings.Contains(s.Username, \".\") {\n\t\t\t\t\t\t\tdata := fmt.Sprintf(mapZohoData[env][\"update\"], s.Username, s.Username, s.PersonIDExternal, oldSite, newSite, s.JobLevel.String, common.ConvertDatetimeToDate(s.JobEffectDate))\n\t\t\t\t\t\t\toutput.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\n\t\t\t\t\t\t\tInformADUserGroupManagers(l, baseDN, s.Username, oldSite, newSite, \"site\")\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else if typ == \"disable\" {\n\t\t//fmt.Println(\"hello\", s.PersonIDExternal)\n\t\tusrEntryByEmpID := SearchADObject(l, baseDN, s.PersonIDExternal, \"employeeID\", []string{\"sAMAccountName\", \"userAccountControl\", \"description\", \"memberOf\", \"userPrincipalName\"})\n\t\tif len(usrEntryByEmpID) > 0 {\n\t\t\t//fmt.Println(s.PersonIDExternal)\n\t\t\tfor _, e := range usrEntryByEmpID {\n\t\t\t\toldUPN := usrEntryByEmpID[0].GetAttributeValue(\"userPrincipalName\")\n\t\t\t\tif !strings.Contains(oldUPN, \"canadiansolar.com\") { //not deal with EG user\n\t\t\t\t\tuacCode, _ := strconv.Atoi(e.GetAttributeValue(\"userAccountControl\"))\n\t\t\t\t\tusrOldDN := e.DN\n\n\t\t\t\t\tif uacCode != 514 && strings.Contains(usrOldDN, \"OU=Standard Users\") { // not disabled yet, in standard user OU\n\t\t\t\t\t\tu := GenerateADUserMap(l, s, \"disable\", env)\n\t\t\t\t\t\tif err := DisableADUser(l, usrOldDN); err == nil {\n\t\t\t\t\t\t\t//fmt.Println(u)\n\t\t\t\t\t\t\tif err = ModifyADUser(l, u, usrOldDN); err == nil {\n\t\t\t\t\t\t\t\tsam := s.Username\n\n\t\t\t\t\t\t\t\tdb.DeleteORM(orm, sam, u)\n\n\t\t\t\t\t\t\t\tfor _, g := range e.GetAttributeValues(\"memberOf\") {\n\t\t\t\t\t\t\t\t\tif !strings.Contains(g, \"F_O365\") {\n\t\t\t\t\t\t\t\t\t\tRemoveADGroupMember(l, baseDN, g, usrOldDN)\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t//fmt.Println(usrOldDN)\n\t\t\t\t\t\t\t\tusrNewDn := strings.Replace(usrOldDN, \"OU=Standard Users\", \"OU=Resigned Users\", 1)\n\t\t\t\t\t\t\t\t//fmt.Println(usrNewDn)\n\t\t\t\t\t\t\t\tMoveADObject(l, usrOldDN, usrNewDn)\n\n\t\t\t\t\t\t\t\tif common.ConvertDatetimeToDate(s.StartDate) < \"2020-10-01\" && strings.Contains(s.Username, \".\") 
{\n\t\t\t\t\t\t\t\t\tsubj := fmt.Sprintf(\"Employee resigned. Please delete the canadiansolar mailbox - %s\", s.Username)\n\t\t\t\t\t\t\t\t\tbody := fmt.Sprintf(\"There is an employee resigned whose start date is before 2020-10-01:\\n\\nName: %s\\nUser ID: %s\\nSite: %s\\nJob level: %s\\nStart date: %s\\nLast work date: %s\", s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate), common.ConvertDatetimeToDate(s.LastDateWorked))\n\t\t\t\t\t\t\t\t\toutput.SendMail(output.MailReport, subj, body, \"plain\", \"\", output.MailAdmin, output.MailHelp)\n\t\t\t\t\t\t\t\t\t//data := fmt.Sprintf(mapZohoData[env][\"disable\"], s.Username, s.Username, s.PersonIDExternal, s.Site, s.JobLevel.String, common.ConvertDatetimeToDate(s.StartDate), common.ConvertDatetimeToDate(s.LastDateWorked))\n\t\t\t\t\t\t\t\t\t//output.CreateTicket(mapZohoURL[env], mapZohoKey[env], data)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func UpdateUser(c *gin.Context) {\n\tvar user Models.User\n\tid := c.Params.ByName(\"id\")\n\tfmt.Println(\"id\", id)\n\terr := Models.GetUserByID(&user, id)\n\tif err != nil {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"error\" : gin.H { \n\t\t\t\"status\": http.StatusNotFound,\n\t\t\t\"message\": \"Not Found\",\n\t\t}})\n\t\treturn\n\t} else {\n\tc.BindJSON(&user)\n\t\n\terr = Models.UpdateUser(&user, id)\n\tif err != nil {\n\t\tc.AbortWithStatus(http.StatusNotFound)\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"data\":gin.H { \n\t\t\t\"error\" : gin.H { \n\t\t\t\"status\": http.StatusBadRequest,\n\t\t\t\"message\": \"Can´t update user\",\n\t\t}}})\n\t} else {\n\t\tc.JSON(http.StatusOK, user)\n\t}\n}\n}", "func (u *User)Update()(e error){\n\tst := `update users set\n\t\t\tuser_name = ?,email = ?,password = ?\n\t\t\twhere user_id = ?`;\n\n\t_,e = db.Exec(st,u.Name,u.Email,u.password,u.Id)\n\n\treturn\n}", "func UpdateUser(datastore core.ApplicationUser, accountID, applicationID int64, existingApplicationUser, updatedApplicationUser *entity.ApplicationUser) (errs []errors.Error) {\n\tif updatedApplicationUser.FirstName != \"\" {\n\t\tif !StringLengthBetween(updatedApplicationUser.FirstName, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserFirstNameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif updatedApplicationUser.LastName != \"\" {\n\t\tif !StringLengthBetween(updatedApplicationUser.LastName, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserLastNameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif updatedApplicationUser.Username != \"\" {\n\t\tif !StringLengthBetween(updatedApplicationUser.Username, userNameMin, userNameMax) {\n\t\t\terrs = append(errs, errmsg.ErrApplicationUserUsernameSize.SetCurrentLocation())\n\t\t}\n\t}\n\n\tif updatedApplicationUser.Username == \"\" && updatedApplicationUser.Email == \"\" {\n\t\terrs = append(errs, errmsg.ErrApplicationUsernameAndEmailAreEmpty.SetCurrentLocation())\n\t}\n\n\tif updatedApplicationUser.Email != \"\" && !IsValidEmail(updatedApplicationUser.Email) {\n\t\terrs = append(errs, errmsg.ErrApplicationUserEmailInvalid.SetCurrentLocation())\n\t}\n\n\tif updatedApplicationUser.Email != \"\" && existingApplicationUser.Email != updatedApplicationUser.Email {\n\t\tisDuplicate, err := DuplicateApplicationUserEmail(datastore, accountID, applicationID, updatedApplicationUser.Email)\n\t\tif isDuplicate || err != nil {\n\t\t\tif isDuplicate {\n\t\t\t\terrs = append(errs, 
errmsg.ErrApplicationUserEmailAlreadyExists.SetCurrentLocation())\n\t\t\t} else if err != nil {\n\t\t\t\terrs = append(errs, err...)\n\t\t\t}\n\t\t}\n\t}\n\n\tif updatedApplicationUser.Username != \"\" && existingApplicationUser.Username != updatedApplicationUser.Username {\n\t\tisDuplicate, err := DuplicateApplicationUserUsername(datastore, accountID, applicationID, updatedApplicationUser.Username)\n\t\tif isDuplicate || err != nil {\n\t\t\tif isDuplicate {\n\t\t\t\terrs = append(errs, errmsg.ErrApplicationUserUsernameInUse.SetCurrentLocation())\n\t\t\t} else if err != nil {\n\t\t\t\terrs = append(errs, err...)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn\n}", "func (m *mysqlUserRepository) Update(u *domain.User) (err error) {\n\tquery := `UPDATE user set name=?, email=?, updated_at=? WHERE ID =?`\n\n\tstmt, err := m.Conn.Prepare(query)\n\tif err != nil {\n\t\treturn\n\t}\n\tres, err := stmt.Exec(u.Name, u.Email, time.Now())\n\tif err != nil {\n\t\treturn\n\t}\n\n\trowsAffected, err := res.RowsAffected()\n\tif err != nil {\n\t\treturn\n\t}\n\tif rowsAffected != 1 {\n\t\terr = fmt.Errorf(\"Something not expected: %d rows affected\", rowsAffected)\n\t\treturn\n\t}\n\treturn\n}", "func (sp *ScyllaUserProvider) Update(user entities.User) derrors.Error {\n\n\tsp.Lock()\n\tdefer sp.Unlock()\n\n\t// check connection\n\tif err := sp.checkAndConnect(); err != nil {\n\t\treturn err\n\t}\n\n\t// check if the user exists\n\texists, err := sp.unsafeExists(user.Email)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\treturn derrors.NewNotFoundError(user.Email)\n\t}\n\n\t// update a user\n\tstmt, names := qb.Update(userTable).Set(\"organization_id\", \"name\", \"member_since\", \"last_name\", \"title\", \"phone\", \"location\").Where(qb.Eq(userTablePK)).ToCql()\n\tq := gocqlx.Query(sp.Session.Query(stmt), names).BindStruct(user)\n\tcqlErr := q.ExecRelease()\n\n\tif cqlErr != nil {\n\t\treturn derrors.AsError(cqlErr, \"cannot update user\")\n\t}\n\n\tuserPhoto := NewUserPhotoInfo(user.Email, user.PhotoBase64)\n\tstmt, names = qb.Update(userPhotoTable).Set(\"photo_base64\").Where(qb.Eq(userPhotoTablePK)).ToCql()\n\tq = gocqlx.Query(sp.Session.Query(stmt), names).BindStruct(userPhoto)\n\tcqlErr = q.ExecRelease()\n\n\tif cqlErr != nil {\n\t\treturn derrors.AsError(cqlErr, \"cannot update user photo\")\n\t}\n\n\treturn nil\n}", "func (u *User) Update() *errorsutils.RestErr {\n\tstmt, err := usersdb.Client.Prepare(queryUpdateUser)\n\tif err != nil {\n\t\tlogger.Error(\"error when trying to prepare update user statement\", err)\n\t\treturn errorsutils.NewInternalServerError(\"database error\", errors.New(\"database error\"))\n\t}\n\tdefer stmt.Close()\n\n\tif _, err = stmt.Exec(u.FirstName, u.LastName, u.Email, u.ID); err != nil {\n\t\tlogger.Error(\"error when trying to update user\", err)\n\t\treturn errorsutils.NewInternalServerError(\"database error\", errors.New(\"database error\"))\n\t}\n\n\treturn nil\n}", "func UpdateUserHandler(connection *sql.DB, cnf config.Config) negroni.HandlerFunc {\n\treturn negroni.HandlerFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) {\n\t\tvar queryToken = r.URL.Query().Get(\"token\")\n\n\t\tif len(queryToken) < 1 {\n\t\t\tqueryToken = r.Header.Get(\"token\")\n\t\t}\n\n\t\tif len(queryToken) < 1 {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tw.Write([]byte(string(\"token is mandatory\")))\n\t\t\treturn\n\t\t}\n\n\t\tuser := &models.UserResponse{}\n\t\terr := util.RequestToJSON(r, user)\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, 
errors.New(\"bad json\"))\n\t\t\treturn\n\t\t}\n\n\t\tsecretKey := cnf.SecretKey\n\t\ttok, err := jwt.Parse(queryToken, func(t *jwt.Token) (interface{}, error) {\n\t\t\treturn []byte(secretKey), nil\n\t\t})\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tclaims := tok.Claims.(jwt.MapClaims)\n\t\tvar ID = claims[\"sub\"].(float64) // gets the ID\n\n\t\tif int64(ID) != user.ID {\n\t\t\tutil.SendBadRequest(w, errors.New(\"you can only change your own user object\"))\n\t\t\treturn\n\t\t}\n\n\t\tif err := user.Validate(); err == nil {\n\n\t\t\tdb.UpdateUser(connection, user)\n\n\t\t\tutil.SendOK(w, user)\n\n\t\t} else {\n\t\t\tutil.SendBadRequest(w, err)\n\t\t}\n\t})\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (user *User) Update() *errors.RestErr {\n\tstmt, err := usersdb.Client.Prepare(queryUdpdateUser)\n\tif err != nil {\n\t\treturn errors.NewInternalServerError(err.Error())\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(user.FirstName, user.LastName, user.Email, user.ID)\n\tif err != nil {\n\t\treturn errors.ParseError(err)\n\n\t}\n\treturn 
nil\n\n}", "func UpdateUser(db *sql.DB, user *models.UserResponse) (int, error) {\n\t_, err := db.Exec(\"UPDATE users SET username = ?, email = ? WHERE id = ?\", user.Username, user.Email, user.ID)\n\tif err != nil {\n\t\tlog.Errorf(\"Error inserting\")\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\treturn user.ID, nil\n}", "func TestStore_UpdateUser(t *testing.T) {\n\tt.Parallel()\n\ts := MustOpenStore()\n\tdefer s.Close()\n\n\t// Create users.\n\tif _, err := s.CreateUser(\"susy\", \"pass\", true); err != nil {\n\t\tt.Fatal(err)\n\t} else if _, err := s.CreateUser(\"bob\", \"pass\", true); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Store password hash for bob.\n\tui, err := s.User(\"bob\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Update user.\n\tif err := s.UpdateUser(\"bob\", \"XXX\"); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Verify password hash was updated.\n\tif other, err := s.User(\"bob\"); err != nil {\n\t\tt.Fatal(err)\n\t} else if ui.Hash == other.Hash {\n\t\tt.Fatal(\"password hash did not change\")\n\t}\n}", "func UpdateUser(res http.ResponseWriter, req *http.Request) {\n\tvar response responses.User\n\tuser := new(model.User)\n\tID := req.Context().Value(\"ID\").(string)\n\tdata := req.Context().Value(\"data\").(*validation.UpdateUser)\n\tnow := time.Now()\n\tdocKey, err := connectors.ReadDocument(\"users\", ID, user)\n\tif err != nil {\n\t\trender.Render(res, req, responses.NewHTTPError(http.StatusServiceUnavailable, constants.Unavailable))\n\t\treturn\n\t} else if len(docKey) == 0 {\n\t\trender.Render(res, req, responses.NewHTTPError(http.StatusBadRequest, constants.NotFoundResource))\n\t\treturn\n\t}\n\tcopier.Copy(&user, data)\n\tuser.UpdatedAt = now.Unix()\n\tdocKey, err = connectors.UpdateDocument(\"users\", docKey, user)\n\tif err != nil {\n\t\trender.Render(res, req, responses.NewHTTPError(http.StatusServiceUnavailable, constants.Unavailable))\n\t\treturn\n\t}\n\tresponse.ID = docKey\n\tcopier.Copy(&response, user)\n\trender.Render(res, req, responses.NewHTTPSucess(http.StatusOK, response))\n}", "func UpdateUser(person *Person, id string) (err error) {\n\tfmt.Println(person)\n\tConfig.DB.Save(person)\n\treturn nil\n}", "func UpdateAuthorizedUser(c *gin.Context, username string, client *statsd.Client) {\n\tlog.Info(\"updating user\")\n\tvar user entity.User\n\terr := model.GetUserByUsername(&user, username, client)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"error\": err.Error(),\n\t\t})\n\t}\n\t//get the authorized user information\n\toriID := user.ID\n\tvar oriUsername string = *user.Username\n\toriCreatedTime := user.AccountCreated\n\toriPwd := user.Password\n\tc.BindJSON(&user)\n\n\t//if the user wants to change id, username or create time, it's not allowed\n\tif user.ID != oriID || *user.Username != oriUsername || user.AccountCreated != oriCreatedTime {\n\t\tlog.Error(\"id, username and AccountCreated are readonly\")\n\t\tc.JSON(http.StatusBadRequest, gin.H{\n\t\t\t\"error\": \"id, username and AccountCreated are readonly!!!\",\n\t\t})\n\t\treturn\n\t} else if oriPwd == user.Password {\n\t\t//if the user isn't updating password, don't need to check the password\n\t\terr = model.UpdateUserWithSamePwd(&user, user.ID, client)\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t} else {\n\t\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\t\"id\": user.ID,\n\t\t\t\t\"first_name\": user.FirstName,\n\t\t\t\t\"last_name\": 
user.LastName,\n\t\t\t\t\"username\": user.Username,\n\t\t\t\t\"account_created\": user.AccountCreated,\n\t\t\t\t\"account_updated\": user.AccountUpdated,\n\t\t\t})\n\t\t}\n\t} else {\n\t\t//if the user is updating password, do check the password\n\t\terr = model.UpdateUserWithDiffPwd(&user, user.ID, client)\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t} else {\n\t\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\t\"id\": user.ID,\n\t\t\t\t\"first_name\": user.FirstName,\n\t\t\t\t\"last_name\": user.LastName,\n\t\t\t\t\"username\": user.Username,\n\t\t\t\t\"account_created\": user.AccountCreated,\n\t\t\t\t\"account_updated\": user.AccountUpdated,\n\t\t\t})\n\t\t}\n\t}\n\tlog.Info(\"user updated\")\n}", "func (u *User) UpdateUser(db *pg.DB) error {\n\tcount, err := db.Model(u).WherePK().Count()\n\n\tif count < 1 {\n\t\treturn errors.New(\"User does not exist\")\n\t}\n\t_, err = db.Model(u).WherePK().Update()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (u *UserTest) Update(user User) error {\n\treturn nil\n}", "func UpdateSecUserById(m *SecUser) (err error) {\n\to := orm.NewOrm()\n\tv := SecUser{Id: m.Id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Update(m); err == nil {\n\t\t\tfmt.Println(\"Number of records updated in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func (us UserService) Update(userDto dto.UserEditDto) int64 {\n\tuserModel := model.User{\n\t\tId: userDto.Id,\n\t}\n\tc := userDao.Update(&userModel, map[string]interface{}{\n\t\t\"mobile\": userDto.Mobile,\n\t\t\"department_id\": userDto.DepartmentId,\n\t\t\"status\": userDto.Status,\n\t\t\"title\": userDto.Title,\n\t\t\"realname\": userDto.Realname,\n\t\t\"sex\": userDto.Sex,\n\t\t\"email\": userDto.Email,\n\t})\n\tus.AssignRole(strconv.Itoa(userDto.Id), strings.Split(userDto.Roles, \",\"))\n\treturn c.RowsAffected\n}", "func (m *Manager) updateUser(ctx context.Context, p tg.InputUserClass) (*tg.User, error) {\n\t// TODO(tdakkota): batch requests.\n\tusers, err := m.api.UsersGetUsers(ctx, []tg.InputUserClass{p})\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"get users\")\n\t}\n\n\tif len(users) < 1 {\n\t\treturn nil, errors.Errorf(\"got empty result for %+v\", p)\n\t}\n\n\tif err := m.applyUsers(ctx, users...); err != nil {\n\t\treturn nil, errors.Wrap(err, \"update users\")\n\t}\n\n\tuser, ok := users[0].AsNotEmpty()\n\tif !ok {\n\t\treturn nil, errors.New(\"got empty user\")\n\t}\n\tif user.Self {\n\t\tm.me.Store(user)\n\t}\n\n\treturn user, nil\n}", "func (a Authorizer) UpdateUser(username string, m map[string]interface{}, currUserName string) error {\n\tvar (\n\t\thash []byte\n\t)\n\n\tuser, err := a.userDao.User(username)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", username, err)\n\t\treturn err\n\t}\n\n\tif val, ok := m[\"oldpassword\"]; ok {\n\t\top := val.(string)\n\t\tmatch := bcrypt.CompareHashAndPassword(user.Hash, []byte(op))\n\t\tif match != nil {\n\t\t\tlogger.Get().Error(\"Old password doesnt match\")\n\t\t\treturn mkerror(\"Old password doesnt match\" + err.Error())\n\t\t} else {\n\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\tp := val.(string)\n\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. 
error: %v\", username, err)\n\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t}\n\t\t\t\tuser.Hash = hash\n\t\t\t}\n\t\t}\n\t} else {\n\t\tcurUser, e := a.userDao.User(currUserName)\n\t\tif e != nil {\n\t\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", currUserName, e)\n\t\t\treturn e\n\t\t}\n\t\tif curUser.Role == \"admin\" {\n\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\tp := val.(string)\n\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t}\n\t\t\t\tuser.Hash = hash\n\t\t\t}\n\t\t} else {\n\t\t\tlogger.Get().Error(\"Error saving the password for user since no previledge: %s. error: %v\", username, err)\n\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t}\n\t}\n\n\tif val, ok := m[\"email\"]; ok {\n\t\te := val.(string)\n\t\tuser.Email = e\n\t}\n\n\tif val, ok := m[\"notificationenabled\"]; ok {\n\t\tn := val.(bool)\n\t\tuser.NotificationEnabled = n\n\t}\n\n\tif val, ok := m[\"status\"]; ok {\n\t\ts := val.(bool)\n\t\tuser.Status = s\n\t}\n\n\terr = a.userDao.SaveUser(user)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error saving the user: %s to DB. error: %v\", username, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\n\t// w.Header().Set(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\t// w.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t// w.Header().Set(\"Access-Control-Allow-Methods\", \"PUT\")\n\t// w.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\n\t// get the userid from the request params, key is \"id\"\n\tparams := mux.Vars(r)\n\n\t// convert the id type from string to int\n\tid, err := strconv.Atoi(params[\"id\"])\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to convert the string into int. %v\", err)\n\t}\n\n\t// create an empty user of type models.User\n\tvar user TempUsers\n\n\t// decode the json request to user\n\terr = json.NewDecoder(r.Body).Decode(&user)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to decode the request body. %v\", err)\n\t}\n\n\tdb := createConnection()\n\t// close the db connection\n\tdefer db.Close()\n\n\t// create the update sql query\n\tsqlStatement := `UPDATE users SET full_name=$2, email=$3, mobile_no=$4, username=$5, passwd=$6, created_at=$7 WHERE userid=$1`\n\n\t// execute the sql statement\n\tres, err := db.Exec(sqlStatement, id, user.FullName, user.Email, user.MobileNo, user.UserName, user.Password, time.Now())\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to execute the query. %v\", err)\n\t}\n\n\t// check how many rows affected\n\trowsAffected, err := res.RowsAffected()\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Error while checking the affected rows. %v\", err)\n\t}\n\n\tif rowsAffected > 0 {\n\t\tmsg := map[string]string{\"msg\": \"Updated Successfully.\"}\n\t\tjson.NewEncoder(w).Encode(msg)\n\t} else {\n\t\tmsg := map[string]string{\"msg\": \"Unable to Update, ID does not exists.\"}\n\t\tjson.NewEncoder(w).Encode(msg)\n\t}\n}", "func (*UserSvr) UpdateByName(req *UpdateByNameReq, rsp *UpdateByNameRsp) int {\n\titem := &req.UserItem\n\tif item.Username == \"\" || item.Nickname == \"\" || item.Profile == \"\" {\n\t\treturn common.ErrArg\n\t}\n\tdb := common.GetDb()\n\tresult, err := db.Exec(\"UPDATE users set nickname=? , profile = ? 
where username=?\",\n\t\treq.UserItem.Nickname, req.UserItem.Profile, req.UserItem.Username)\n\tif err != nil {\n\t\tlog.Printf(\"Update failed,err:%v\", err)\n\t\treturn common.ErrDB\n\t}\n\trowsaffected, err := result.RowsAffected()\n\tif err != nil || rowsaffected != 1 {\n\t\tlog.Printf(\"failed, RowsAffected:%d err:%v\", rowsaffected, err)\n\t\treturn common.ErrArg\n\t}\n\t// here should use redis, but found that this redis lib dose not use conection pool,\n\t// not useful to bench test\n\treturn 0\n}", "func updateUser(col *mgo.Collection, user *User) {\n\tcol.Upsert(bson.M{\"id\": user.ID}, user)\n}", "func Update() error {\n\tuser := &Users{\n\t\tName: \"维尼\",\n\t\tCreated: time.Now(),\n\t}\n\n\ti, err := engine.Id(1).Update(user)\n\tif err == nil {\n\t\treturn nil\n\t} else if i <= 0 {\n\t\treturn errors.New(\"更新失败\")\n\t}\n\n\treturn nil\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (u *User) Update() error {\n\tvar prevUser User\n\terr := db.Todo.Get(&prevUser,\n\t\t`Select * from users where id = $1 or email = $2`,\n\t\tu.ID, u.Email)\n\tif err != nil {\n\t\treturn errors.New(\"No user 
with specified ID to update\")\n\t}\n\n\tif u.Password != nil && *u.Password != \"\" {\n\t\tif len(*u.Password) < 6 {\n\t\t\treturn errors.New(\"Password needs to be at least four characters long\")\n\t\t}\n\t\thash, err := bcrypt.GenerateFromPassword([]byte(*u.Password), bcrypt.DefaultCost)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Here2\")\n\t\t\treturn err\n\t\t}\n\n\t\tencryptedPassword := string(hash)\n\t\tu.EncryptedPassword = &encryptedPassword\n\t}\n\n\t_, err = db.Todo.Exec(\n\t\t`UPDATE users SET\n\t\temail=$2,\n\t\tfirst_name=$3,\n\t\tlast_name=$4,\n\t\tencrypted_password=$5\n\t WHERE id=$1`,\n\t\tprevUser.ID,\n\t\tu.Email,\n\t\tu.FirstName,\n\t\tu.LastName,\n\t\tu.EncryptedPassword)\n\tif err != nil {\n\t\tfmt.Printf(\"Here1\")\n\t\treturn err\n\t}\n\treturn nil\n}", "func UsersUpdate(c buffalo.Context) error {\n\ttx := c.Value(\"tx\").(*pop.Connection)\n\tuser := &models.User{}\n\terr := tx.Eager(\"Contracts.Boss\").Find(user, c.Param(\"user_id\"))\n\tif err != nil {\n\t\tc.Flash().Add(\"warning\", \"Cannot find that user.\")\n\t\treturn c.Redirect(307, \"/\")\n\t}\n\n\tc.Request().ParseForm()\n\tcurr := c.Request().Form[\"CurrentPassword\"]\n\tnew := c.Request().Form[\"NewPassword\"]\n\tif len(curr) == 0 || len(new) == 0 {\n\t\tc.Flash().Add(\"warning\", \"Form is incomplete.\")\n\t\treturn c.Redirect(302, \"/users/%s\", user.ID)\n\t}\n\n\tuser.Password = curr[0]\n\tif user.Authenticate() != true {\n\t\tc.Flash().Add(\"warning\", \"Password does not match the one on record.\")\n\t\treturn c.Redirect(303, \"/users/%s\", user.ID)\n\t}\n\n\t// Update password on User and generate hash.\n\tuser.Password = new[0]\n\t_, err = user.UpdatePassword(tx)\n\tif err != nil {\n\t\tc.Flash().Add(\"warning\", \"Error saving new password.\")\n\t\treturn c.Redirect(303, \"/users/%s\", user.ID)\n\t}\n\n\tc.Flash().Add(\"success\", \"Password changed.\")\n\treturn c.Redirect(303, \"/users/%s\", user.ID)\n}", "func UpdateUser(ctx iris.Context) {\n\tvar (\n\t\tuser model.User\n\t\tnewUser model.User\n\t\tresult iris.Map\n\t)\n\tid := ctx.Params().Get(\"id\") // get id by params\n\tdb := config.GetDatabaseConnection()\n\tdefer db.Close()\n\terr := db.First(&user, id).Error\n\tif err != nil {\n\t\tresult = iris.Map{\n\t\t\t\"error\": \"true\",\n\t\t\t\"status\": iris.StatusBadRequest,\n\t\t\t\"message\": \"user not found\",\n\t\t\t\"result\": nil,\n\t\t}\n\t}\n\tctx.ReadJSON(&newUser)\n\terr = db.Model(&user).Updates(newUser).Error\n\tif err != nil {\n\t\tresult = iris.Map{\n\t\t\t\"error\": \"true\",\n\t\t\t\"status\": iris.StatusBadRequest,\n\t\t\t\"message\": \"error when update user\",\n\t\t\t\"result\": err.Error(),\n\t\t}\n\t} else {\n\t\tresult = iris.Map{\n\t\t\t\"error\": \"false\",\n\t\t\t\"status\": iris.StatusOK,\n\t\t\t\"message\": \"success update user\",\n\t\t\t\"result\": newUser,\n\t\t}\n\t}\n\tctx.JSON(result)\n\treturn\n}", "func (a Authorizer) UpdateUser(username string, m map[string]interface{}, currUserName string) error {\n\tvar (\n\t\thash []byte\n\t\tupdated bool\n\t)\n\tuser, err := a.userDao.User(username)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error retrieving the user: %s. 
error: %v\", username, err)\n\t\treturn err\n\t}\n\tif user.Type == authprovider.Internal {\n\t\tif val, ok := m[\"oldpassword\"]; ok {\n\t\t\top := val.(string)\n\t\t\tmatch := bcrypt.CompareHashAndPassword(user.Hash, []byte(op))\n\t\t\tif match != nil {\n\t\t\t\tlogger.Get().Error(\"Old password doesnt match\")\n\t\t\t\treturn mkerror(\"Old password doesnt match\" + err.Error())\n\t\t\t} else {\n\t\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\t\tp := val.(string)\n\t\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t\t}\n\t\t\t\t\tuser.Hash = hash\n\t\t\t\t\tupdated = true\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tcurUser, e := a.userDao.User(currUserName)\n\t\t\tif e != nil {\n\t\t\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", currUserName, e)\n\t\t\t\treturn e\n\t\t\t}\n\t\t\tif curUser.Role == \"admin\" {\n\t\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\t\tp := val.(string)\n\t\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t\t}\n\t\t\t\t\tuser.Hash = hash\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tlogger.Get().Error(\"Error saving the password for user since no previledge: %s. error: %v\", username, err)\n\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t}\n\t\t}\n\t}\n\tif val, ok := m[\"email\"]; ok {\n\t\te := val.(string)\n\t\tuser.Email = e\n\t\tupdated = true\n\t}\n\n\tif val, ok := m[\"notificationenabled\"]; ok {\n\t\tn := val.(bool)\n\t\tuser.NotificationEnabled = n\n\t}\n\n\tif val, ok := m[\"status\"]; ok {\n\t\ts := val.(bool)\n\t\tuser.Status = s\n\t}\n\n\tif updated {\n\t\terr = a.userDao.SaveUser(user)\n\t\tif err != nil {\n\t\t\tlogger.Get().Error(\"Error saving details for the user: %s to DB. 
error: %v\", username, err)\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (srv *UsersService) UpdateHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"UpdateHandler\")\n\tuser := GetRequestedUser(ctx)\n\tif user == nil {\n\t\t// Returns a \"404 StatusNotFound\" response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\trawData, err := ctx.GetRawData()\n\tif err != nil {\n\t\tlogger.Error(\"cannot read body\", \"err\", err)\n\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update user.\")\n\t\treturn\n\t}\n\n\tcurrentUser := GetCurrentUser(ctx)\n\tif currentUser.UID == user.UID ||\n\t\tcurrentUser.RoleName == \"root\" ||\n\t\tcurrentUser.RoleName == \"admin\" {\n\n\t\terr = srv.userForm.Update(user, currentUser, rawData)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot update a user\", \"err\", err)\n\t\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\t\treturn\n\t\t}\n\n\t\told, err := srv.Repository.GetUsersRepository().FindByUID(user.UID)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot found user\", \"err\", err)\n\t\t\tsrv.ResponseService.NotFound(ctx)\n\t\t\treturn\n\t\t}\n\n\t\terr = srv.userLoaderService.LoadUserCompletely(old)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot load user\", \"err\", err)\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\ttx := srv.Repository.GetUsersRepository().DB.Begin()\n\t\terr = srv.userCreator.Update(user, tx)\n\t\tif err != nil {\n\t\t\ttx.Rollback()\n\t\t\t// Returns a \"400 StatusBadRequest\" response\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\tif currentUser.UID != user.UID &&\n\t\t\t(currentUser.RoleName == \"admin\" || currentUser.RoleName == \"root\") {\n\t\t\tsrv.SystemLogsService.LogModifyUserProfileAsync(old, user, currentUser.UID)\n\t\t}\n\n\t\ttx.Commit()\n\t}\n\n\t// Returns a \"204 StatusNoContent\" response\n\tctx.JSON(http.StatusNoContent, nil)\n}", "func (h *UserRepos) Update(ctx context.Context, w http.ResponseWriter, r *http.Request, params map[string]string) error {\n\n\tctxValues, err := webcontext.ContextValues(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclaims, err := auth.ClaimsFromContext(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//\n\treq := new(user.UserUpdateRequest)\n\tdata := make(map[string]interface{})\n\tf := func() (bool, error) {\n\t\tif r.Method == http.MethodPost {\n\t\t\terr := r.ParseForm()\n\t\t\tif err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\n\t\t\tdecoder := schema.NewDecoder()\n\t\t\tdecoder.IgnoreUnknownKeys(true)\n\n\t\t\tif err := decoder.Decode(req, r.PostForm); err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\t\t\treq.ID = claims.Subject\n\n\t\t\terr = h.UserRepo.Update(ctx, claims, *req, ctxValues.Now)\n\t\t\tif err != nil {\n\t\t\t\tswitch errors.Cause(err) {\n\t\t\t\tdefault:\n\t\t\t\t\tif verr, ok := weberror.NewValidationError(ctx, err); ok {\n\t\t\t\t\t\tdata[\"validationErrors\"] = verr.(*weberror.Error)\n\t\t\t\t\t\treturn false, nil\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif r.PostForm.Get(\"Password\") != \"\" {\n\t\t\t\tpwdReq := new(user.UserUpdatePasswordRequest)\n\n\t\t\t\tif err := decoder.Decode(pwdReq, r.PostForm); err != nil {\n\t\t\t\t\treturn false, err\n\t\t\t\t}\n\t\t\t\tpwdReq.ID = claims.Subject\n\n\t\t\t\terr = h.UserRepo.UpdatePassword(ctx, claims, 
*pwdReq, ctxValues.Now)\n\t\t\t\tif err != nil {\n\t\t\t\t\tswitch errors.Cause(err) {\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tif verr, ok := weberror.NewValidationError(ctx, err); ok {\n\t\t\t\t\t\t\tdata[\"validationErrors\"] = verr.(*weberror.Error)\n\t\t\t\t\t\t\treturn false, nil\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn false, err\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Display a success message to the user.\n\t\t\twebcontext.SessionFlashSuccess(ctx,\n\t\t\t\t\"Profile Updated\",\n\t\t\t\t\"User profile successfully updated.\")\n\n\t\t\treturn true, web.Redirect(ctx, w, r, \"/user\", http.StatusFound)\n\t\t}\n\n\t\treturn false, nil\n\t}\n\n\tend, err := f()\n\tif err != nil {\n\t\treturn web.RenderError(ctx, w, r, err, h.Renderer, TmplLayoutBase, TmplContentErrorGeneric, web.MIMETextHTMLCharsetUTF8)\n\t} else if end {\n\t\treturn nil\n\t}\n\n\tusr, err := h.UserRepo.ReadByID(ctx, claims, claims.Subject)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif req.ID == \"\" {\n\t\treq.FirstName = &usr.FirstName\n\t\treq.LastName = &usr.LastName\n\t\treq.Email = &usr.Email\n\t\treq.Timezone = usr.Timezone\n\t}\n\n\tdata[\"user\"] = usr.Response(ctx)\n\n\tdata[\"timezones\"], err = h.GeoRepo.ListTimezones(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdata[\"form\"] = req\n\n\tif verr, ok := weberror.NewValidationError(ctx, webcontext.Validator().Struct(user.UserUpdateRequest{})); ok {\n\t\tdata[\"userValidationDefaults\"] = verr.(*weberror.Error)\n\t}\n\n\tif verr, ok := weberror.NewValidationError(ctx, webcontext.Validator().Struct(user.UserUpdatePasswordRequest{})); ok {\n\t\tdata[\"passwordValidationDefaults\"] = verr.(*weberror.Error)\n\t}\n\n\treturn h.Renderer.Render(ctx, w, r, TmplLayoutBase, \"user-update.gohtml\", web.MIMETextHTMLCharsetUTF8, http.StatusOK, data)\n}", "func (f *Factory) UpdateUser(id string,firstname string, lastname string, age int) * domain.User {\n\treturn &domain.User{\n\t\tID:\t\t\tid,\t\t\n\t\tFirstname: firstname,\n\t\tLastname: lastname,\n\t\tAge: age,\n\t}\n\n}", "func updateUser(user UserID, params map[string]interface{}, client *Client) error {\n\treturn client.Put(params, \"/access/users/\"+user.ToString())\n}", "func (a *Api) UpdateUserAfterFailedLogin(ctx context.Context, u *User) error {\n\tif u.FailedLogin == nil {\n\t\tu.FailedLogin = new(FailedLoginInfos)\n\t}\n\tu.FailedLogin.Count++\n\tu.FailedLogin.Total++\n\tif u.FailedLogin.Count >= a.ApiConfig.MaxFailedLogin {\n\t\tnextAttemptTime := time.Now().Add(time.Minute * time.Duration(a.ApiConfig.DelayBeforeNextLoginAttempt))\n\t\tu.FailedLogin.NextLoginAttemptTime = nextAttemptTime.Format(time.RFC3339)\n\t}\n\treturn a.Store.UpsertUser(ctx, u)\n}", "func (c *UserRepoImpl) Update(id int, req *model.User) (*model.User, error) {\n\tuser := new(model.User)\n\n\tif err := c.db.Table(\"user\").Where(\"user_id = ?\", id).First(&user).Update(&req).Error; err != nil {\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"update user data : error \")\n\t}\n\n\treturn user, nil\n}", "func (s *Database) UpdateUser(user UserPartner, conditions UserPartner) error {\n\tc, err := s.Engine.Update(user, conditions)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif c == 0 {\n\t\treturn errors.New(\"Khong tim thay user\")\n\t}\n\treturn err\n}", "func userUpdateProfile(u *ldap.Entry, buffer *bytes.Buffer, updateType string) bool {\n\t// We lock the whole function so we dont reuse the same connection for multiple logging attempts\n\tmutexProfile.Lock()\n\tdefer mutexProfile.Unlock()\n\n\t// We initilaise the 
connection pool the first time the function is called and reuse it\n\t// This is reuse the connections rather than creating a pool each invocation\n\tonceProfile.Do(func() {\n\t\tprofileAPI = apiLib.NewXmlmcInstance(ldapImportConf.InstanceID)\n\t\tprofileAPI.SetAPIKey(ldapImportConf.APIKey)\n\t\tprofileAPI.SetTimeout(5)\n\t})\n\n\tUserID := getFeildValue(u, \"UserID\", buffer)\n\tbuffer.WriteString(loggerGen(1, \"Processing User Profile Data \"+UserID))\n\n\tprofileAPI.OpenElement(\"profileData\")\n\tprofileAPI.SetParam(\"userID\", UserID)\n\tvalue := \"\"\n\t//-- Loop Through UserProfileMapping\n\tfor key := range userProfileArray {\n\t\tname := userProfileArray[key]\n\t\tfeild := userProfileMappingMap[name]\n\n\t\tif feild == \"manager\" {\n\t\t\t//-- Process User manager if enabled\n\t\t\tif ldapImportConf.UserManagerMapping.Enabled {\n\t\t\t\t//-- Action is Update\n\t\t\t\tif updateType == \"Update\" && ldapImportConf.UserManagerMapping.Action != createString {\n\t\t\t\t\tvalue = getManagerFromLookup(u, buffer)\n\t\t\t\t}\n\t\t\t\t//-- Action is Create\n\t\t\t\tif updateType == \"Create\" && ldapImportConf.UserManagerMapping.Action != updateString {\n\t\t\t\t\tvalue = getManagerFromLookup(u, buffer)\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\t//-- Get Value From LDAP\n\t\t\t\tvalue = getFeildValueProfile(u, name, buffer)\n\t\t\t}\n\n\t\t} else {\n\t\t\t//-- Get Value From LDAP\n\t\t\tvalue = getFeildValueProfile(u, name, buffer)\n\t\t}\n\n\t\t//-- if we have Value then set it\n\t\tif value != \"\" {\n\t\t\tprofileAPI.SetParam(feild, value)\n\t\t}\n\t}\n\n\tprofileAPI.CloseElement(\"profileData\")\n\t//-- Check for Dry Run\n\tif configDryRun != true {\n\t\tXMLCreate, xmlmcErr := profileAPI.Invoke(\"admin\", \"userProfileSet\")\n\t\tvar xmlRespon xmlmcResponse\n\t\tif xmlmcErr != nil {\n\t\t\tbuffer.WriteString(loggerGen(4, \"Unable to Update User Profile: \"+fmt.Sprintf(\"%v\", xmlmcErr)))\n\t\t\treturn false\n\t\t}\n\t\terr := xml.Unmarshal([]byte(XMLCreate), &xmlRespon)\n\t\tif err != nil {\n\t\t\tbuffer.WriteString(loggerGen(4, \"Unable to Update User Profile: \"+fmt.Sprintf(\"%v\", err)))\n\n\t\t\treturn false\n\t\t}\n\t\tif xmlRespon.MethodResult != constOK {\n\t\t\tprofileSkippedCountInc()\n\t\t\tif xmlRespon.State.ErrorRet == noValuesToUpdate {\n\t\t\t\treturn true\n\t\t\t}\n\t\t\terr := errors.New(xmlRespon.State.ErrorRet)\n\t\t\tbuffer.WriteString(loggerGen(4, \"Unable to Update User Profile: \"+fmt.Sprintf(\"%v\", err)))\n\t\t\treturn false\n\t\t}\n\t\tprofileCountInc()\n\t\tbuffer.WriteString(loggerGen(1, \"User Profile Update Success\"))\n\t\treturn true\n\n\t}\n\t//-- DEBUG XML TO LOG FILE\n\tvar XMLSTRING = profileAPI.GetParam()\n\tbuffer.WriteString(loggerGen(1, \"User Profile Update XML \"+XMLSTRING))\n\tprofileSkippedCountInc()\n\tprofileAPI.ClearParam()\n\treturn true\n\n}", "func UpdateUserById(m *User) (err error) {\n\to := orm.NewOrm()\n\tv := User{Id: m.Id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Update(m); err == nil {\n\t\t\tfmt.Println(\"Number of records updated in database:\", num)\n\t\t}\n\t}\n\treturn err\n}", "func (us *UserService) Update(u *User, changePassword bool) error {\n\toldUser, err := us.Datasource.Get(u.ID)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !oldUser.IsAdmin {\n\t\tif oldUser.ID != u.ID {\n\t\t\treturn httperror.PermissionDenied(\"update\", \"user\", fmt.Errorf(\"permission denied user %d is not granted to update user %d\", oldUser.ID, 
u.ID))\n\t\t}\n\t}\n\n\tif us.UserInterceptor != nil {\n\t\tif err := us.UserInterceptor.PreUpdate(oldUser, u); err != nil {\n\t\t\treturn httperror.InternalServerError(fmt.Errorf(\"error while executing user interceptor 'PreUpdate' error %v\", err))\n\t\t}\n\t}\n\n\tvar v Validations\n\n\tif u.Email != oldUser.Email {\n\t\tv |= VDupEmail\n\t}\n\n\tif u.Username != oldUser.Username {\n\t\tv |= VDupUsername\n\t}\n\n\tif changePassword {\n\t\tv |= VPassword\n\t}\n\n\tif err = u.validate(us, us.Config.MinPasswordLength, v); err != nil {\n\t\treturn err\n\t}\n\n\toneAdmin, err := us.OneAdmin()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif oneAdmin {\n\t\tif (oldUser.IsAdmin && !u.IsAdmin) || (oldUser.IsAdmin && !u.Active) {\n\t\t\treturn httperror.New(http.StatusUnprocessableEntity,\n\t\t\t\t\"Could not update user, because no administrator would remain\",\n\t\t\t\tfmt.Errorf(\"could not update user %s action, because no administrator would remain\", oldUser.Username))\n\t\t}\n\t}\n\n\tif changePassword {\n\t\tsalt := crypt.GenerateSalt()\n\t\tsaltedPassword := append(u.PlainPassword[:], salt[:]...)\n\t\tpassword, err := crypt.CryptPassword([]byte(saltedPassword))\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tu.Password = password\n\t\tu.Salt = salt\n\t}\n\n\tif err = us.Datasource.Update(u, changePassword); err != nil {\n\t\treturn err\n\t}\n\n\tu.Password = nil\n\n\tif us.UserInterceptor != nil {\n\t\tif err := us.UserInterceptor.PostUpdate(oldUser, u); err != nil {\n\t\t\tlogger.Log.Errorf(\"error while executing PostUpdate user interceptor method %v\", err)\n\t\t}\n\t}\n\n\tu.PlainPassword = nil\n\n\treturn nil\n}", "func Test_UpdatePwd(t *testing.T) {\n\tuser, err := NewUserPwd(defaultPassword, defaultSaltStr)\n\tif err != nil {\n\t\tt.Error(\"Test fail, can't initialized user password structure, error:\", err)\n\t\tt.FailNow()\n\t}\n\tfor i := 0; i < defaultNumberOfOldPasswords*2; i++ {\n\t\tpwd := []byte(string(defaultPassword) + fmt.Sprintf(\"%d\", i))\n\t\tnewPwd, err := user.UpdatePassword(user.Password, pwd)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Test fail: user: %v, password %v, ('%v') rejected, but it was't used, error: %v\", user, newPwd, string(pwd), err)\n\t\t}\n\t\tfor j := i; j >= i-defaultNumberOfOldPasswords && j >= 0; j-- {\n\t\t\tpwd := []byte(string(defaultPassword) + fmt.Sprintf(\"%d\", j))\n\t\t\tnewPwd, err := user.UpdatePassword(user.Password, pwd)\n\t\t\tif err == nil {\n\t\t\t\tt.Errorf(\"Test fail: password %v ('%v') was already used, but it was accepted, user data: %v\", newPwd, string(pwd), user)\n\t\t\t}\n\t\t}\n\t}\n}", "func (*UsersController) Update(ctx *gin.Context) {\n\tvar updateJSON tat.UpdateUserJSON\n\tctx.Bind(&updateJSON)\n\n\tvar userToUpdate = tat.User{}\n\tfound, err := userDB.FindByUsername(&userToUpdate, updateJSON.Username)\n\tif !found {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"user with username %s does not exist\", updateJSON.Username)})\n\t\treturn\n\t} else if err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": fmt.Errorf(\"Error while fetching user with username %s\", updateJSON.Username)})\n\t\treturn\n\t}\n\n\tif strings.TrimSpace(updateJSON.NewFullname) == \"\" || strings.TrimSpace(updateJSON.NewEmail) == \"\" {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"Invalid Fullname %s or Email %s\", updateJSON.NewFullname, updateJSON.NewEmail)})\n\t\treturn\n\t}\n\n\terr2 := userDB.Update(&userToUpdate, strings.TrimSpace(updateJSON.NewFullname), 
strings.TrimSpace(updateJSON.NewEmail))\n\tif err2 != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Sprintf(\"Update %s user to fullname %s and email %s failed : %s\", updateJSON.Username, updateJSON.NewFullname, updateJSON.NewEmail, err2.Error())})\n\t\treturn\n\t}\n\n\tctx.JSON(http.StatusCreated, gin.H{\"info\": \"user updated\"})\n}", "func (userRepository UserRepository) Update(userId uint64, user models.User) error {\n\tstatement, err := userRepository.db.Prepare(\n\t\t\"update users set name = ?, nick = ?, email = ? where id = ?\",\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer statement.Close()\n\n\tif _, err = statement.Exec(user.Name, user.Nick, user.Email, userId); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (repository Users) Update(ID uint64, user models.User) error {\n\n\tstatement, error := repository.db.Prepare(\"UPDATE users SET name = ?, nick = ?, email = ? where id = ?\")\n\n\tif error != nil {\n\t\treturn error\n\t}\n\n\tdefer statement.Close()\n\n\tif _, error = statement.Exec(user.Name, user.Nick, user.Email, ID); error != nil {\n\t\treturn error\n\t}\n\n\treturn nil\n}", "func (dm *DatabaseManager) UserUpdate(internalID int, userID string, userName string, pass string, email string, groupID int) error {\n\tif len(userID) > 20 {\n\t\treturn errors.New(\"error: len(userID) > 20\")\n\t}\n\n\tif len(userName) > 256 {\n\t\treturn errors.New(\"error: len(userName) > 256\")\n\t}\n\n\tif len(pass) > 50 {\n\t\treturn errors.New(\"error: len(pass) > 50\")\n\t}\n\tpassHashArr := sha512.Sum512([]byte(pass))\n\n\tif len(email) > 50 {\n\t\treturn errors.New(\"error: len(email) > 50\")\n\t}\n\n\t_, err := dm.db.Update(&User{\n\t\tUid: userID,\n\t\tUserName: userName,\n\t\tPassHash: passHashArr[:],\n\t\tEmail: email,\n\t\tGroupID: groupID,\n\t})\n\n\treturn err\n}", "func (handler *Handler) handleUserUpdate(w http.ResponseWriter, r *http.Request) {\n\n\t//We have gone through the auth, so we should know the id of the logged in user\n\tloggedInUser := r.Context().Value(\"user\").(int) //Grab the id of the user that send the request\n\n\t//Now load the current user from the repo\n\tuser, err := handler.userHelper.GetUser(loggedInUser)\n\n\t//Check for an error\n\tif err != nil {\n\t\tutils.ReturnJsonError(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\t//decode the request body into struct with all of the info specified and failed if any error occur\n\terr = json.NewDecoder(r.Body).Decode(user)\n\tif err != nil {\n\t\tutils.ReturnJsonError(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\n\t}\n\n\t//Now update the user\n\tuser, err = handler.userHelper.updateUser(loggedInUser, user)\n\n\t//Check to see if the user was created\n\tif err == nil {\n\t\tutils.ReturnJson(w, http.StatusAccepted, user)\n\t} else {\n\t\tutils.ReturnJsonError(w, http.StatusForbidden, err)\n\t}\n\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.AccountName(); ok {\n\t\tif err := user.AccountNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"account_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"account_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffType(); ok {\n\t\tif err := user.StaffTypeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_type\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_type\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.FamilyName(); ok {\n\t\tif err := user.FamilyNameValidator(v); err != nil 
{\n\t\t\treturn &ValidationError{Name: \"family_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"family_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GivenName(); ok {\n\t\tif err := user.GivenNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"given_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"given_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.DisplayName(); ok {\n\t\tif err := user.DisplayNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"display_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"display_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IDNumber(); ok {\n\t\tif err := user.IDNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"id_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Sex(); ok {\n\t\tif err := user.SexValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"sex\", err: fmt.Errorf(\"ent: validator failed for field \\\"sex\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PhoneNumber(); ok {\n\t\tif err := user.PhoneNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Address(); ok {\n\t\tif err := user.AddressValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"address\", err: fmt.Errorf(\"ent: validator failed for field \\\"address\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffID(); ok {\n\t\tif err := user.StaffIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_id\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_id\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PersonalEmail(); ok {\n\t\tif err := user.PersonalEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"personal_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"personal_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IntranetWorkEmail(); ok {\n\t\tif err := user.IntranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"intranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ExtranetWorkEmail(); ok {\n\t\tif err := user.ExtranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"extranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"extranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (u *User) Update(db XODB) error {\n\tvar err error\n\n\t// if doesn't exist, bail\n\tif !u._exists {\n\t\treturn errors.New(\"update failed: does not exist\")\n\t}\n\n\t// if deleted, bail\n\tif u._deleted {\n\t\treturn errors.New(\"update failed: marked for deletion\")\n\t}\n\n\t// sql query\n\tconst sqlstr = `UPDATE test_database.users SET ` +\n\t\t`username = ?, created_at = ?` +\n\t\t` WHERE user_id = ?`\n\n\t// run query\n\tXOLog(sqlstr, u.Username, u.CreatedAt, u.UserID)\n\t_, err = db.Exec(sqlstr, u.Username, u.CreatedAt, u.UserID)\n\treturn err\n}", "func TestUpdateUserWithPassword(t *testing.T) {\n\tdb := database.Connect()\n\tu := User{\n\t\tEmail: \"[email protected]\",\n\t\tPassword: \"123\",\n\t}\n\tr := u.Create(db)\n\tif r != true {\n\t\tt.Errorf(\"Expected successful create, got %t\", r)\n\t}\n\n\tu.Password 
= \"321\"\n\tr = u.Update(db)\n\tif r != true {\n\t\tt.Errorf(\"Expected successful update, got %t\", r)\n\t}\n}", "func (db *MySQLDB) UpdateUser(ctx context.Context, user *User) error {\n\tfLog := mysqlLog.WithField(\"func\", \"UpdateUser\").WithField(\"RequestID\", ctx.Value(constants.RequestID))\n\texist, err := db.IsUserRecIDExist(ctx, user.RecID)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.IsUserRecIDExist got %s\", err.Error())\n\t\treturn err\n\t}\n\tif !exist {\n\t\treturn ErrNotFound\n\t}\n\tenabled := 0\n\tsuspended := 0\n\tenable2fa := 0\n\tif user.Enabled {\n\t\tenabled = 1\n\t}\n\tif user.Suspended {\n\t\tsuspended = 1\n\t}\n\tif user.Enable2FactorAuth {\n\t\tenable2fa = 1\n\t}\n\n\tq := \"UPDATE HANSIP_USER SET EMAIL=?,HASHED_PASSPHRASE=?,ENABLED=?, SUSPENDED=?,LAST_SEEN=?,LAST_LOGIN=?,FAIL_COUNT=?,ACTIVATION_CODE=?,ACTIVATION_DATE=?,TOTP_KEY=?,ENABLE_2FE=?,TOKEN_2FE=?,RECOVERY_CODE=? WHERE REC_ID=?\"\n\n\tfLog.Infof(\"Updating user %s\", user.Email)\n\t_, err = db.instance.ExecContext(ctx, q,\n\t\tuser.Email, user.HashedPassphrase, enabled, suspended, user.LastSeen, user.LastLogin, user.FailCount, user.ActivationCode,\n\t\tuser.ActivationDate, user.UserTotpSecretKey, enable2fa, user.Token2FA, user.RecoveryCode, user.RecID)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext got %s. SQL = %s\", err.Error(), q)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error UpdateUser\",\n\t\t\tSQL: q,\n\t\t}\n\t}\n\treturn nil\n}", "func UpdateUser(db *gorm.DB, w http.ResponseWriter, r *http.Request) {\n\tuser := models.AppUser{}\n\tdecoder := json.NewDecoder(r.Body)\n\tdefer r.Body.Close()\n\tdecoder.DisallowUnknownFields()\n\tif err := decoder.Decode(&user); err != nil {\n\t\trespondJSON(w, http.StatusBadRequest, JSONResponse{models.AppUser{}, \"Error petición mal estructurada\"})\n\t\treturn\n\t}\n\tuserTemp := getUserOrNull(db, user.AppUserID, w, r)\n\tif userTemp == nil {\n\t\trespondJSON(w, http.StatusBadRequest, JSONResponse{Message: \"Error el usuario no existe\"})\n\t\treturn\n\t}\n\tif userTemp.CompanyID != user.CompanyID {\n\t\trespondJSON(w, http.StatusBadRequest, JSONResponse{Message: \"Este usuario no pertenece a su organización\"})\n\t\treturn\n\t}\n\tif err := db.Model(&user).Where(\"company_id = ?\", user.CompanyID).Omit(\"AppUserID\", \"CompanyID\", \"AppUserCreationDate\", \"AppUserPassword\").Save(user).Error; err != nil {\n\t\trespondJSON(w, http.StatusInternalServerError, JSONResponse{Message: \"Error interno del servidor\"})\n\t\treturn\n\t}\n\trespondJSON(w, http.StatusOK, JSONResponse{user, \"Actualización realizada!\"})\n}", "func UpdateAccountByRequest(userName, newPassWord string, db *sql.DB) error {\n\tquery := `\n\tUPDATE \"users\" SET\n\t\tpassword=$1\n\tWHERE \n\t\tusername = $2;`\n\t_, err := db.Exec(query, newPassWord, userName)\n\tif err != nil {\n\t\tlogrus.WithFields(logrus.Fields{}).Errorf(\"[UpdateAccountByRequest] Update DB err %v\", err)\n\t\treturn errors.New(\"Lỗi hệ thống, vui lòng thử lại\")\n\t}\n\treturn nil\n}", "func UpdateUser(user *models.User, id string) (err error) {\n\tfmt.Println(user)\n\tconfig.DB.Save(user)\n\treturn nil\n}", "func UpdateHandler(w http.ResponseWriter, r *http.Request) {\n\t_, _, ok := r.BasicAuth()\n\tif !ok {\n\t\tw.Header().Set(\"WWW-Authenticate\", fmt.Sprintf(`Basic realm=\"%s\"`, BasicAuthRealm))\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tw.Write([]byte(http.StatusText(http.StatusUnauthorized) + \"\\n\"))\n\t\treturn\n\t}\n\tif !reqIsAdmin(r) {\n\t\thttp.Error(w, 
http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tvar bad bool\n\tvar badmin bool\n\tif r.FormValue(\"ad\") != \"\" {\n\t\tvar err error\n\t\tbad, err = strconv.ParseBool(r.FormValue(\"ad\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tif r.FormValue(\"admin\") != \"\" {\n\t\tvar err error\n\t\tbadmin, err = strconv.ParseBool(r.FormValue(\"admin\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tu := &User{\n\t\tUsername: strings.ToLower(r.FormValue(\"username\")),\n\t\tPassword: r.FormValue(\"password\"),\n\t\tAdmin: badmin,\n\t\tAD: bad,\n\t\tNamespaces: strings.Split(r.FormValue(\"namespaces\"), \",\"),\n\t}\n\tu, uerr := u.UpdateUser()\n\tif uerr != nil {\n\t\thttp.Error(w, uerr.Error(), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tfmt.Fprintf(w, \"User updated\\n\")\n}", "func (self Users) Update() {\n\tsqlStatement := `UPDATE users SET username = $2 WHERE id = $1`\n\t_, err := self.DB.Exec(sqlStatement, self.Id, self.UserName)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func migrateNonexistUserTest(ctx context.Context, s *testing.State, cryptohome *hwsec.CryptohomeClient) {\n\t// Migrating the key of non-exist user should fail.\n\tif err := cryptohome.ChangeVaultPassword(ctx, username, oldPassword, util.PasswordLabel, newPassword); err == nil {\n\t\ts.Fatal(\"Password was successfully changed for non-existent user; want: should have failed\")\n\t}\n}", "func UpdateUser(c *gin.Context) {\n\tvar user, data, condition Users\n\n\tuserID, _ := strconv.ParseUint(c.Param(\"id\"), 10, 64)\n\tcondition.ID = uint(userID)\n\tuser.FindOne(condition)\n\n\tif err := c.BindJSON(&data); err == nil {\n\t\treturnMessage := user.Update(data)\n\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"status\": returnMessage.Status,\n\t\t\t\"message\": returnMessage.Description,\n\t\t})\n\t} else {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\n\t\t\t\"status\": http.StatusInternalServerError,\n\t\t\t\"message\": err.Error(),\n\t\t})\n\t}\n}", "func (us UserService) Update(dto dto.UserEditDto) int64 {\n\tuserModel := model.User{\n\t\tId: dto.Id,\n\t\tUsername: dto.Username,\n\t\tMobile: dto.Mobile,\n\t\tDepartmentId: dto.DepartmentId,\n\t}\n\tc := userDao.Update(&userModel)\n\treturn c.RowsAffected\n}", "func (ur *UserRepository) Update(ctx context.Context, id uint, u user.User) error {\n\tq := `\n\tUPDATE users set email=$1, location=$2, games_won=$3, games_lost=$4, updated_at=$5\n\t\tWHERE id=$6;\n\t`\n\n\tstmt, err := ur.Data.DB.PrepareContext(ctx, q)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer stmt.Close()\n\n\t_, err = stmt.ExecContext(\n\t\tctx, u.Email, u.Location,\n\t\tu.GamesWon, u.GamesLost, time.Now(), id,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (u *usecase) Update(ctx context.Context, id string, user *UpdateUser) error {\n\tvalidate = validator.New()\n\tif err := validate.Struct(user); err != nil {\n\t\tvalidationErrors := err.(validator.ValidationErrors)\n\t\treturn validationErrors\n\t}\n\n\tif err := u.repository.Update(ctx, id, user); err != nil {\n\t\treturn errors.Wrap(err, \"error updating user\")\n\t}\n\treturn nil\n}" ]
[ "0.6680948", "0.6657655", "0.66514474", "0.65688694", "0.6541573", "0.6437556", "0.64088184", "0.6333889", "0.62890565", "0.6288243", "0.6285996", "0.6218925", "0.6218643", "0.6199982", "0.6194088", "0.61515987", "0.61501783", "0.6130199", "0.611097", "0.60609686", "0.600802", "0.5977153", "0.5963815", "0.59612864", "0.5961191", "0.59555423", "0.5955181", "0.594811", "0.5946051", "0.59421635", "0.59379536", "0.59293747", "0.59182715", "0.59160143", "0.59100485", "0.5888969", "0.58760476", "0.5874726", "0.58682436", "0.5861854", "0.5859844", "0.5857042", "0.58538204", "0.5848351", "0.5845618", "0.58418024", "0.5840947", "0.5840821", "0.58358264", "0.5833355", "0.58239454", "0.58173305", "0.5816132", "0.5806528", "0.5801734", "0.57934105", "0.5786592", "0.5778422", "0.5772355", "0.5772283", "0.57700646", "0.57683563", "0.57630366", "0.5758647", "0.5757312", "0.5754527", "0.57401884", "0.5735797", "0.57356954", "0.5732108", "0.57293826", "0.5726176", "0.57256806", "0.57253444", "0.5725038", "0.5716635", "0.5696275", "0.56877285", "0.56776667", "0.5676215", "0.5673641", "0.5669524", "0.56646234", "0.56620973", "0.5661959", "0.5660061", "0.56584525", "0.56560063", "0.5652929", "0.56515896", "0.56503105", "0.5640612", "0.5640488", "0.5640144", "0.5637101", "0.5635631", "0.563343", "0.5629615", "0.5627286", "0.5620835" ]
0.589277
35
Verifies if the user was updated
func TestGetUserServicePatched(t *testing.T) {
	user1, err := GetUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
	assert.Equal(t, user1.Name, new_name_user_01)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (u *User) canUpdate(ctx context.Context, user entity.User) error {\n\toriginal, err := u.passwordClient.One(ctx, user.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn u.hasher.Compare(user.Password, original.Hash)\n}", "func (h *UserRepos) Update(ctx context.Context, w http.ResponseWriter, r *http.Request, params map[string]string) error {\n\n\tctxValues, err := webcontext.ContextValues(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclaims, err := auth.ClaimsFromContext(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//\n\treq := new(user.UserUpdateRequest)\n\tdata := make(map[string]interface{})\n\tf := func() (bool, error) {\n\t\tif r.Method == http.MethodPost {\n\t\t\terr := r.ParseForm()\n\t\t\tif err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\n\t\t\tdecoder := schema.NewDecoder()\n\t\t\tdecoder.IgnoreUnknownKeys(true)\n\n\t\t\tif err := decoder.Decode(req, r.PostForm); err != nil {\n\t\t\t\treturn false, err\n\t\t\t}\n\t\t\treq.ID = claims.Subject\n\n\t\t\terr = h.UserRepo.Update(ctx, claims, *req, ctxValues.Now)\n\t\t\tif err != nil {\n\t\t\t\tswitch errors.Cause(err) {\n\t\t\t\tdefault:\n\t\t\t\t\tif verr, ok := weberror.NewValidationError(ctx, err); ok {\n\t\t\t\t\t\tdata[\"validationErrors\"] = verr.(*weberror.Error)\n\t\t\t\t\t\treturn false, nil\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif r.PostForm.Get(\"Password\") != \"\" {\n\t\t\t\tpwdReq := new(user.UserUpdatePasswordRequest)\n\n\t\t\t\tif err := decoder.Decode(pwdReq, r.PostForm); err != nil {\n\t\t\t\t\treturn false, err\n\t\t\t\t}\n\t\t\t\tpwdReq.ID = claims.Subject\n\n\t\t\t\terr = h.UserRepo.UpdatePassword(ctx, claims, *pwdReq, ctxValues.Now)\n\t\t\t\tif err != nil {\n\t\t\t\t\tswitch errors.Cause(err) {\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tif verr, ok := weberror.NewValidationError(ctx, err); ok {\n\t\t\t\t\t\t\tdata[\"validationErrors\"] = verr.(*weberror.Error)\n\t\t\t\t\t\t\treturn false, nil\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn false, err\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Display a success message to the user.\n\t\t\twebcontext.SessionFlashSuccess(ctx,\n\t\t\t\t\"Profile Updated\",\n\t\t\t\t\"User profile successfully updated.\")\n\n\t\t\treturn true, web.Redirect(ctx, w, r, \"/user\", http.StatusFound)\n\t\t}\n\n\t\treturn false, nil\n\t}\n\n\tend, err := f()\n\tif err != nil {\n\t\treturn web.RenderError(ctx, w, r, err, h.Renderer, TmplLayoutBase, TmplContentErrorGeneric, web.MIMETextHTMLCharsetUTF8)\n\t} else if end {\n\t\treturn nil\n\t}\n\n\tusr, err := h.UserRepo.ReadByID(ctx, claims, claims.Subject)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif req.ID == \"\" {\n\t\treq.FirstName = &usr.FirstName\n\t\treq.LastName = &usr.LastName\n\t\treq.Email = &usr.Email\n\t\treq.Timezone = usr.Timezone\n\t}\n\n\tdata[\"user\"] = usr.Response(ctx)\n\n\tdata[\"timezones\"], err = h.GeoRepo.ListTimezones(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdata[\"form\"] = req\n\n\tif verr, ok := weberror.NewValidationError(ctx, webcontext.Validator().Struct(user.UserUpdateRequest{})); ok {\n\t\tdata[\"userValidationDefaults\"] = verr.(*weberror.Error)\n\t}\n\n\tif verr, ok := weberror.NewValidationError(ctx, webcontext.Validator().Struct(user.UserUpdatePasswordRequest{})); ok {\n\t\tdata[\"passwordValidationDefaults\"] = verr.(*weberror.Error)\n\t}\n\n\treturn h.Renderer.Render(ctx, w, r, TmplLayoutBase, \"user-update.gohtml\", web.MIMETextHTMLCharsetUTF8, http.StatusOK, data)\n}", "func Update(user User) error {\n\n}", 
"func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func checkUserFormChanged(uForm storage.User, originalUser storage.User) (bool, storage.User) {\n\tfmt.Printf(\"---originalUser = %v, type = %T\\n\", originalUser.FirstName, originalUser.FirstName)\n\tfmt.Printf(\"---user in form = %v, type = %T\\n\", uForm.FirstName, uForm.FirstName)\n\n\tchanged := false\n\tif uForm.FirstName != originalUser.FirstName && uForm.FirstName != \"\" {\n\t\toriginalUser.FirstName = uForm.FirstName\n\t\tchanged = true\n\t}\n\tif uForm.LastName != originalUser.LastName && uForm.LastName != \"\" {\n\t\toriginalUser.LastName = uForm.LastName\n\t\tchanged = true\n\t}\n\tif uForm.Mail != originalUser.Mail && uForm.Mail != \"\" {\n\t\toriginalUser.Mail = uForm.Mail\n\t\tchanged = true\n\t}\n\tif uForm.Address != originalUser.Address && uForm.Address != \"\" {\n\t\toriginalUser.Address = uForm.Address\n\t\tchanged = true\n\t}\n\tif uForm.PostNrAndPlace != originalUser.PostNrAndPlace && uForm.PostNrAndPlace != \"\" {\n\t\toriginalUser.PostNrAndPlace = uForm.PostNrAndPlace\n\t\tchanged = true\n\t}\n\tif uForm.PhoneNr != originalUser.PhoneNr 
&& uForm.PhoneNr != \"\" {\n\t\toriginalUser.PhoneNr = uForm.PhoneNr\n\t\tchanged = true\n\t}\n\tif uForm.OrgNr != originalUser.OrgNr && uForm.OrgNr != \"\" {\n\t\toriginalUser.OrgNr = uForm.OrgNr\n\t\tchanged = true\n\t}\n\tif uForm.CountryID != originalUser.CountryID && uForm.CountryID != \"\" {\n\t\toriginalUser.CountryID = uForm.CountryID\n\t\tchanged = true\n\t}\n\tif uForm.BankAccount != originalUser.BankAccount && uForm.BankAccount != \"\" {\n\t\toriginalUser.BankAccount = uForm.BankAccount\n\t\tchanged = true\n\t}\n\treturn changed, originalUser\n}", "func UserUpdate(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.AccountName(); ok {\n\t\tif err := user.AccountNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"account_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"account_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.StaffType(); ok {\n\t\tif err := user.StaffTypeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_type\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_type\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.FamilyName(); ok {\n\t\tif err := user.FamilyNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"family_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"family_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.GivenName(); ok {\n\t\tif err := user.GivenNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"given_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"given_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.DisplayName(); ok {\n\t\tif err := user.DisplayNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"display_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"display_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.IDNumber(); ok {\n\t\tif err := user.IDNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"id_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Sex(); ok {\n\t\tif err := user.SexValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"sex\", err: fmt.Errorf(\"ent: validator failed for field \\\"sex\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.PhoneNumber(); ok {\n\t\tif err := user.PhoneNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Address(); ok {\n\t\tif err := user.AddressValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"address\", err: fmt.Errorf(\"ent: validator failed for field \\\"address\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.StaffID(); ok {\n\t\tif err := user.StaffIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_id\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_id\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.PersonalEmail(); ok {\n\t\tif err := user.PersonalEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"personal_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"personal_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.IntranetWorkEmail(); ok {\n\t\tif err := user.IntranetWorkEmailValidator(v); err != nil {\n\t\t\treturn 
&ValidationError{Name: \"intranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"intranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.ExtranetWorkEmail(); ok {\n\t\tif err := user.ExtranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"extranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"extranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func UpdateUser(w http.ResponseWriter, r *http.Request) {\n\n}", "func UpdateUser(c *gin.Context) {}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Fullname(); ok {\n\t\tif err := user.FullnameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"fullname\", err: fmt.Errorf(\"ent: validator failed for field \\\"fullname\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Phone(); ok {\n\t\tif err := user.PhoneValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Bio(); ok {\n\t\tif err := user.BioValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"bio\", err: fmt.Errorf(\"ent: validator failed for field \\\"bio\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Intro(); ok {\n\t\tif err := user.IntroValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intro\", err: fmt.Errorf(\"ent: validator failed for field \\\"intro\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GithubProfile(); ok {\n\t\tif err := user.GithubProfileValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"github_profile\", err: fmt.Errorf(\"ent: validator failed for field \\\"github_profile\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ProfilePictureURL(); ok {\n\t\tif err := user.ProfilePictureURLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"profile_picture_url\", err: fmt.Errorf(\"ent: validator failed for field \\\"profile_picture_url\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Status(); ok {\n\t\tif err := user.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(\"ent: validator failed for field \\\"status\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func updateUser(c *gin.Context) {\n\tvar user user\n\tuserID := c.Param(\"id\")\n\n\tdb.First(&user, userID)\n\n\tif user.Id == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No user found!\"})\n\t\treturn\n\t}\n\n\tdb.Model(&user).Update(\"login\", c.PostForm(\"login\"))\n password,_ := HashPassword(c.PostForm(\"password\"))\n\tdb.Model(&user).Update(\"password\", password)\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": \"User updated 
successfully!\"})\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.AccountName(); ok {\n\t\tif err := user.AccountNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"account_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"account_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffType(); ok {\n\t\tif err := user.StaffTypeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_type\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_type\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.FamilyName(); ok {\n\t\tif err := user.FamilyNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"family_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"family_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.GivenName(); ok {\n\t\tif err := user.GivenNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"given_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"given_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.DisplayName(); ok {\n\t\tif err := user.DisplayNameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"display_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"display_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IDNumber(); ok {\n\t\tif err := user.IDNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"id_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"id_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Sex(); ok {\n\t\tif err := user.SexValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"sex\", err: fmt.Errorf(\"ent: validator failed for field \\\"sex\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PhoneNumber(); ok {\n\t\tif err := user.PhoneNumberValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"phone_number\", err: fmt.Errorf(\"ent: validator failed for field \\\"phone_number\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Address(); ok {\n\t\tif err := user.AddressValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"address\", err: fmt.Errorf(\"ent: validator failed for field \\\"address\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.StaffID(); ok {\n\t\tif err := user.StaffIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"staff_id\", err: fmt.Errorf(\"ent: validator failed for field \\\"staff_id\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.PersonalEmail(); ok {\n\t\tif err := user.PersonalEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"personal_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"personal_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.IntranetWorkEmail(); ok {\n\t\tif err := user.IntranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"intranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"intranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.ExtranetWorkEmail(); ok {\n\t\tif err := user.ExtranetWorkEmailValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"extranet_work_email\", err: fmt.Errorf(\"ent: validator failed for field \\\"extranet_work_email\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func TestUpdateUserNotValid(t *testing.T) {\n\tdb := database.Connect()\n\tu := User{\n\t\tEmail: \"[email protected]\",\n\t}\n\tr := u.Update(db)\n\tif r != false 
{\n\t\tt.Errorf(\"Expected failed update, got %t\", r)\n\t}\n}", "func updateUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\tvar data map[string]string\n\tvar username = p.ByName(\"username\")\n\n\tresp, err := getBody(req)\n\tif err != nil {\n\t\twriteJSON(res, 500, jsMap{\"status\": \"Server Error\"})\n\t\treturn\n\t}\n\tif err := json.Unmarshal(resp, &data); err != nil {\n\t\tlog.Println(\"updateUser:\", err)\n\t\twriteJSON(res, 400, jsMap{\"status\": \"Invalid Data\"})\n\t\treturn\n\t}\n\n\tuser, err := getUser(username)\n\tif err != nil {\n\t\twriteJSON(res, 404, jsMap{\"status\": \"Not Found\"})\n\t\treturn\n\t}\n\t// if the user has 2fa enabled, verify their totp key\n\tif user.TOTP != \"\" {\n\t\terr = verifyTOTP(user, data[\"passcode\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\n\tswitch p.ByName(\"setting\") {\n\tcase \"password\":\n\t\t_, err = authenticateUser(user, username, data[\"password\"])\n\t\tif err != nil {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Password\"})\n\t\t\treturn\n\t\t}\n\n\t\tv, err := srpEnv.Verifier([]byte(username), []byte(data[\"new_password\"]))\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tih, verif := v.Encode()\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts \n\t\t\tSET ih = $1, verifier = $2\n\t\t\tWHERE username = $3;`, ih, verif, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_enable\":\n\t\tif !totp.Validate(data[\"passcode\"], data[\"secret\"]) {\n\t\t\twriteJSON(res, 401, jsMap{\"status\": \"Wrong Passcode\"})\n\t\t\treturn\n\t\t}\n\n\t\tkey, _ := hex.DecodeString(secretKey)\n\t\tplaintext := []byte(data[\"secret\"])\n\t\tblock, err := aes.NewCipher(key)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tciphertext := make([]byte, aes.BlockSize+len(plaintext))\n\t\tiv := ciphertext[:aes.BlockSize]\n\t\tif _, err := io.ReadFull(rand.Reader, iv); err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tmode := cipher.NewCBCEncrypter(block, iv)\n\t\tmode.CryptBlocks(ciphertext[aes.BlockSize:], plaintext)\n\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = $1\n\t\t\tWHERE username = $2;`,\n\t\t\thex.EncodeToString(ciphertext),\n\t\t\tusername,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\tcase \"2fa_disable\":\n\t\t_, err = db.Exec(`\n\t\t\tUPDATE accounts\n\t\t\tSET totp = ''\n\t\t\tWHERE username = $1;`, username,\n\t\t)\n\t\tif err != nil {\n\t\t\twriteJSON(res, 500, jsMap{\"status\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\twriteJSON(res, 200, jsMap{\"status\": \"OK\"})\n}", "func (p UserSignupChangedPredicate) Update(e runtimeevent.UpdateEvent) bool {\n\tif !checkMetaObjects(changedLog, e) {\n\t\treturn false\n\t}\n\treturn e.ObjectNew.GetGeneration() != e.ObjectOld.GetGeneration() ||\n\t\tp.annotationChanged(e, toolchainv1alpha1.UserSignupUserEmailAnnotationKey) ||\n\t\tp.labelChanged(e, toolchainv1alpha1.UserSignupUserEmailHashLabelKey)\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", 
err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (this *UserController) Update() {\n\tflash \t := beego.ReadFromRequest(&this.Controller)\n\n\tid, _ := strconv.Atoi(this.Ctx.Input.Param(\":id\"))\n\tuser := &models.User{Id:id}\n\tuser.GetOne()\n\n\tnamesurname \t\t:= this.GetString(\"name_surname\")\n\tusername \t\t\t:= this.GetString(\"user_name\")\n\temail \t\t\t\t:= this.GetString(\"email\")\n\tpassword\t \t\t:= this.GetString(\"password\")\n\turl\t\t\t \t\t:= this.GetString(\"url\")\n\tinfo\t\t\t\t:= this.GetString(\"info\")\n\n\tvalid := validation.Validation{}\n\n\tvalid.Email(email, \"Email\")\n\n\tvalid.Required(username, \"Username\")\n\tvalid.Required(password, \"Password\")\n\n\tvalid.MaxSize(username, 20, \"Username\")\n\tvalid.MaxSize(password, 16, \"Password\")\n\n\tswitch {\n\tcase valid.HasErrors():\n\t\tfor _, err := range valid.Errors {\n\t\t\tlog.Println(err.Key, err.Message)\n\t\t}\n\t\tvalid.Error(\"Problem creating user!\")\n\t\tflash.Error(\"Problem creating user!\")\n\t\tflash.Store(&this.Controller)\n\tdefault:\n\t\tuser := &models.User{\n\t\t\tNameSurname\t\t:namesurname,\n\t\t\tUserName\t\t:username,\n\t\t\tEmail\t\t\t:email,\n\t\t\tPassword\t\t:Md5(password),\n\t\t\tUrl\t\t\t\t:url,\n\t\t\tInfo\t\t\t:info,\n\t\t\tRegisterTime \t:time.Now(),\n\t\t}\n\t\tswitch {\n\t\t\tcase user.ExistUserName():\n\t\t\t\tvalid.Error(\"This username is in use!\")\n\t\t\t\tflash.Error(\"This username is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tcase user.ExistEmail():\n\t\t\t\tvalid.Error(\"This email is in use!\")\n\t\t\t\tflash.Error(\"This email is in use!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\tdefault:\n\t\t\t\terr := user.Update()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tvalid.Error(fmt.Sprintf(\"%v\", err))\n\t\t\t\tflash.Notice(\"User updated successfully!\")\n\t\t\t\tflash.Store(&this.Controller)\n\t\t\t\tthis.Redirect(\"/admin/users\", 302)\n\t\t\t\treturn\n\t\t}\n\n\t}\n\n\tredirectUrl := \"/admin/users/edit/\" + strconv.Itoa(id)\n\tthis.Redirect(redirectUrl, 302)\n\tthis.Abort(\"302\")\n\treturn\n}", "func UserUpdatePost(w http.ResponseWriter, r *http.Request) {\n\tsess := session.Instance(r)\n\tvar updateReq webpojo.UserUpdateReq\n\tvar updateResp = webpojo.UserUpdateResp{}\n\n\t// Prevent brute force login attempts by not hitting MySQL and pretending like it was invalid :-)\n\tif sess.Values[SessLoginAttempt] == nil ||\n\t\t(sess.Values[UserRole] != webpojo.UserSupervisor && sess.Values[UserRole] != webpojo.UserAdmin) {\n\t\tlog.Println(\"Authorized request\")\n\t\tupdateResp.StatusCode = constants.StatusCode_429\n\t\tupdateResp.Message = constants.Msg_429\n\n\t\tReturnJsonResp(w, updateResp)\n\t\treturn\n\t}\n\n\tbody, readErr := ioutil.ReadAll(r.Body)\n\tif readErr != nil {\n\t\tlog.Println(readErr)\n\t\tReturnError(w, readErr)\n\t\treturn\n\t}\n\n\tif len(body) == 0 {\n\t\tlog.Println(\"Empty json payload\")\n\t\tRecordLoginAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tupdateResp.StatusCode = constants.StatusCode_400\n\t\tupdateResp.Message = constants.Msg_400\n\t\tReturnJsonResp(w, updateResp)\n\t\treturn\n\t}\n\n\t//log.Println(\"r.Body\", string(body))\n\tupdateReq = webpojo.UserUpdateReq{}\n\tjsonErr := json.Unmarshal(body, &updateReq)\n\tif jsonErr != nil 
{\n\t\tlog.Println(jsonErr)\n\t\tReturnError(w, jsonErr)\n\t\treturn\n\t}\n\tlog.Println(fmt.Sprintf(\"%v is updating user: %v\", sess.Values[UserName], updateReq.Email))\n\n\tuser := model.User{}\n\tuser.Email = updateReq.Email\n\tuser.Password, _ = passhash.HashString(updateReq.Password)\n\tuser.FirstName = updateReq.FirstName\n\tuser.LastName = updateReq.LastName\n\tuser.UserRole = updateReq.UserRole\n\tuser.ID = updateReq.UserID\n\tdbErr := model.UserUpdate(user)\n\n\tif dbErr != nil {\n\t\tlog.Println(dbErr)\n\t\tRecordLoginAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tupdateResp.StatusCode = constants.StatusCode_500\n\t\tupdateResp.Message = constants.Msg_500\n\t} else {\n\t\tlog.Println(\"Updated successfully\")\n\t\tupdateResp.StatusCode = constants.StatusCode_200\n\t\tupdateResp.Message = constants.Msg_200\n\t}\n\n\tReturnJsonResp(w, updateResp)\n}", "func patchAPIUserHandler(w http.ResponseWriter, r *http.Request, _ map[string]string) {\n\tuserName := sessionHandler.GetUserName(r)\n\tuserID, err := getUserID(userName)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tdecoder := json.NewDecoder(r.Body)\n\tvar json JSONUser\n\terr = decoder.Decode(&json)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user id is over 0\n\tif json.ID < 1 {\n\t\thttp.Error(w, \"Wrong user id.\", http.StatusInternalServerError)\n\t\treturn\n\t} else if userID != json.ID { // Make sure the authenticated user is only changing his/her own data. TODO: Make sure the user is admin when multiple users have been introduced\n\t\thttp.Error(w, \"You don't have permission to change this data.\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Get old user data to compare\n\ttempUser, err := database.RetrieveUser(json.ID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t// Make sure user email is provided\n\tif json.Email == \"\" {\n\t\tjson.Email = string(tempUser.Email)\n\t}\n\t// Make sure user name is provided\n\tif json.Name == \"\" {\n\t\tjson.Name = string(tempUser.Name)\n\t}\n\t// Make sure user slug is provided\n\tif json.Slug == \"\" {\n\t\tjson.Slug = tempUser.Slug\n\t}\n\t// Check if new name is already taken\n\tif json.Name != string(tempUser.Name) {\n\t\t_, err = database.RetrieveUserByName([]byte(json.Name))\n\t\tif err == nil {\n\t\t\t// The new user name is already taken. Assign the old name.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Name = string(tempUser.Name)\n\t\t}\n\t}\n\t// Check if new slug is already taken\n\tif json.Slug != tempUser.Slug {\n\t\t_, err = database.RetrieveUserBySlug(json.Slug)\n\t\tif err == nil {\n\t\t\t// The new user slug is already taken. 
Assign the old slug.\n\t\t\t// TODO: Return error that will be displayed in the admin interface.\n\t\t\tjson.Slug = tempUser.Slug\n\t\t}\n\t}\n\tuser := structure.User{ID: json.ID, Name: []byte(json.Name), Slug: json.Slug, Email: []byte(json.Email), Image: []byte(json.Image), Cover: []byte(json.Cover), Bio: []byte(json.Bio), Website: []byte(json.Website), Location: []byte(json.Location)}\n\terr = methods.UpdateUser(&user, userID)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif json.Password != \"\" && (json.Password == json.PasswordRepeated) { // Update password if a new one was submitted\n\t\tencryptedPassword, err := authentication.EncryptPassword(json.Password)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\terr = database.UpdateUserPassword(user.ID, encryptedPassword, date.GetCurrentTime(), json.ID)\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\t// Check if the user name was changed. If so, update the session cookie to the new user name.\n\tif json.Name != string(tempUser.Name) {\n\t\tlogInUser(json.Name, w)\n\t}\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"User settings updated!\"))\n\treturn\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.Username(); ok {\n\t\tif err := user.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (uu *UserUpdate) check() error {\n\tif v, ok := uu.mutation.Tenant(); ok {\n\t\tif err := user.TenantValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"tenant\", err: fmt.Errorf(\"ent: validator failed for field \\\"tenant\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.UUID(); ok {\n\t\tif err := user.UUIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"uuid\", err: fmt.Errorf(\"ent: validator failed for field \\\"uuid\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func Update(mainObj interface{}, newData interface{}) bool {\n\tnewDataVal, mainObjVal := reflect.ValueOf(newData).Elem(), reflect.ValueOf(mainObj).Elem()\n\tfieldCount := newDataVal.NumField()\n\tchanged := false\n\tfor i := 0; i < fieldCount; i++ {\n\t\tnewField := newDataVal.Field(i)\n\t\t// They passed in a value for this field, update our DB user\n\t\tif newField.IsValid() && !IsEmpty(newField) {\n\t\t\tdbField := mainObjVal.Field(i)\n\t\t\tdbField.Set(newField)\n\t\t\tchanged = true\n\t\t}\n\t}\n\treturn changed\n}", "func UpdateUser(userId int64, userData *UserEntry) error {\n _ , nerr := model.Database.Exec(\"UPDATE users SET username = ?, isadmin = ?, email = ? WHERE userid = ?\", userData.Username, userData.IsAdmin, userData.Email, userId)\n if nerr != nil {\n return nerr\n }\n return nil\n}", "func (call *UserUsecaseImpl) Update(user *models.User) (*models.User, error) {\n\n\tstatus := call.userRepo.CheckMail(user)\n\tif !status {\n\t\treturn nil, errors.New(\"Opps.. 
sorry email already use other account\")\n\t}\n\n\tuser, err := common.Encrypt(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn call.userRepo.Update(user)\n}", "func (u *UserTest) Update(user User) error {\n\treturn nil\n}", "func (uuo *UserUpdateOne) check() error {\n\tif v, ok := uuo.mutation.Tenant(); ok {\n\t\tif err := user.TenantValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"tenant\", err: fmt.Errorf(\"ent: validator failed for field \\\"tenant\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uuo.mutation.UUID(); ok {\n\t\tif err := user.UUIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"uuid\", err: fmt.Errorf(\"ent: validator failed for field \\\"uuid\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (u *User) Update(tx *pop.Connection) (*validate.Errors, error) {\n\tu.Email = strings.ToLower(u.Email)\n\t// TODO add ldap chpw functionality\n\treturn tx.ValidateAndUpdate(u)\n}", "func (u *User) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func (u *User) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func (u *User) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func (u *User) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func (u *User) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func UpdatePassword(context *gin.Context) {\n\tuserProfile := models.UserProfile{}\n\tcontext.ShouldBindBodyWith(&userProfile, binding.JSON)\n\n\tmodifiedCount, err := userprofileservice.UpdateByEmail(userProfile)\n\tif modifiedCount == 0 {\n\t\tcontext.JSON(http.StatusNotFound, gin.H{\"message\": \"User not found\"})\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tcontext.JSON(http.StatusInternalServerError, gin.H{\"message\": \"Failed to update user\"})\n\t\tcontext.Abort()\n\t\treturn\n\t}\n\n\tcontext.JSON(http.StatusOK, gin.H{\"message\": \"ok\"})\n}", "func (handler *Handler) handlePasswordUpdate(w http.ResponseWriter, r *http.Request) {\n\n\t//We have gone through the auth, so we should know the id of the logged in user\n\tloggedInUser := r.Context().Value(\"user\").(int) //Grab the id of the user that send the request\n\n\t//Create a new password change object\n\tinfo := updatePasswordChangeStruct{}\n\n\t//Now get the json info\n\terr := json.NewDecoder(r.Body).Decode(&info)\n\tif err != nil {\n\t\tutils.ReturnJsonError(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\n\t}\n\n\t//Now update the password\n\terr = handler.userHelper.passwordChange(loggedInUser, info)\n\n\t//Check to see if the user was created\n\tif err == nil {\n\t\tutils.ReturnJsonStatus(w, http.StatusAccepted, true, \"password_change_success\")\n\t} else {\n\t\tutils.ReturnJsonError(w, http.StatusForbidden, err)\n\t}\n\n}", "func UpdateUser(c *gin.Context) {\n\tuuid := c.Param(\"uuid\")\n\tvar user models.User\n\n\tdb := db.GetDB()\n\tif err := db.Where(\"uuid = ?\", uuid).First(&user).Error; err != nil {\n\t\tc.AbortWithStatus(http.StatusNotFound)\n\t\treturn\n\t}\n\tdb.Where(\"uuid = ?\", uuid)\n\n\tif user.ID != 0 {\n\n\t\tjwtClaims := jwt.ExtractClaims(c)\n\t\tauthUserAccessLevel := jwtClaims[\"access_level\"].(float64)\n\t\tauthUserUUID := jwtClaims[\"uuid\"].(string)\n\t\tif authUserAccessLevel != 1 {\n\t\t\tif authUserUUID != uuid 
{\n\t\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{\n\t\t\t\t\t\"error\": \"Sorry but you can't Update, ONLY admin user can\",\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tvar newUser models.User\n\t\tc.Bind(&newUser)\n\n\t\tif newUser.FirstName != \"\" {\n\t\t\tuser.FirstName = newUser.FirstName\n\t\t}\n\n\t\tif newUser.LastName != \"\" {\n\t\t\tuser.LastName = newUser.LastName\n\t\t}\n\n\t\tif newUser.Email != \"\" {\n\t\t\tuser.Email = newUser.Email\n\t\t}\n\n\t\tif newUser.AccessLevel == 0 || newUser.AccessLevel == 1 {\n\t\t\tuser.AccessLevel = newUser.AccessLevel\n\t\t}\n\n\t\tif !newUser.DateOfBirth.IsZero() {\n\t\t\tuser.DateOfBirth = newUser.DateOfBirth\n\t\t}\n\n\t\t// Update multiple attributes with `struct`, will only update those changed\n\n\t\tif err := db.Save(&user); err != nil {\n\t\t\t// convert array of errors to JSON\n\t\t\terrs := err.GetErrors()\n\n\t\t\tif len(errs) > 0 {\n\t\t\t\tstrErrors := make([]string, len(errs))\n\t\t\t\tfor i, err := range errs {\n\t\t\t\t\tstrErrors[i] = err.Error()\n\t\t\t\t}\n\n\t\t\t\t// return errors\n\t\t\t\tc.JSON(http.StatusUnprocessableEntity, gin.H{\n\t\t\t\t\t\"errors\": strErrors,\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\t// Display modified data in JSON message \"success\"\n\t\tc.JSON(http.StatusOK, &user)\n\n\t}\n\n}", "func (s *usersrvc) Update(ctx context.Context, p *user.UpdateUser) (err error) {\n\ts.logger.Print(\"userProfile.update\")\n\tuserIns, err := TokenToUser(p.Token)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif userIns.IsActivate == false{\n\t\terr = user.MakeCodeInvalide(fmt.Errorf(\"you email is not active, pleace activate email first\"))\n\t\treturn err\n\t}\n\n\terr = db.UserUpdateById(p.Avatar, p.Nickname, int(userIns.ID))\n\tif err != nil{\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (u *User) ValidateUpdate() (bool, []error) {\n\tvar validationErrors []error\n\n\t_, validationErrors = u.DefaultValidate()\n\n\tvalidationErrors = append(validationErrors, u.validateBothCreateAndUpdate()...)\n\n\t// append check here\n\n\treturn len(validationErrors) == 0, validationErrors\n}", "func (ctl UserController) Update(c *gin.Context) {\n\tvar body struct {\n\t\tPassword string `json:\"password\" binding:\"required\"`\n\t}\n\tif err := c.ShouldBindJSON(&body); err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusUnprocessableEntity, err.Error()))\n\t\treturn\n\t}\n\n\tif err := microsoft.NewUser().UpdatePassword(c.Param(\"id\"), c.Param(\"uid\"), body.Password); err != nil {\n\t\tc.JSON(rootCtl.wrap(http.StatusInternalServerError, err.Error()))\n\t\treturn\n\t}\n\n\tc.JSON(rootCtl.wrap(http.StatusOK))\n}", "func (h *auth) Update(c echo.Context) error {\n\t// Filter params\n\tvar params service.UpdateUserParams\n\tif err := c.Bind(&params); err != nil {\n\t\tlog.Println(\"Could not get parameters:\", err)\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"Could not get parameters.\"))\n\t}\n\tparams.UserAgent = c.Request().UserAgent()\n\tparams.Session = currentSession(c)\n\n\tservice := service.NewUser(h.db, h.sessions, params.APIVersion)\n\tupdate, err := service.Update(currentUser(c), params)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn c.JSON(http.StatusOK, update)\n}", "func (_obj *WebApiAuth) SysUser_Update(id int32, req *SysUser, res *SysUser, _opt ...map[string]string) (err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.Write_int32(id, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = req.WriteBlock(_os, 2)\n\tif 
err != nil {\n\t\treturn err\n\t}\n\n\terr = (*res).WriteBlock(_os, 3)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SysUser_Update\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = (*res).ReadBlock(_is, 3, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn nil\n}", "func (UserService) UpdateLoginTime(user dto.UserEditDto) int64 {\n\tu := userDao.Get(user.Id, false)\n\t//u.Status = dto.Status\n\tc := userDao.Update(&u, map[string]interface{}{\n\t\t\"last_login_time\": time.Now(),\n\t})\n\treturn c.RowsAffected\n}", "func (us UserService) VerifyAndReturnUserInfo(loginDto dto.LoginDto) (bool, error, model.User) {\n\tuserModel := userDao.GetByUserName(loginDto.Username)\n\t// Account not exits\n\tif userModel.Id < 1 {\n\t\treturn false, errInvalidAccount, model.User{}\n\t}\n\tif userModel.Status == UserStatusLock {\n\t\treturn false, errAccountLocked, model.User{}\n\t}\n\tlocKey := fmt.Sprintf(viper.GetString(\"login.failRecordKey\"), userModel.Username)\n\tif login.VerifyPassword(loginDto.Password, userModel) {\n\t\t// destroy time records\n\t\t_ = cache.Del(locKey)\n\t\tif us.VerifySmsCodeIfNeeded(userModel) {\n\t\t\tif err := us.Verify2Fa(loginDto.Code, userModel); err != nil {\n\t\t\t\treturn false, errInvalidCode, model.User{}\n\t\t\t}\n\t\t}\n\t\t// update last login time\n\t\t// 判断是否超过了设置的天数间隔,超过再更新登录时间\n\t\t// 这样将实现强制在间隔时间内进行2fa验证\n\t\tif viper.GetBool(\"security.2fa.enabled\") {\n\t\t\tperiod := us.GetNoSmsCodeDurationDay()\n\t\t\tif time.Now().Sub(userModel.LastLoginTime).Seconds() >= float64(24*3600*period) {\n\t\t\t\tus.UpdateLoginTime(dto.UserEditDto{Id: userModel.Id})\n\t\t\t}\n\t\t} else {\n\t\t\tus.UpdateLoginTime(dto.UserEditDto{Id: userModel.Id})\n\t\t}\n\t\treturn true, nil, userModel\n\t} else {\n\t\tif viper.GetBool(\"security.2fa.enabled\") {\n\t\t\tt, _ := cache.Get(locKey)\n\t\t\tfailTimes, _ := strconv.Atoi(t)\n\t\t\t// 累计此次错误,已到达错误上限,所以-1\n\t\t\tif failTimes >= viper.GetInt(\"login.failUntilLock\")-1 {\n\t\t\t\t// lock\n\t\t\t\tif us.UpdateStatus(dto.UserEditStatusDto{Id: userModel.Id, Status: UserStatusLock}) > 0 {\n\t\t\t\t\t// recount\n\t\t\t\t\t_ = cache.Del(locKey)\n\t\t\t\t\treturn false, errAccountLocked, model.User{}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// increase locks let user just can try several times\n\t\t\t\t_ = cache.Increase(locKey)\n\t\t\t\t// set ttl at first time\n\t\t\t\tif failTimes == 0 {\n\t\t\t\t\t_ = cache.Expire(locKey, time.Second*24*3600)\n\t\t\t\t}\n\t\t\t\treturn false, fmt.Errorf(\"密码输入错误,您还有%d次机会\", viper.GetInt(\"login.failUntilLock\")-failTimes-1), model.User{}\n\t\t\t}\n\t\t}\n\t}\n\treturn false, errInvalidAccount, model.User{}\n}", 
"func UpdateAuthorizedUser(c *gin.Context, username string, client *statsd.Client) {\n\tlog.Info(\"updating user\")\n\tvar user entity.User\n\terr := model.GetUserByUsername(&user, username, client)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"error\": err.Error(),\n\t\t})\n\t}\n\t//get the authorized user information\n\toriID := user.ID\n\tvar oriUsername string = *user.Username\n\toriCreatedTime := user.AccountCreated\n\toriPwd := user.Password\n\tc.BindJSON(&user)\n\n\t//if the user wants to change id, username or create time, it's not allowed\n\tif user.ID != oriID || *user.Username != oriUsername || user.AccountCreated != oriCreatedTime {\n\t\tlog.Error(\"id, username and AccountCreated are readonly\")\n\t\tc.JSON(http.StatusBadRequest, gin.H{\n\t\t\t\"error\": \"id, username and AccountCreated are readonly!!!\",\n\t\t})\n\t\treturn\n\t} else if oriPwd == user.Password {\n\t\t//if the user isn't updating password, don't need to check the password\n\t\terr = model.UpdateUserWithSamePwd(&user, user.ID, client)\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t} else {\n\t\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\t\"id\": user.ID,\n\t\t\t\t\"first_name\": user.FirstName,\n\t\t\t\t\"last_name\": user.LastName,\n\t\t\t\t\"username\": user.Username,\n\t\t\t\t\"account_created\": user.AccountCreated,\n\t\t\t\t\"account_updated\": user.AccountUpdated,\n\t\t\t})\n\t\t}\n\t} else {\n\t\t//if the user is updating password, do check the password\n\t\terr = model.UpdateUserWithDiffPwd(&user, user.ID, client)\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t} else {\n\t\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\t\"id\": user.ID,\n\t\t\t\t\"first_name\": user.FirstName,\n\t\t\t\t\"last_name\": user.LastName,\n\t\t\t\t\"username\": user.Username,\n\t\t\t\t\"account_created\": user.AccountCreated,\n\t\t\t\t\"account_updated\": user.AccountUpdated,\n\t\t\t})\n\t\t}\n\t}\n\tlog.Info(\"user updated\")\n}", "func (handler *Handler) handleUserUpdate(w http.ResponseWriter, r *http.Request) {\n\n\t//We have gone through the auth, so we should know the id of the logged in user\n\tloggedInUser := r.Context().Value(\"user\").(int) //Grab the id of the user that send the request\n\n\t//Now load the current user from the repo\n\tuser, err := handler.userHelper.GetUser(loggedInUser)\n\n\t//Check for an error\n\tif err != nil {\n\t\tutils.ReturnJsonError(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\t}\n\n\t//decode the request body into struct with all of the info specified and failed if any error occur\n\terr = json.NewDecoder(r.Body).Decode(user)\n\tif err != nil {\n\t\tutils.ReturnJsonError(w, http.StatusUnprocessableEntity, err)\n\t\treturn\n\n\t}\n\n\t//Now update the user\n\tuser, err = handler.userHelper.updateUser(loggedInUser, user)\n\n\t//Check to see if the user was created\n\tif err == nil {\n\t\tutils.ReturnJson(w, http.StatusAccepted, user)\n\t} else {\n\t\tutils.ReturnJsonError(w, http.StatusForbidden, err)\n\t}\n\n}", "func UpdateUserDetail(w http.ResponseWriter, r *http.Request) {\n\tfLog := userMgmtLogger.WithField(\"func\", \"GetUserDetail\").WithField(\"RequestID\", r.Context().Value(constants.RequestID)).WithField(\"path\", r.URL.Path).WithField(\"method\", r.Method)\n\tparams, err := helper.ParsePathParams(fmt.Sprintf(\"%s/management/user/{userRecId}\", apiPrefix), 
r.URL.Path)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tuser, err := UserRepo.GetUserByRecID(r.Context(), params[\"userRecId\"])\n\tif err != nil {\n\t\tfLog.Errorf(\"UserRepo.GetUserByRecID got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusInternalServerError, err.Error(), nil, nil)\n\t\treturn\n\t}\n\tif user == nil {\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusNotFound, fmt.Sprintf(\"User recid %s not found\", params[\"userRecId\"]), nil, nil)\n\t\treturn\n\t}\n\treq := &UpdateUserRequest{}\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tfLog.Errorf(\"ioutil.ReadAll got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusInternalServerError, err.Error(), nil, nil)\n\t\treturn\n\t}\n\terr = json.Unmarshal(body, req)\n\tif err != nil {\n\t\tfLog.Errorf(\"json.Unmarshal got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusBadRequest, err.Error(), nil, nil)\n\t\treturn\n\t}\n\n\t// if email is changed and enabled = false, send email\n\tsendemail := false\n\tif user.Email != req.Email && req.Enabled == false {\n\t\tuser.ActivationCode = helper.MakeRandomString(6, true, false, false, false)\n\t\tsendemail = true\n\t}\n\n\tif !user.Enable2FactorAuth && req.Enable2FA {\n\t\tuser.UserTotpSecretKey = totp.MakeSecret().Base32()\n\t}\n\n\tuser.Email = req.Email\n\tuser.Enable2FactorAuth = req.Enable2FA\n\tuser.Enabled = req.Enabled\n\tuser.Suspended = req.Suspended\n\n\terr = UserRepo.UpdateUser(r.Context(), user)\n\tif err != nil {\n\t\tfLog.Errorf(\"UserRepo.SaveOrUpdate got %s\", err.Error())\n\t\thelper.WriteHTTPResponse(r.Context(), w, http.StatusInternalServerError, err.Error(), nil, nil)\n\t\treturn\n\t}\n\n\tif sendemail {\n\t\tfLog.Warnf(\"Sending email\")\n\t\tmailer.Send(r.Context(), &mailer.Email{\n\t\t\tFrom: config.Get(\"mailer.from\"),\n\t\t\tFromName: config.Get(\"mailer.from.name\"),\n\t\t\tTo: []string{user.Email},\n\t\t\tCc: nil,\n\t\t\tBcc: nil,\n\t\t\tTemplate: \"EMAIL_VERIFY\",\n\t\t\tData: user,\n\t\t})\n\t}\n\n\tret := make(map[string]interface{})\n\tret[\"rec_id\"] = user.RecID\n\tret[\"email\"] = user.Email\n\tret[\"enabled\"] = user.Enabled\n\tret[\"suspended\"] = user.Suspended\n\tret[\"last_seen\"] = user.LastSeen\n\tret[\"last_login\"] = user.LastLogin\n\tret[\"enabled_2fa\"] = user.Enable2FactorAuth\n\thelper.WriteHTTPResponse(r.Context(), w, http.StatusOK, \"User updated\", nil, ret)\n\n}", "func Update(c *gin.Context) {\n\tuserID, err := getUserID(c.Param(\"user_id\"))\n\tif err != nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\n\tvar newUser users.User\n\tif err := c.ShouldBindJSON(&newUser); err != nil {\n\t\tbdErr := errors.NewBadRequestError(fmt.Sprintf(\"invalid json body %s\", err.Error()))\n\t\tc.JSON(bdErr.Status, bdErr)\n\t\treturn\n\t}\n\n\tnewUser.ID = userID\n\tisPartial := c.Request.Method == http.MethodPatch\n\n\tresult, updateErr := services.UserServ.UpdateUser(newUser, isPartial)\n\tif err != nil {\n\t\tc.JSON(updateErr.Status, updateErr)\n\t\treturn\n\t}\n\n\tisPublic := c.GetHeader(\"X-Public\") == \"true\"\n\tc.JSON(http.StatusOK, result.Marshall(isPublic))\n}", "func (auo *AdministratorUpdateOne) check() error {\n\tif v, ok := auo.mutation.Username(); ok {\n\t\tif err := administrator.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := auo.mutation.Password(); ok {\n\t\tif err := 
administrator.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (srv *UsersService) UpdateHandler(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"UpdateHandler\")\n\tuser := GetRequestedUser(ctx)\n\tif user == nil {\n\t\t// Returns a \"404 StatusNotFound\" response\n\t\tsrv.ResponseService.NotFound(ctx)\n\t\treturn\n\t}\n\n\trawData, err := ctx.GetRawData()\n\tif err != nil {\n\t\tlogger.Error(\"cannot read body\", \"err\", err)\n\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update user.\")\n\t\treturn\n\t}\n\n\tcurrentUser := GetCurrentUser(ctx)\n\tif currentUser.UID == user.UID ||\n\t\tcurrentUser.RoleName == \"root\" ||\n\t\tcurrentUser.RoleName == \"admin\" {\n\n\t\terr = srv.userForm.Update(user, currentUser, rawData)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot update a user\", \"err\", err)\n\t\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\t\treturn\n\t\t}\n\n\t\told, err := srv.Repository.GetUsersRepository().FindByUID(user.UID)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot found user\", \"err\", err)\n\t\t\tsrv.ResponseService.NotFound(ctx)\n\t\t\treturn\n\t\t}\n\n\t\terr = srv.userLoaderService.LoadUserCompletely(old)\n\t\tif err != nil {\n\t\t\tlogger.Error(\"cannot load user\", \"err\", err)\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\ttx := srv.Repository.GetUsersRepository().DB.Begin()\n\t\terr = srv.userCreator.Update(user, tx)\n\t\tif err != nil {\n\t\t\ttx.Rollback()\n\t\t\t// Returns a \"400 StatusBadRequest\" response\n\t\t\tsrv.ResponseService.Error(ctx, responses.CanNotUpdateUser, \"Can't update a user\")\n\t\t\treturn\n\t\t}\n\n\t\tif currentUser.UID != user.UID &&\n\t\t\t(currentUser.RoleName == \"admin\" || currentUser.RoleName == \"root\") {\n\t\t\tsrv.SystemLogsService.LogModifyUserProfileAsync(old, user, currentUser.UID)\n\t\t}\n\n\t\ttx.Commit()\n\t}\n\n\t// Returns a \"204 StatusNoContent\" response\n\tctx.JSON(http.StatusNoContent, nil)\n}", "func UpdateHandler(w http.ResponseWriter, r *http.Request) {\n\t_, _, ok := r.BasicAuth()\n\tif !ok {\n\t\tw.Header().Set(\"WWW-Authenticate\", fmt.Sprintf(`Basic realm=\"%s\"`, BasicAuthRealm))\n\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\tw.Write([]byte(http.StatusText(http.StatusUnauthorized) + \"\\n\"))\n\t\treturn\n\t}\n\tif !reqIsAdmin(r) {\n\t\thttp.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tvar bad bool\n\tvar badmin bool\n\tif r.FormValue(\"ad\") != \"\" {\n\t\tvar err error\n\t\tbad, err = strconv.ParseBool(r.FormValue(\"ad\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tif r.FormValue(\"admin\") != \"\" {\n\t\tvar err error\n\t\tbadmin, err = strconv.ParseBool(r.FormValue(\"admin\"))\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\tu := &User{\n\t\tUsername: strings.ToLower(r.FormValue(\"username\")),\n\t\tPassword: r.FormValue(\"password\"),\n\t\tAdmin: badmin,\n\t\tAD: bad,\n\t\tNamespaces: strings.Split(r.FormValue(\"namespaces\"), \",\"),\n\t}\n\tu, uerr := u.UpdateUser()\n\tif uerr != nil {\n\t\thttp.Error(w, uerr.Error(), http.StatusUnauthorized)\n\t\treturn\n\t}\n\tfmt.Fprintf(w, \"User updated\\n\")\n}", "func (au 
*AdministratorUpdate) check() error {\n\tif v, ok := au.mutation.Username(); ok {\n\t\tif err := administrator.UsernameValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"username\", err: fmt.Errorf(\"ent: validator failed for field \\\"username\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := au.mutation.Password(); ok {\n\t\tif err := administrator.PasswordValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (u *UserCalculator) Changed() bool { return u.changed }", "func (r *Repository) Update(user *User) (bool, error) {\n\tstr := `UPDATE ` + r.tableName + ` SET name = ?, role = ?, status = ?, email = ?, phone = ? WHERE id = ?`\n\t_, err := DB.Exec(str, user.Name, user.Role, user.Status, user.Email, user.Phone, user.ID)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\treturn true, nil\n}", "func (us UserService) Update(userDto dto.UserEditDto) int64 {\n\tuserModel := model.User{\n\t\tId: userDto.Id,\n\t}\n\tc := userDao.Update(&userModel, map[string]interface{}{\n\t\t\"mobile\": userDto.Mobile,\n\t\t\"department_id\": userDto.DepartmentId,\n\t\t\"status\": userDto.Status,\n\t\t\"title\": userDto.Title,\n\t\t\"realname\": userDto.Realname,\n\t\t\"sex\": userDto.Sex,\n\t\t\"email\": userDto.Email,\n\t})\n\tus.AssignRole(strconv.Itoa(userDto.Id), strings.Split(userDto.Roles, \",\"))\n\treturn c.RowsAffected\n}", "func (a Authorizer) UpdateUser(username string, m map[string]interface{}, currUserName string) error {\n\tvar (\n\t\thash []byte\n\t\tupdated bool\n\t)\n\tuser, err := a.userDao.User(username)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", username, err)\n\t\treturn err\n\t}\n\tif user.Type == authprovider.Internal {\n\t\tif val, ok := m[\"oldpassword\"]; ok {\n\t\t\top := val.(string)\n\t\t\tmatch := bcrypt.CompareHashAndPassword(user.Hash, []byte(op))\n\t\t\tif match != nil {\n\t\t\t\tlogger.Get().Error(\"Old password doesnt match\")\n\t\t\t\treturn mkerror(\"Old password doesnt match\" + err.Error())\n\t\t\t} else {\n\t\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\t\tp := val.(string)\n\t\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t\t}\n\t\t\t\t\tuser.Hash = hash\n\t\t\t\t\tupdated = true\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tcurUser, e := a.userDao.User(currUserName)\n\t\t\tif e != nil {\n\t\t\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", currUserName, e)\n\t\t\t\treturn e\n\t\t\t}\n\t\t\tif curUser.Role == \"admin\" {\n\t\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\t\tp := val.(string)\n\t\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t\t}\n\t\t\t\t\tuser.Hash = hash\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tlogger.Get().Error(\"Error saving the password for user since no previledge: %s. 
error: %v\", username, err)\n\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t}\n\t\t}\n\t}\n\tif val, ok := m[\"email\"]; ok {\n\t\te := val.(string)\n\t\tuser.Email = e\n\t\tupdated = true\n\t}\n\n\tif val, ok := m[\"notificationenabled\"]; ok {\n\t\tn := val.(bool)\n\t\tuser.NotificationEnabled = n\n\t}\n\n\tif val, ok := m[\"status\"]; ok {\n\t\ts := val.(bool)\n\t\tuser.Status = s\n\t}\n\n\tif updated {\n\t\terr = a.userDao.SaveUser(user)\n\t\tif err != nil {\n\t\t\tlogger.Get().Error(\"Error saving details for the user: %s to DB. error: %v\", username, err)\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func UserUpdate(w http.ResponseWriter, r *http.Request) {\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\turlUser := urlVars[\"user\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\t// Read POST JSON body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\terr := APIErrorInvalidRequestBody()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Parse pull options\n\tpostBody, err := auth.GetUserFromJSON(body)\n\tif err != nil {\n\t\terr := APIErrorInvalidArgument(\"User\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Get Result Object\n\tuserUUID := auth.GetUUIDByName(urlUser, refStr)\n\tmodified := time.Now().UTC()\n\tres, err := auth.UpdateUser(userUUID, postBody.FirstName, postBody.LastName, postBody.Organization, postBody.Description,\n\t\tpostBody.Name, postBody.Projects, postBody.Email, postBody.ServiceRoles, modified, true, refStr)\n\n\tif err != nil {\n\n\t\t// In case of invalid project or role in post body\n\n\t\tif err.Error() == \"not found\" {\n\t\t\terr := APIErrorNotFound(\"User\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tif strings.HasPrefix(err.Error(), \"invalid\") {\n\t\t\terr := APIErrorInvalidData(err.Error())\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tif strings.HasPrefix(err.Error(), \"duplicate\") {\n\t\t\terr := APIErrorInvalidData(err.Error())\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Output result to JSON\n\tresJSON, err := res.ExportJSON()\n\tif err != nil {\n\t\terr := APIErrExportJSON()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write response\n\toutput = []byte(resJSON)\n\trespondOK(w, output)\n\n}", "func (h *User) Update(w http.ResponseWriter, r *http.Request) {\n\tdefer r.Body.Close()\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\t// @todo we might want extra check that /users/id equals to user.ID received in body\n\tuser, err := validator.UserCreate(body)\n\tif err != nil || user.ID == 0 {\n\t\tlog.Println(err)\n\t\tR.JSON400(w)\n\t\treturn\n\t}\n\n\terr = h.Storage.UpdateUser(user)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tR.JSON500(w)\n\t\treturn\n\t}\n\n\tR.JSON200OK(w)\n}", "func (c *Store) checkProfileUpdates() {\n\tc.ProfileLock.Lock()\n\tdefer c.ProfileLock.Unlock()\n\n\tfor _, v := range c.Profiles {\n\n\t\tif time.Now().After(v.Expires) {\n\t\t\tmsg, err := v.Updater()\n\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Error occurred while updating 
profile.\")\n\t\t\t\tfmt.Println(err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tv.JSONProfile = msg\n\t\t\tv.Expires = time.Now().Add(profileExpiryFrequency)\n\t\t}\n\t}\n}", "func (cl *RestClient) checkupdate(m, room string) bool {\n\tresp, err := restGet(room, cl.client, cl.ip, cl.port)\n\tif err != nil {\n\t\tcl.test.Errorf(\"Error with restGet: %v\", err)\n\t}\n\tif resp.StatusCode == 404 {\n\t\treturn true\n\t}\n\tif resp.StatusCode != 200 {\n\t\tcl.test.Errorf(\"Rest checkupdate got %v want 200\", resp.StatusCode)\n\t}\n\tdec := json.NewDecoder(resp.Body)\n\tvar messages []string\n\terr = dec.Decode(&messages)\n\tif err != nil {\n\t\tcl.test.Errorf(\"Error decoding in Rest checkupdate: %v\", err)\n\t}\n\tresp.Body.Close()\n\tmessage := RemoveTime(messages[len(messages)-1])\n\treturn message == fmt.Sprintf(\"[%v]: %v\", cl.name, m)\n}", "func (us UserService) Update(dto dto.UserEditDto) int64 {\n\tuserModel := model.User{\n\t\tId: dto.Id,\n\t\tUsername: dto.Username,\n\t\tMobile: dto.Mobile,\n\t\tDepartmentId: dto.DepartmentId,\n\t}\n\tc := userDao.Update(&userModel)\n\treturn c.RowsAffected\n}", "func (u *usecase) Update(ctx context.Context, id string, user *UpdateUser) error {\n\tvalidate = validator.New()\n\tif err := validate.Struct(user); err != nil {\n\t\tvalidationErrors := err.(validator.ValidationErrors)\n\t\treturn validationErrors\n\t}\n\n\tif err := u.repository.Update(ctx, id, user); err != nil {\n\t\treturn errors.Wrap(err, \"error updating user\")\n\t}\n\treturn nil\n}", "func UpdateProfile(w http.ResponseWriter, r *http.Request) {\n AuthorizePages(w,r) // Restrict Unauthorized User\n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/update-profile.html\")\n userData := getSession(r)\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if r.Method != http.MethodPost {\n t.Execute(w, nil)\n return\n }\n \n var successMessage string\n var failedMessage string\n var msg string\n msg = \"\"\n \n details := userDetails{\n Userid: userData.UserId,\n FirstName: r.FormValue(\"fname\"),\n LastName: r.FormValue(\"lname\"),\n Emailid: r.FormValue(\"emailid\"),\n Password: r.FormValue(\"pwd2\"),\n }\n if (details.FirstName != \"\" || len(details.FirstName) > 0) {\n if (dbquery.UpdUserProfile(\"First Name\",\"first_name\",details.FirstName,details.Userid)){\n successMessage = \"User First Name Updated Successfully\"\n //w.Write([]byte(\"<script>alert('Email Id Updated Successfully,please login');window.location = '/login'</script>\"))\n }\n }\n if (details.LastName != \"\" || len(details.LastName) > 0){\n if (dbquery.UpdUserProfile(\"Last Name\",\"last_name\",details.LastName,details.Userid)){\n successMessage = \"User Last Name Updated Successfully\"\n //w.Write([]byte(\"<script>alert('Email Id Updated Successfully,please login');window.location = '/login'</script>\"))\n }\n }\n\n if (details.Emailid != \"\" || len(details.Emailid) > 0){\n msg = dbquery.CheckDuplicateEmail(details.Emailid)\n if(dbquery.CheckUserID(details.Userid)) {\n if msg == \"\"{\n if (dbquery.UpdUserProfile(\"Email ID\",\"email_id\",details.Emailid,details.Userid)){\n successMessage = \"User Email Id Updated Successfully\"\n w.Write([]byte(\"<script>alert('Email Id Updated Successfully,please login');window.location = '/logout'</script>\"))\n }\n }else {\n //failedMessage = \"Email Already Exist\" \n }\n\n }else {\n failedMessage = \"There is no User with that User 
Id\"\n }\n \n }\n\n if (details.Password != \"\" || len(details.Password) > 0){\n password := details.Password\n hash, _ := HashPassword(password) \n if dbquery.CheckUserID(details.Userid) {\n if dbquery.UpdUserProfile(\"Password\",\"password\",hash, details.Userid) {\n successMessage = \"Password Updated Successfully\"\n w.Write([]byte(\"<script>alert('Password Updated Successfully,please login');window.location = '/logout'</script>\"))\n }\n }else {\n failedMessage = \"There is no User with that User Id\"\n } \n }\n \n \n t.Execute(w, AllUsersResponse{SuccessMessage: successMessage, FailedMessage: failedMessage,IssueMsg: msg}) \n}", "func (a Authorizer) UpdateUser(username string, m map[string]interface{}, currUserName string) error {\n\tvar (\n\t\thash []byte\n\t)\n\n\tuser, err := a.userDao.User(username)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", username, err)\n\t\treturn err\n\t}\n\n\tif val, ok := m[\"oldpassword\"]; ok {\n\t\top := val.(string)\n\t\tmatch := bcrypt.CompareHashAndPassword(user.Hash, []byte(op))\n\t\tif match != nil {\n\t\t\tlogger.Get().Error(\"Old password doesnt match\")\n\t\t\treturn mkerror(\"Old password doesnt match\" + err.Error())\n\t\t} else {\n\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\tp := val.(string)\n\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t}\n\t\t\t\tuser.Hash = hash\n\t\t\t}\n\t\t}\n\t} else {\n\t\tcurUser, e := a.userDao.User(currUserName)\n\t\tif e != nil {\n\t\t\tlogger.Get().Error(\"Error retrieving the user: %s. error: %v\", currUserName, e)\n\t\t\treturn e\n\t\t}\n\t\tif curUser.Role == \"admin\" {\n\t\t\tif val, ok := m[\"password\"]; ok {\n\t\t\t\tp := val.(string)\n\t\t\t\thash, err = bcrypt.GenerateFromPassword([]byte(p), bcrypt.DefaultCost)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlogger.Get().Error(\"Error saving the password for user: %s. error: %v\", username, err)\n\t\t\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t\t\t}\n\t\t\t\tuser.Hash = hash\n\t\t\t}\n\t\t} else {\n\t\t\tlogger.Get().Error(\"Error saving the password for user since no previledge: %s. error: %v\", username, err)\n\t\t\treturn mkerror(\"couldn't save password: \" + err.Error())\n\t\t}\n\t}\n\n\tif val, ok := m[\"email\"]; ok {\n\t\te := val.(string)\n\t\tuser.Email = e\n\t}\n\n\tif val, ok := m[\"notificationenabled\"]; ok {\n\t\tn := val.(bool)\n\t\tuser.NotificationEnabled = n\n\t}\n\n\tif val, ok := m[\"status\"]; ok {\n\t\ts := val.(bool)\n\t\tuser.Status = s\n\t}\n\n\terr = a.userDao.SaveUser(user)\n\tif err != nil {\n\t\tlogger.Get().Error(\"Error saving the user: %s to DB. 
error: %v\", username, err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (a *Users) Update(w http.ResponseWriter, r *http.Request) {\n\tid := getUserID(r)\n\ta.l.Println(\"[DEBUG] get record id\", id)\n\n\t// fetch the user from the context\n\tacc := r.Context().Value(KeyUser{}).(*models.User)\n\tacc.ID = id\n\ta.l.Println(\"[DEBUG] updating user with id\", acc.ID)\n\n\terr := models.UpdateUser(acc)\n\n\tif err == models.ErrUserNotFound {\n\t\ta.l.Println(\"[ERROR] user not found\", err)\n\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tmodels.ToJSON(&GenericError{Message: \"User not found in database\"}, w)\n\t\treturn\n\t}\n\n\t// write the no content success header\n\tw.WriteHeader(http.StatusNoContent)\n}", "func (r *Repository) update(user *domain.UserInfoModel) error {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tquery := \"UPDATE users SET namee = $1, email = $2, password = $3 WHERE id = $4\"\n\tstmt, err := r.db.PrepareContext(ctx, query)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer stmt.Close()\n\n\tqueryStart := time.Now().Nanosecond() / 1000\n\t_, err = stmt.ExecContext(ctx, user.Name, user.Email, user.PassWord, user.ID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tqueryEnd := time.Now().Nanosecond() / 1000\n\texecutionTime := queryEnd - queryStart\n\tr.insertTimeSpent(\"Update\", executionTime)\n\treturn nil\n}", "func (db *Project) CanUpdate(proj *objects.Project, user users.User) bool {\n\treturn true\n}", "func (uu *UserUpdate) Save(ctx context.Context) (int, error) {\n\tif v, ok := uu.mutation.Name(); ok {\n\t\tif err := user.NameValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"name\", err: fmt.Errorf(\"ent: validator failed for field \\\"name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Email(); ok {\n\t\tif err := user.EmailValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"email\", err: fmt.Errorf(\"ent: validator failed for field \\\"email\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\n\tvar (\n\t\terr error\n\t\taffected int\n\t)\n\tif len(uu.hooks) == 0 {\n\t\taffected, err = uu.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*UserMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tuu.mutation = mutation\n\t\t\taffected, err = uu.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn affected, err\n\t\t})\n\t\tfor i := len(uu.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = uu.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, uu.mutation); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn affected, err\n}", "func (inst *UserN) Staled(onlyFields ...string) bool {\n\tif inst.original == nil {\n\t\tinst.original = &userOriginal{}\n\t}\n\n\tif len(onlyFields) == 0 {\n\n\t\tif inst.Id != inst.original.Id {\n\t\t\treturn true\n\t\t}\n\t\tif inst.Name != inst.original.Name {\n\t\t\treturn true\n\t\t}\n\t\tif inst.Email != inst.original.Email {\n\t\t\treturn true\n\t\t}\n\t\tif inst.Password != inst.original.Password {\n\t\t\treturn true\n\t\t}\n\t\tif inst.RoleId != inst.original.RoleId {\n\t\t\treturn true\n\t\t}\n\t\tif inst.EnterpriseId != inst.original.EnterpriseId {\n\t\t\treturn 
true\n\t\t}\n\t\tif inst.RememberToken != inst.original.RememberToken {\n\t\t\treturn true\n\t\t}\n\t\tif inst.CreatedAt != inst.original.CreatedAt {\n\t\t\treturn true\n\t\t}\n\t\tif inst.UpdatedAt != inst.original.UpdatedAt {\n\t\t\treturn true\n\t\t}\n\t\tif inst.DeletedAt != inst.original.DeletedAt {\n\t\t\treturn true\n\t\t}\n\t} else {\n\t\tfor _, f := range onlyFields {\n\t\t\tswitch strcase.ToSnake(f) {\n\n\t\t\tcase \"id\":\n\t\t\t\tif inst.Id != inst.original.Id {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"name\":\n\t\t\t\tif inst.Name != inst.original.Name {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"email\":\n\t\t\t\tif inst.Email != inst.original.Email {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"password\":\n\t\t\t\tif inst.Password != inst.original.Password {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"role_id\":\n\t\t\t\tif inst.RoleId != inst.original.RoleId {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"enterprise_id\":\n\t\t\t\tif inst.EnterpriseId != inst.original.EnterpriseId {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"remember_token\":\n\t\t\t\tif inst.RememberToken != inst.original.RememberToken {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"created_at\":\n\t\t\t\tif inst.CreatedAt != inst.original.CreatedAt {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"updated_at\":\n\t\t\t\tif inst.UpdatedAt != inst.original.UpdatedAt {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tcase \"deleted_at\":\n\t\t\t\tif inst.DeletedAt != inst.original.DeletedAt {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false\n}", "func Update(c *gin.Context) {\n\tvar form model.UpdateForm\n\terr := c.BindJSON(&form)\n\tif err != nil {\n\t\tfailMsg(c, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tusername, exist := c.Get(\"username\")\n\tif !exist {\n\t\tfailMsg(c, http.StatusUnauthorized, \"user not found\")\n\t\treturn\n\t}\n\tvalid, errMsg := validInfo(form)\n\tif !valid {\n\t\tfailMsg(c, http.StatusUnauthorized, errMsg)\n\t\treturn\n\t}\n\n\terr = model.UpdateUser(username.(string), form)\n\tif err != nil {\n\t\tfailMsg(c, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tc.JSON(http.StatusCreated, gin.H{\n\t\t\"success\": true,\n\t\t\"error\": \"\",\n\t\t\"data\": \"ok\",\n\t})\n}", "func Update() error {\n\tuser := map[string]interface{}{\n\t\t\"name\": \"viney.chow\",\n\t\t\"created\": time.Now().Format(\"2006-01-02 15:04:05\"),\n\t}\n\n\ti, err := orm.SetTable(\"tb_user\").SetPK(\"uid\").Where(\"uid=$1\", 2).Update(user)\n\tif err == nil {\n\t\tfmt.Println(i)\n\t\treturn nil\n\t}\n\n\treturn err\n}", "func (uv *userValidator) Update(user *User) error {\n\n\terr := runUserValFuncs(user,\n\t\tuv.passwordMinLength,\n\t\tuv.bcryptPassword,\n\t\tuv.passwordHashRequired,\n\t\tuv.rememberMinBytes,\n\t\tuv.hmacRemember,\n\t\tuv.rememberHashRequired,\n\t\tuv.normalizeEmail,\n\t\tuv.requireEmail,\n\t\tuv.emailFormat,\n\t\tuv.emailIsAvail)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn uv.UserDB.Update(user)\n}", "func Update(c *gin.Context) {\n\tuserId, idErr := getUserID(c.Param(\"user_id\"))\n\tif idErr != nil {\n\t\tc.JSON(idErr.Status, idErr)\n\t\treturn\n\t}\n\n\tvar user models.User\n\tif err := c.ShouldBindJSON(&user); err != nil {\n\t\trestErr := rest_errors.NewBadRequestError(\"invalid json body\")\n\t\tc.JSON(restErr.Status, restErr)\n\t\treturn\n\t}\n\n\tuser.Id = userId\n\n\tisPartial := c.Request.Method == http.MethodPatch\n\n\tresult, err := services.UsersService.UpdateUser(isPartial, user)\n\tif err 
!= nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, result.Marshal(c.GetHeader(\"X-Public\") == \"true\"))\n}", "func (uv *userValidator) Update(user *User) error {\r\n\terr := runUserValFns(user,\r\n\t\tuv.passwordMinLength,\r\n\t\tuv.bcryptPassword,\r\n\t\tuv.passwordHashRequired,\r\n\t\tuv.rememberMinBytes,\r\n\t\tuv.hmacRemember,\r\n\t\tuv.rememberHashRequired,\r\n\t\tuv.normalizeEmail,\r\n\t\tuv.requireEmail,\r\n\t\tuv.emailFormat,\r\n\t\tuv.emailIsAvail)\r\n\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\treturn uv.UserDB.Update(user)\r\n}", "func (as *ActionSuite) TestUpdateUser() {\n\tt := as.T()\n\n\tf := fixturesForUserQuery(as)\n\n\ttype testCase struct {\n\t\tName string\n\t\tPayload string\n\t\tTestUser models.User\n\t\tExpectError string\n\t\tTest func(t *testing.T)\n\t}\n\n\tvar resp UserResponse\n\n\tnewNickname := \"U1 New Nickname\"\n\tlocation := `{description: \"Paris, France\", country: \"FR\", latitude: 48.8588377, longitude: 2.2770202}`\n\n\tpreferences := fmt.Sprintf(`{weightUnit: %s}`, strings.ToUpper(domain.UserPreferenceWeightUnitKGs))\n\n\tupdate := fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"%s\", location: %s,\n\t\t\tpreferences: %s, photoID: \"%s\"}) {%s} }`,\n\t\tf.Users[1].UUID.String(), newNickname, location, preferences, f.Files[0].UUID.String(), allUserFields)\n\n\ttestCases := []testCase{\n\t\t{\n\t\t\tName: \"duplicate nickname\",\n\t\t\tPayload: fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"%s\"}) {nickname} }`,\n\t\t\t\tf.Users[0].UUID, f.Users[1].Nickname),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"That user nickname is already taken\",\n\t\t},\n\t\t{\n\t\t\tName: \"blank nickname\",\n\t\t\tPayload: fmt.Sprintf(`mutation { user: updateUser(input:{id: \"%s\", nickname: \"\"}) {nickname} }`,\n\t\t\t\tf.Users[0].UUID),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"Your user nickname must contain at least one visible character\",\n\t\t},\n\t\t{\n\t\t\tName: \"allowed\",\n\t\t\tPayload: update,\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tif err := as.DB.Load(&(f.Users[1]), \"PhotoFile\"); err != nil {\n\t\t\t\t\tt.Errorf(\"failed to load user fixture, %s\", err)\n\t\t\t\t}\n\t\t\t\tas.Equal(newNickname, resp.User.Nickname, \"incorrect Nickname\")\n\t\t\t\tas.Equal(f.Users[1].PhotoFile.URL, resp.User.AvatarURL, \"incorrect AvatarURL\")\n\t\t\t\tas.Regexp(\"^https?\", resp.User.AvatarURL, \"invalid AvatarURL\")\n\t\t\t\tas.Equal(\"Paris, France\", resp.User.Location.Description, \"incorrect location\")\n\t\t\t\tas.Equal(\"FR\", resp.User.Location.Country, \"incorrect country\")\n\n\t\t\t\tas.Equal(strings.ToUpper(domain.UserPreferenceWeightUnitKGs), *resp.User.Preferences.WeightUnit,\n\t\t\t\t\t\"incorrect preference - weightUnit\")\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.Language, \"incorrect preference - language\")\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.TimeZone, \"incorrect preference - timeZone\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"not allowed\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", location: %v}) {%s}}`,\n\t\t\t\tf.Users[0].UUID, location, allUserFields),\n\t\t\tTestUser: f.Users[1],\n\t\t\tTest: func(t *testing.T) {},\n\t\t\tExpectError: \"not allowed\",\n\t\t},\n\t\t{\n\t\t\tName: \"remove photo\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", location: %v, 
preferences: %s}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, location, preferences, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Equal(f.Users[1].AuthPhotoURL.String, resp.User.AvatarURL, \"expected photo to be deleted\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"remove location\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\", preferences: %s}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, preferences, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Nil(resp.User.Location, \"expected location to be deleted\")\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tName: \"remove preferences\",\n\t\t\tPayload: fmt.Sprintf(`mutation {user: updateUser(input:{id: \"%v\"}) {%s}}`,\n\t\t\t\tf.Users[1].UUID, allUserFields),\n\t\t\tTestUser: f.Users[0],\n\t\t\tTest: func(t *testing.T) {\n\t\t\t\tas.Equal(\"\", *resp.User.Preferences.WeightUnit, \"expected preferences to be deleted\")\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, test := range testCases {\n\t\tresp = UserResponse{}\n\t\terr := as.testGqlQuery(test.Payload, test.TestUser.Nickname, &resp)\n\n\t\tif test.ExpectError != \"\" {\n\t\t\tas.Error(err)\n\t\t\tas.Contains(err.Error(), test.ExpectError)\n\t\t} else {\n\t\t\tas.NoError(err)\n\t\t}\n\t\tt.Run(test.Name, test.Test)\n\t}\n}", "func (uu *UserUpdate) Save(ctx context.Context) (int, error) {\n\tif v, ok := uu.mutation.UserName(); ok {\n\t\tif err := user.UserNameValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"user_name\", err: fmt.Errorf(\"ent: validator failed for field \\\"user_name\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Password(); ok {\n\t\tif err := user.PasswordValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"password\", err: fmt.Errorf(\"ent: validator failed for field \\\"password\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := uu.mutation.Level(); ok {\n\t\tif err := user.LevelValidator(v); err != nil {\n\t\t\treturn 0, &ValidationError{Name: \"level\", err: fmt.Errorf(\"ent: validator failed for field \\\"level\\\": %w\", err)}\n\t\t}\n\t}\n\tvar (\n\t\terr error\n\t\taffected int\n\t)\n\tif len(uu.hooks) == 0 {\n\t\taffected, err = uu.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*UserMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tuu.mutation = mutation\n\t\t\taffected, err = uu.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn affected, err\n\t\t})\n\t\tfor i := len(uu.hooks) - 1; i >= 0; i-- {\n\t\t\tmut = uu.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, uu.mutation); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t}\n\treturn affected, err\n}", "func updateAccountInfo(oldUsername string, newUsername string, newPassword string) bool {\n\n\tbool1 := true\n\n\tdb := connect()\n\n\tif strings.Contains(newUsername, \"'\") || strings.Contains(newPassword, \"'\") {\n\t\tnewUsername = strings.Replace(newUsername, \"'\", \"\\\\'\", -1)\n\t\tnewPassword = strings.Replace(newPassword, \"'\", \"\\\\'\", -1)\n\t}\n\n\t_, _, err := db.Query(\"UPDATE account SET userName = '\" + newUsername + \"', password = '\" + newPassword + \"' WHERE userName = '\" + oldUsername + \"'\")\n\n\tif err != nil {\n\t\tfmt.Println(\"Database Query Error:\", err)\n\t}\n\n\tdisconnect(db)\n\n\treturn bool1\n}", "func (m *Module) IsUpdateOpAuthorised(ctx context.Context, project, dbAlias, col, token string, req 
*model.UpdateRequest) (int, error) {\n\tm.RLock()\n\tdefer m.RUnlock()\n\n\trule, auth, err := m.authenticateCrudRequest(dbAlias, col, token, utils.Update)\n\tif err != nil {\n\t\treturn http.StatusUnauthorized, err\n\t}\n\n\targs := map[string]interface{}{\"op\": req.Operation, \"auth\": auth, \"find\": req.Find, \"update\": req.Update, \"token\": token}\n\t_, err = m.matchRule(ctx, project, rule, map[string]interface{}{\"args\": args}, auth)\n\tif err != nil {\n\t\treturn http.StatusForbidden, err\n\t}\n\n\treturn http.StatusOK, nil\n}", "func TestStore_UpdateUser(t *testing.T) {\n\tt.Parallel()\n\ts := MustOpenStore()\n\tdefer s.Close()\n\n\t// Create users.\n\tif _, err := s.CreateUser(\"susy\", \"pass\", true); err != nil {\n\t\tt.Fatal(err)\n\t} else if _, err := s.CreateUser(\"bob\", \"pass\", true); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Store password hash for bob.\n\tui, err := s.User(\"bob\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Update user.\n\tif err := s.UpdateUser(\"bob\", \"XXX\"); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Verify password hash was updated.\n\tif other, err := s.User(\"bob\"); err != nil {\n\t\tt.Fatal(err)\n\t} else if ui.Hash == other.Hash {\n\t\tt.Fatal(\"password hash did not change\")\n\t}\n}", "func (u *UserService) Update(user *User) error {\n\tuser, err := u.generateRememberToken(user)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn u.db.Save(user).Error\n}", "func (*UsersController) Update(ctx *gin.Context) {\n\tvar updateJSON tat.UpdateUserJSON\n\tctx.Bind(&updateJSON)\n\n\tvar userToUpdate = tat.User{}\n\tfound, err := userDB.FindByUsername(&userToUpdate, updateJSON.Username)\n\tif !found {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"user with username %s does not exist\", updateJSON.Username)})\n\t\treturn\n\t} else if err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": fmt.Errorf(\"Error while fetching user with username %s\", updateJSON.Username)})\n\t\treturn\n\t}\n\n\tif strings.TrimSpace(updateJSON.NewFullname) == \"\" || strings.TrimSpace(updateJSON.NewEmail) == \"\" {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Errorf(\"Invalid Fullname %s or Email %s\", updateJSON.NewFullname, updateJSON.NewEmail)})\n\t\treturn\n\t}\n\n\terr2 := userDB.Update(&userToUpdate, strings.TrimSpace(updateJSON.NewFullname), strings.TrimSpace(updateJSON.NewEmail))\n\tif err2 != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": fmt.Sprintf(\"Update %s user to fullname %s and email %s failed : %s\", updateJSON.Username, updateJSON.NewFullname, updateJSON.NewEmail, err2.Error())})\n\t\treturn\n\t}\n\n\tctx.JSON(http.StatusCreated, gin.H{\"info\": \"user updated\"})\n}", "func (def *Definition) ValidarDatosUpdate(params *users.User, id *uint64) []string {\n\tvar errs []string\n\n\tif *id < 1 {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"no_user_selected_updated\"))\n\t}\n\n\tif icommon.StrEmpty(params.Firstname) {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_firstname\"))\n\t}\n\tif icommon.StrEmpty(params.Lastname) {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_lastname\"))\n\t}\n\tif icommon.StrEmpty(params.Mlastname) {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_mlastname\"))\n\t}\n\tif def.Conf.TypeLogin == \"email\" {\n\t\tparams.Username = params.Email\n\t\tif icommon.IsEmail(params.Email) {\n\t\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_email\"))\n\t\t}\n\t} else {\n\t\tif icommon.IsEmail(params.Username) 
{\n\t\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_username\"))\n\t\t}\n\t}\n\tif icommon.IsPassword(params.Password) {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_password\"))\n\t}\n\t/*fmt.Println(params.RoleID, params.RoleID < 1)\n\tfmt.Println(params.StatusID)\n\tif params.RoleID < 1 {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_role\"))\n\t}\n\t*/\n\tif params.StatusID < 1 {\n\t\terrs = append(errs, i18n.Translate(def.Ctx, \"missing_status\"))\n\t}\n\n\treturn errs\n}", "func UpdateUserPass(username, pass, fundpass string) bool {\n\torm := get_DBFront()\n\tt := make(map[string]interface{})\n\tt[\"password\"] = pass\n\tt[\"fundpassword\"] = fundpass\n\t_, err := orm.SetTable(\"user\").Where(\"username=?\", username).Update(t)\n\tif !check_err(err) {\n\t\tLog(Log_Struct{\"error\", \"DB_Error_Line_251\", err})\n\t\treturn false\n\t}\n\treturn true\n}", "func (pu *PostUpdate) check() error {\n\tif v, ok := pu.mutation.UserID(); ok {\n\t\tif err := post.UserIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"user_id\", err: fmt.Errorf(`ent: validator failed for field \"Post.user_id\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.Title(); ok {\n\t\tif err := post.TitleValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"title\", err: fmt.Errorf(`ent: validator failed for field \"Post.title\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.Content(); ok {\n\t\tif err := post.ContentValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"content\", err: fmt.Errorf(`ent: validator failed for field \"Post.content\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.Status(); ok {\n\t\tif err := post.StatusValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"status\", err: fmt.Errorf(`ent: validator failed for field \"Post.status\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.ReplyNum(); ok {\n\t\tif err := post.ReplyNumValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"reply_num\", err: fmt.Errorf(`ent: validator failed for field \"Post.reply_num\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.UpdateAt(); ok {\n\t\tif err := post.UpdateAtValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"update_at\", err: fmt.Errorf(`ent: validator failed for field \"Post.update_at\": %w`, err)}\n\t\t}\n\t}\n\tif v, ok := pu.mutation.Pin(); ok {\n\t\tif err := post.PinValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"pin\", err: fmt.Errorf(`ent: validator failed for field \"Post.pin\": %w`, err)}\n\t\t}\n\t}\n\treturn nil\n}", "func checkWithWUMUCAdmin() {\n\tapiURL := util.GetWUMUCConfigs().VersionURL + \"/\" + constant.WUMUCADMIN_API_CONTEXT + \"/\" + constant.\n\t\tVERSION + \"/\" + Version\n\n\tresponse := util.InvokeGetRequest(apiURL)\n\tversionResponse := util.VersionResponse{}\n\tutil.ProcessResponseFromServer(response, &versionResponse)\n\t// Exit if the current version is no longer supported for creating updates\n\tif !versionResponse.IsCompatible {\n\t\tutil.HandleErrorAndExit(errors.New(fmt.Sprintf(versionResponse.\n\t\t\tVersionMessage+\"\\n\\t Latest version: %s \\n\\t Released date: %s\\n\",\n\t\t\tversionResponse.LatestVersion.Version, versionResponse.LatestVersion.ReleaseDate)))\n\t}\n\t// If there is a new version of wum-uc being released\n\tif len(versionResponse.LatestVersion.Version) != 0 {\n\t\t// Print new version details if exists and continue creating the 
update\n\t\tutil.PrintInfo(fmt.Sprintf(versionResponse.VersionMessage+\"\\n\\t Latest version: %s \\n\\t Released date: %s\\n\",\n\t\t\tversionResponse.LatestVersion.Version, versionResponse.LatestVersion.ReleaseDate))\n\t}\n\t// Write the current timestamp to 'wum-uc-update' cache file for future reference\n\tutcTime := time.Now().UTC().Unix()\n\tlogger.Debug(fmt.Sprintf(\"Current timestamp %v\", utcTime))\n\tcacheDirectoryPath := filepath.Join(WUMUCHome, constant.WUMUC_CACHE_DIRECTORY)\n\terr := util.CreateDirectory(cacheDirectoryPath)\n\tif err != nil {\n\t\tlogger.Error(fmt.Sprintf(\"%v error occured in creating the directory %s for saving %s cache file\", err,\n\t\t\tcacheDirectoryPath, constant.WUMUC_UPDATE_CHECK_TIMESTAMP_FILENAME))\n\t}\n\twumucUpdateTimestampFilePath := filepath.Join(cacheDirectoryPath, constant.WUMUC_UPDATE_CHECK_TIMESTAMP_FILENAME)\n\terr = util.WriteFileToDestination([]byte(strconv.FormatInt(utcTime, 10)), wumucUpdateTimestampFilePath)\n\tif err != nil {\n\t\tlogger.Error(fmt.Sprintf(\"%v error occurred in writing to %s file\", err, wumucUpdateTimestampFilePath))\n\t}\n}", "func (u *UserResource) updateUser(request *restful.Request, response *restful.Response) {\n\tusr := new(User)\n\terr := request.ReadEntity(&usr)\n\tif err == nil {\n\t\tdb.WLock()\n\t\tdefer db.WUnlock() //unlock when exit this method\n\n\t\tif _, err = db.Engine.Id(usr.ID).Update(usr); err != nil {\n\t\t\tresponse.WriteHeaderAndEntity(http.StatusInternalServerError, UsersResponse{Error: err.Error()})\n\t\t} else {\n\t\t\tresponse.WriteEntity(UsersResponse{Success: true})\n\t\t}\n\t} else {\n\t\tresponse.WriteHeaderAndEntity(http.StatusInternalServerError, UsersResponse{Error: err.Error()})\n\t}\n}", "func (u *User) Update(tx *pop.Connection) (*validate.Errors, error) {\n\tif verrs, err := u.PrepFields(); (verrs != nil && verrs.HasAny()) || err != nil {\n\t\treturn verrs, err\n\t}\n\treturn tx.ValidateAndSave(u)\n}", "func UpdateUserProfileHandler(w http.ResponseWriter, r *http.Request) {\n\n}", "func updateUser(username string, attrs User) bool {\n\toutp, _ := exec.Command(\"getent\", \"shadow\", username).CombinedOutput()\n\tcurrentPassword := strings.TrimSpace(strings.Split(string(outp), \":\")[1])\n\touts, _ := exec.Command(\"getent\", \"passwd\", username).CombinedOutput()\n\tcurrentShell := strings.TrimSpace(strings.Split(string(outs), \":\")[6])\n\tcurrentHome := strings.TrimSpace(strings.Split(string(outs), \":\")[5])\n\tcurrentComment := strings.TrimSpace(strings.Split(string(outs), \":\")[4])\n\texistingGroups := getUserGroups(username)\n\n\tif attrs.Shell != currentShell {\n\t\tupdateShell(username, attrs.Shell)\n\t}\n\tif attrs.Password != currentPassword {\n\t\tupdatePassword(username, attrs.Password)\n\t}\n\tif attrs.Home != currentHome {\n\t\tupdateHome(username, attrs.Home)\n\t}\n\tif attrs.Comment != currentComment {\n\t\tupdateComment(username, attrs.Comment)\n\t}\n\tif strings.Join(existingGroups, \",\") != strings.Join(attrs.Groups, \",\") {\n\t\tupdateGroups(username, attrs.Groups)\n\t}\n\n\tkeyFile := path.Join(attrs.Home, \".ssh\", \"authorized_keys\")\n\tfileData := []string{}\n\tif buf, err := ioutil.ReadFile(keyFile); err == nil {\n\t\tfileData = strings.Split(string(buf), \"\\n\")\n\t\tsort.Strings(fileData)\n\t}\n\tif strings.Join(attrs.SSHKeys, \",\") != strings.Join(fileData, \",\") {\n\t\tupdateSSHPublicKeys(username, attrs)\n\t}\n\treturn true\n}", "func (u *User)Update()(e error){\n\tst := `update users set\n\t\t\tuser_name = ?,email = ?,password = 
?\n\t\t\twhere user_id = ?`;\n\n\t_,e = db.Exec(st,u.Name,u.Email,u.password,u.Id)\n\n\treturn\n}", "func (me TAttlistCommentsCorrectionsRefType) IsUpdateOf() bool { return me.String() == \"UpdateOf\" }", "func (uv *userValidator) Update(user *User) error {\n\terr := runUserValFuncs(user,\n\t\tuv.passwordMinLength,\n\t\tuv.bcryptPassword,\n\t\tuv.passwordHashRequired,\n\t\tuv.normalizeEmail,\n\t\tuv.requireEmail,\n\t\tuv.emailFormat,\n\t\tuv.emailIsAvail)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn uv.UserDB.Update(user)\n}", "func (p *politeiawww) processVerifyUpdateUserKey(u *user.User, vu www.VerifyUpdateUserKey) (*user.User, error) {\n\t// Decode the verification token.\n\ttoken, err := hex.DecodeString(vu.VerificationToken)\n\tif err != nil {\n\t\tlog.Debugf(\"VerifyUpdateUserKey failure for %v: verification \"+\n\t\t\t\"token could not be decoded: %v\", u.Email, err)\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusVerificationTokenInvalid,\n\t\t}\n\t}\n\n\t// Check that the verification token matches.\n\tif !bytes.Equal(token, u.UpdateKeyVerificationToken) {\n\t\tlog.Debugf(\"VerifyUpdateUserKey failure for %v: verification \"+\n\t\t\t\"token doesn't match, expected %v\", u.Email,\n\t\t\tu.UpdateKeyVerificationToken, token)\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusVerificationTokenInvalid,\n\t\t}\n\t}\n\n\t// Check that the token hasn't expired.\n\tif u.UpdateKeyVerificationExpiry < time.Now().Unix() {\n\t\tlog.Debugf(\"VerifyUpdateUserKey failure for %v: verification \"+\n\t\t\t\"token not expired yet\", u.Email)\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusVerificationTokenExpired,\n\t\t}\n\t}\n\n\t// Check signature\n\tsig, err := util.ConvertSignature(vu.Signature)\n\tif err != nil {\n\t\tlog.Debugf(\"VerifyUpdateUserKey failure for %v: signature \"+\n\t\t\t\"could not be decoded: %v\", u.Email, err)\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusInvalidSignature,\n\t\t}\n\t}\n\n\tid := u.InactiveIdentity()\n\tif id == nil {\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusNoPublicKey,\n\t\t}\n\t}\n\tpi, err := identity.PublicIdentityFromBytes(id.Key[:])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif !pi.VerifyMessage([]byte(vu.VerificationToken), sig) {\n\t\tlog.Debugf(\"VerifyUpdateUserKey failure for %v: signature did\"+\n\t\t\t\" not match (pubkey: %v)\", u.Email, pi.String())\n\t\treturn nil, www.UserError{\n\t\t\tErrorCode: www.ErrorStatusInvalidSignature,\n\t\t}\n\t}\n\n\t// Clear out the verification token fields in the db and activate\n\t// the key and deactivate the one it's replacing.\n\tu.UpdateKeyVerificationToken = nil\n\tu.UpdateKeyVerificationExpiry = 0\n\terr = u.ActivateIdentity(id.Key[:])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn u, p.db.UserUpdate(*u)\n}", "func (uv *userValidator) Update(u *User) error {\n\tfns := []userValidatorFunc{\n\t\tuv.emailNormalize,\n\t\tuv.emailRequired,\n\t\tuv.emailIsValid,\n\t\tuv.emailIsAvail,\n\t\tuv.passwordMinLength,\n\t\tuv.passwordBcrypt,\n\t\tuv.passwordHashRequired,\n\t\tuv.rememberHmac,\n\t\tuv.rememberMinBytes,\n\t\tuv.rememberHashRequired,\n\t}\n\tif err := runUserValFuncs(u, fns...); err != nil {\n\t\treturn err\n\t}\n\n\treturn uv.UserDB.Update(u)\n}", "func (ug *userGorm) Update(user *User) error {\n\n\treturn ug.db.Save(user).Error\n\n}", "func (UserService) UpdateStatus(dto dto.UserEditStatusDto) int64 {\n\tu := userDao.Get(dto.Id, false)\n\t//u.Status = dto.Status\n\tc := userDao.Update(&u, 
map[string]interface{}{\n\t\t\"status\": dto.Status,\n\t})\n\treturn c.RowsAffected\n}", "func verifyUserFields(pwdUser ign3types.PasswdUser) error {\n\temptyUser := ign3types.PasswdUser{}\n\ttempUser := pwdUser\n\tif tempUser.Name == constants.CoreUserName && ((tempUser.PasswordHash) != nil || len(tempUser.SSHAuthorizedKeys) >= 1) {\n\t\ttempUser.Name = \"\"\n\t\ttempUser.SSHAuthorizedKeys = nil\n\t\ttempUser.PasswordHash = nil\n\t\tif !reflect.DeepEqual(emptyUser, tempUser) {\n\t\t\treturn fmt.Errorf(\"SSH keys and password hash are not reconcilable\")\n\t\t}\n\t\tklog.Info(\"SSH Keys reconcilable\")\n\t} else {\n\t\treturn fmt.Errorf(\"ignition passwd user section contains unsupported changes: user must be core and have 1 or more sshKeys\")\n\t}\n\treturn nil\n}", "func (db *Currency) CanUpdate(curr *objects.Currency, user users.User) bool {\n\treturn true\n}", "func (user *User) UpdatedFields(updateID bool, updatedFields ...string) map[string]interface{} {\n fieldValueMap := make(map[string]interface{})\n for _, fieldName := range updatedFields {\n switch fieldName {\n case KeyUserID:\n if updateID {\n fieldValueMap[fieldName] = user.ID\n }\n case KeyUserUsername:\n fieldValueMap[fieldName] = user.Username\n case KeyUserEmail:\n fieldValueMap[fieldName] = user.Email\n case KeyUserEmailVerifiedAt:\n fieldValueMap[fieldName] = user.EmailVerifiedAt\n case KeyUserPassword:\n fieldValueMap[fieldName] = user.Password\n case KeyUserRememberToken:\n fieldValueMap[fieldName] = user.RememberToken\n case KeyUserCreatedAt:\n fieldValueMap[fieldName] = user.CreatedAt\n case KeyUserUpdatedAt:\n fieldValueMap[fieldName] = user.UpdatedAt\n //case KeyUserDeletedAt:\n // fieldValueMap[fieldName] = user.DeletedAt\n // break\n }\n }\n return fieldValueMap\n}", "func (account *Account) ValidateUpdate() (map[string]interface{}, bool) {\n\n\tif !strings.Contains(account.Email, \"@\") {\n\t\treturn u.Message(false, \"Email address is required\"), false\n\t}\n\n\tif len(account.Password) < 6 {\n\t\treturn u.Message(false, \"Password is required\"), false\n\t}\n\n\t//Email must be unique\n\ttemp := &Account{}\n\t//check for errors and duplicate emails\n\terr := GetDB().Table(\"accounts\").Where(\"email = ?\", account.Email).First(temp).Error\n\tif err != nil && err != gorm.ErrRecordNotFound {\n\t\treturn u.Message(false, \"Connection error. 
Please retry\"), false\n\t}\n\tif temp.Email == \"\" {\n\t\treturn u.Message(false, \"Email address not found in database.\"), false\n\t}\n\treturn u.Message(true, \"Requirement passed\"), true\n}", "func (user *User) BeforeUpdate(transaction *gorm.DB) error {\n return nil\n}", "func updateVolunteer(c *gin.Context) {\n\n\tvar vol Volunteer\n\temail := c.Params.ByName(\"email\")\n\n\t//Checks json data\n\tif err := c.BindJSON(&vol); err != nil {\n\t\tcreateBadRequestResponse(c, err)\n\t\treturn\n\t}\n\n\t//Gets volunteer from database\n\tvar oldvol Volunteer\n\tif err := db.Where(\"email = ?\", email).First(&oldvol).Error; err != nil {\n\t\tcreateNotFoundResponse(c)\n\t\treturn\n\t}\n\t//Sets data which could not be changed (I am still not sure if url path have to be with :email )\n\tvol.ID = oldvol.ID\n\tvol.Email = oldvol.Email\n\tif vol.Password == \"\" {\n\t\tvol.Password = oldvol.Password\n\t}\n\n\t//Checks if data belongs to the user\n\tif !volunteerAuth(c, &vol) {\n\t\treturn\n\t}\n\n\t//Saves Volunteer to the database\n\tif err := db.Save(&vol).Error; err != nil {\n\t\tcreateStatusConflictResponse(c)\n\t\treturn\n\t}\n\t//change password in auth map\n\tauthMap[vol.Email] = vol.Password\n\tc.JSON(200, vol)\n\n}", "func (ug *userGorm) Update(user *User) error{\n\treturn ug.db.Save(user).Error\n}" ]
[ "0.67521477", "0.66969067", "0.66159534", "0.6524795", "0.6491474", "0.64720905", "0.6394095", "0.6361187", "0.63606584", "0.6321268", "0.6307553", "0.6271807", "0.62663174", "0.6188823", "0.61886305", "0.61258125", "0.61094856", "0.6088319", "0.6080595", "0.6070621", "0.60536665", "0.6040204", "0.60219514", "0.60056627", "0.59997934", "0.5973236", "0.5971509", "0.5968191", "0.5968191", "0.5968191", "0.5968191", "0.5968191", "0.596192", "0.5956199", "0.593443", "0.59307283", "0.5922771", "0.5921694", "0.5900694", "0.5898977", "0.58710986", "0.5848298", "0.5843314", "0.5836492", "0.5818835", "0.5811743", "0.58056784", "0.5803862", "0.58027595", "0.5800738", "0.57964677", "0.5791786", "0.5783704", "0.57835007", "0.57727516", "0.5769716", "0.576824", "0.5755054", "0.575031", "0.5748707", "0.5748073", "0.5741701", "0.5731116", "0.5726504", "0.5717132", "0.57142836", "0.57125646", "0.5711551", "0.5710527", "0.5702998", "0.56974936", "0.5694914", "0.5694718", "0.5693467", "0.56890255", "0.5686717", "0.56810594", "0.5671438", "0.5655101", "0.5650601", "0.564719", "0.5634327", "0.56255525", "0.5623795", "0.5619296", "0.5611521", "0.56090415", "0.5605968", "0.5602738", "0.5596484", "0.5596068", "0.5592964", "0.55926454", "0.5584821", "0.5583838", "0.55834776", "0.5576165", "0.5575465", "0.5573978", "0.55710757", "0.5570044" ]
0.0
-1
7 Try to get a user that does not exist
func TestGetUserServiceDoesntExist(t *testing.T) {
	_, err := GetUserService("")
	assert.Equal(t, 404, err.HTTPStatus)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func getUserOrNullLogin(db *gorm.DB, appUserID string, w http.ResponseWriter, r *http.Request) *models.AppUser {\n\tuser := models.AppUser{}\n\tif err := db.Where(\"app_user_status = ?\", true).First(&user, models.AppUser{AppUserID: appUserID}).Error; err != nil {\n\t\treturn nil\n\t}\n\treturn &user\n}", "func getUserOrNull(db *gorm.DB, appUserID string, w http.ResponseWriter, r *http.Request) *models.AppUser {\n\tuser := models.AppUser{}\n\tif err := db.First(&user, models.AppUser{AppUserID: appUserID}).Error; err != nil {\n\t\treturn nil\n\t}\n\treturn &user\n}", "func UserNotFoundException() error {\n\treturn fmt.Errorf(\"user does not exist or wrong credentials\")\n}", "func (a *Api) findExistingUser(identifier, token string) *schema.UserData {\n\tif usr, err := a.sl.GetUser(identifier, token); err != nil {\n\t\tlog.Printf(\"Error [%s] trying to get existing users details\", err.Error())\n\t\treturn nil\n\t} else {\n\t\tlog.Printf(\"User found at shoreline using token %s\", token)\n\t\treturn usr\n\t}\n}", "func CheckExistUser(email string) (models.User, bool, string) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//When end instruction remove timeout operation and liberate context\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\tobject := bson.M{\"Email\": email}\n\n\tvar result models.User\n\n\terr := collection.FindOne(ctx, object).Decode(&result)\n\n\tID := result.ID.Hex()\n\n\tif err != nil {\n\t\treturn result, false, ID\n\t}\n\n\treturn result, true, ID\n\n}", "func (kc KeycloakContext) GetUser(username Username) (gocloak.User, error) {\n\tfor _, u := range kc.Users {\n\t\tif u != nil && u.Username != nil && strings.EqualFold(*u.Username, string(username)) {\n\t\t\treturn *u, nil\n\t\t}\n\t}\n\treturn gocloak.User{},\n\t\tfmt.Errorf(\n\t\t\t\"l'utilisateur '%s' n'existe pas dans le contexte Keycloak\",\n\t\t\tusername,\n\t\t)\n}", "func TestGetUserByIDUserNotFound(t *testing.T) {\n\tuser, err := GetUserByID(0)\n\tassert.Nil(t, user, \"Id nao esperado\")\n\tassert.NotNil(t, err)\n\n\tassert.EqualValues(t, http.StatusNotFound, err.StatusCode)\n\tassert.EqualValues(t, \"User not found\", err.Message)\n}", "func IsUserExist(qr db.Queryer, email string) bool {\n\tstr := \"SELECT count(*) as cnt FROM users WHERE email = ?\"\n\tuid := int64(0)\n\terr := qr.Get(&uid, str, email)\n\tif err != nil {\n\t\tlog.Println(\"err\", err)\n\t\treturn false\n\t}\n\tlog.Println(\"uid\", err)\n\tif uid > 0 {\n\t\treturn true\n\t}\n\n\treturn false\n\n}", "func UserExistDb(email string) bool {\n\tlog.Println(\"call db func\")\n\tif _, ok := db[email]; !ok {\n\t\treturn false\n\t}\n\treturn true\n}", "func (u *User) checkExistUser() error {\n\tif u.Id == \"\" && u.Name == \"\" {\n\t\treturn fmt.Errorf(\"invalid user\")\n\t}\n\n\tif u.Id != \"\" {\n\t\t_,err := GetUserById(u.Id)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\tif u.Name != \"\" {\n\t\t_,err := GetUserByUserName(u.Name)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func getUserOrCreate(bot *eb.Bot, username string, latest bool) (*model.EtternaUser, error) {\n\tuser, err := bot.Users.GetUsername(username)\n\texists := user != nil\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else if user == nil {\n\t\tetternaUser, err := bot.API.GetByUsername(username)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tid, err := 
bot.API.GetUserID(username)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tuser = &model.EtternaUser{\n\t\t\tUsername: etternaUser.Username,\n\t\t\tEtternaID: id,\n\t\t\tAvatar: etternaUser.AvatarURL,\n\t\t\tMSDOverall: etternaUser.MSD.Overall,\n\t\t\tMSDStream: etternaUser.MSD.Stream,\n\t\t\tMSDJumpstream: etternaUser.MSD.Jumpstream,\n\t\t\tMSDHandstream: etternaUser.MSD.Handstream,\n\t\t\tMSDStamina: etternaUser.MSD.Stamina,\n\t\t\tMSDJackSpeed: etternaUser.MSD.JackSpeed,\n\t\t\tMSDChordjack: etternaUser.MSD.Chordjack,\n\t\t\tMSDTechnical: etternaUser.MSD.Technical,\n\t\t}\n\t}\n\n\t// Get the latest info for this user if they are cached\n\tif exists && latest {\n\t\tif err := getLatestUserInfo(bot, user); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Save the user if it changed. The only scenario where we don't save the user is\n\t// if they are cached and we are not getting the latest info\n\tif !(exists && !latest) {\n\t\tif err := bot.Users.Save(user); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn user, nil\n}", "func does_user_exist(uname string) (string, bool) {\n\tuser_map_lock.Lock()\n\tdefer user_map_lock.Unlock()\n\tif _, is_exist := user_map[uname]; is_exist {\n\t\treturn fmt.Sprintf(\"success: user exists %s\\n\", END_TAG), false\n\t} else {\n\t\treturn fmt.Sprintf(\"error: no such user %s\\n\", END_TAG), false\n\t}\n}", "func existsUser(gh_id int64) bool {\n\terr := db.QueryRow(\"SELECT gh_id FROM users WHERE gh_id = $1\", gh_id).\n\t\tScan(&gh_id)\n\treturn err != sql.ErrNoRows\n}", "func isUserExist(usernameQuery string) bool {\n\tvar user User_DB\n\terr := mysql_client.QueryRow(\"SELECT username, password, kind FROM User WHERE username=?\", usernameQuery).Scan(&user.Username, &user.Password, &user.Kind)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false\n\t} else {\n\t\treturn true\n\t}\n}", "func (ctx UserManagement) TryGetUserInformation(username string, password string) (config.User, bool) {\n\tuser, userCouldBeFound := ctx.users[username]\n\tif !userCouldBeFound || user.Password != password {\n\t\treturn user, false\n\t}\n\n\treturn user, true\n}", "func checkForUid(session *mgo.Session, userId string, w http.ResponseWriter) (c *mgo.Collection, user User) {\n\tif len(userId) != 24 {\n\t\tErrorWithJSON(w, \"Error in uid format. 
Lenght must be 24\", http.StatusNotFound)\n\t\treturn\n\t}\n\tc = session.DB(DBNAME).C(USERSCOLL)\n\terr1 := c.FindId(bson.ObjectIdHex(userId)).One(&user)\n\tif err1 != nil {\n\t\tswitch err1 {\n\t\tcase mgo.ErrNotFound:\n\t\t\tErrorWithJSON(w, \"UidNotFound\", http.StatusNotFound)\n\t\t\treturn\n\t\tdefault :\n\t\t\tErrorWithJSON(w, \"Database error\", http.StatusInternalServerError)\n\t\t\tlog.Println(\"Failed find user: \", err1)\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func getUser(username string) (User, error) {\n\tvar user User\n\t// Execute the query\n\terr := db.QueryRow(\"SELECT * FROM users WHERE username = ?\", username).Scan(&user.ID, &user.Username, &user.Email, &user.CreatedAt)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\tlog.Printf(\"No user with that ID.\")\n\tcase err != nil:\n\t\tpanic(err.Error()) // proper error handling instead of panic in your app\n\t}\n\n\treturn user, err\n}", "func ERROR_AUTH_USER_NOT_FOUND(w http.ResponseWriter, pl string) {\n\tbuildForeignError(w, http.StatusForbidden, \"ERROR_AUTH_USER_NOT_FOUND\", pl)\n}", "func getUser(username string) (User, error) {\n\tuser, ok := users[username]\n\tif !ok {\n\t\treturn user, errors.New(ErrUserNotFound).WithField(\"username\", username)\n\t}\n\n\tif !user.Active {\n\t\treturn user, errors.New(ErrUserInactive).WithField(\"username\", username)\n\t}\n\n\t// Simulate \"critical\" error occurring when trying to look up this particular user. For example,\n\t// maybe our database server has just died.\n\tif username == \"stephen\" {\n\t\t// Pretend for a moment that this error was returned from some third-party library, etc. It\n\t\t// can be a regular error, we only expect the standard Go `error` interface when wrapping.\n\t\terr := errors.New(\"database went down, oh no\")\n\n\t\t// Wrap is identical to New, but must always take a non-nil Go `error` as it's first\n\t\t// parameter. That means you could create kinds to handle built-in \"sentinel\" errors.\n\t\treturn user, errors.Wrap(err, \"errors without a 'Kind' should probably always be handled\").\n\t\t\tWithField(\"username\", username)\n\t}\n\n\treturn user, nil\n}", "func (s *Server) GetUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\tvar (\n\t\tmeUser = p.ByName(\"userID\") == \"me\"\n\t\tresp = JSON(nil, res)\n\t)\n\n\t// handle other users later!\n\tif !meUser {\n\t\tresp.Error(errors.New(\"kljshadf\"), http.StatusNotFound)\n\t\treturn\n\t}\n\n\tu := User{\n\t\tID: uuid.NewV1(),\n\t\tEmail: \"test\",\n\t}\n\n\t// switch err {\n\t// case nil:\n\tresp.Success(u)\n\t// case api.ErrInvalidUserID:\n\t// \tresp.Error(err, http.StatusNotFound)\n\t// default:\n\t// \tresp.Error(err, http.StatusInternalServerError)\n\t// }\n}", "func getOrRegisterUser(provider string, user *structs.User) models.User {\n\tvar userData models.User\n\n\tconfig.DB.Where(\"provider = ? 
AND social_id = ?\", provider, user.ID).First(&userData)\n\n\tif userData.ID == 0 {\n\t\ttoken, _ := RandomToken()\n\n\t\tnewUser := models.User{\n\t\t\tFullName: user.FullName,\n\t\t\tUserName: user.Username,\n\t\t\tEmail: user.Email,\n\t\t\tSocialID: user.ID,\n\t\t\tProvider: provider,\n\t\t\tAvatar: user.Avatar,\n\t\t\tVerificationToken: token,\n\t\t}\n\n\t\tconfig.DB.Create(&newUser)\n\n\t\treturn newUser\n\t}\n\n\treturn userData\n}", "func TestGetUserIDInvalid(t *testing.T) {\n\tts := initAPITestServer(t)\n\tdefer test.CloseServer(ts)\n\n\tinvalidUsername := \"not_\" + username\n\tid, err := GetUserID(invalidUsername)\n\tif err == nil || err.Error() != \"Username not found\" {\n\t\tt.Fatalf(\"Expected error\")\n\t}\n\tif id != \"\" {\n\t\tt.Fatalf(\"Expected empty userID\")\n\t}\n}", "func GetUser(u uint) *User {\n\n\tuser := &User{}\n\tGetDB().Table(\"users\").Where(\"id = ?\", u).First(user)\n\tif user.Email == \"\" { //User not found!\n\t\treturn nil\n\t}\n\n\tuser.Password = \"\"\n\tuser.FacebookUserID = \"\"\n\tuser.GoogleUserID = \"\"\n\treturn user\n}", "func GetExistingUser(db *sql.DB, userName string, password string) (st.User, error) {\n\tvar u st.User\n\terr := db.QueryRow(\"SELECT * from user where user_name=? and password=?\", userName, password).Scan(&u.ID, &u.Name, &u.Password)\n\tif err != nil {\n\t\treturn st.User{}, err\n\t}\n\treturn u, nil\n}", "func getUser(ctx context.Context, Username string) (*User, error) {\n\tkey := datastore.NameKey(\"Users\", strings.ToLower(Username), nil)\n\tcurUser := &User{}\n\tif err := dbclient.Get(ctx, key, curUser); err != nil {\n\t\treturn &User{}, err\n\t}\n\n\treturn curUser, nil\n}", "func GetUser(u uint) *User {\n\n\tuser := &User{}\n\tGetDB().Table(\"users\").Where(\"id = ?\", u).First(user)\n\tif user.Email == \"\" { //User not found!\n\t\treturn nil\n\t}\n\n\tuser.Password = \"\"\n\treturn user\n}", "func withArgs1(name string) (user, error) {\n\tfor _, user := range users {\n\t\tif user.name == name {\n\t\t\treturn user, nil\n\t\t}\n\t}\n\n\treturn user{}, errors.New(\"user is not found\")\n}", "func (dm *DatabaseManager) userFind(key string, value string) (*User, error) {\n\tvar resulsts []User\n\terr := dm.db.Select(&resulsts, dm.db.Where(key, \"=\", value))\n\t\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif len(resulsts) == 0 {\n\t\treturn nil, errors.New(\"Unknown user\")\n\t}\n\n\treturn &resulsts[0], nil\n}", "func NoUserWrapper(err error) error {\n\treturn errors.New(\"User not found. 
More: \" + err.Error())\n}", "func ErrUserDoesntExist() error {\n\treturn fmt.Errorf(UserDoesntExist)\n}", "func BenchmarkGetUserByIDUserNotFound(b *testing.B) {\n\tfor i := 0; i < b.N; i++ {\n\t\tGetUserByID(0)\n\t}\n}", "func GetUser(db sqlx.Queryer, id int64) (User, error) {\n\tvar user User\n\terr := sqlx.Get(db, &user, \"select \"+externalUserFields+\" from \\\"user\\\" where id = $1\", id)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn user, ErrDoesNotExist\n\t\t}\n\t\treturn user, errors.Wrap(err, \"select error\")\n\t}\n\n\treturn user, nil\n}", "func (db *Client) GetUser(query *models.User) (*models.User, error) {\n\tuser := &models.User{}\n\tresult := db.Where(query).Find(user)\n\tif result.Error != nil {\n\t\treturn nil, result.Error\n\t}\n\tif result.RowsAffected == 0 {\n\t\treturn nil, nil\n\t}\n\treturn user, nil\n}", "func (ms *MySQL) GetUser(username string) (int, string, string, error) { // get acc data by profile\n\tvar u entities.User\n\tresult, err := db.Db.Query(\"SELECT id,username, hash FROM nf_stn.users WHERE username = ?;\", username)\n\tif err != nil {\n\t\tlog.Error(err.Error())\n\t\treturn u.ID, u.Username, u.Hash, err\n\t}\n\tfor result.Next() {\n\t\terr = result.Scan(&u.ID, &u.Username, &u.Hash)\n\t\tif err != nil {\n\t\t\tlog.Error(err.Error())\n\t\t}\n\t}\n\tlog.Info(\"successfully got user!\")\n\treturn u.ID, u.Username, u.Hash, err\n}", "func (u userDao) GetUser(userID int64) (*User, *utils.ApplicationError) {\n\tlog.Println(\"We are accessing the Database\")\n\tif user := users[userID]; user != nil {\n\t\treturn user, nil\n\t}\n\n\treturn nil, &utils.ApplicationError{\n\t\tMessage: fmt.Sprintf(\"user %v was not found\", userID),\n\t\tStatusCode: http.StatusNotFound,\n\t\tCode: \"Not found\",\n\t}\n}", "func getUser(vUserName string, vPassword string) (userobj Users, err error) {\n\t/*\n\t\tDefining the variables\n\t*/\n\tvar vDBId, vName, vEmail, vToken, vsqlPassword sql.NullString\n\tvar vIsActive sql.NullBool\n\n\t/*\n\t\tcreating a sql query using parameter\n\t*/\n\tsqlStmt := fmt.Sprintf(`SELECT id,Name,Email,Token,Is_Active,Password FROM shard_1.users WHERE LOWER(Email)=lower('%s') and lower(password) = md5('%s')`, strings.ToLower(vUserName), vPassword)\n\n\t/*\n\t\tExecuting the sql query\n\t\tIn case of error, error information will be returned\n\t\tUser object is returned in case credentials are valid\n\t*/\n\terr = db.QueryRow(sqlStmt).Scan(&vDBId, &vName, &vEmail, &vToken, &vIsActive, &vsqlPassword)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\terr = fmt.Errorf(\"unknown email : %s\", err.Error())\n\t\treturn\n\t}\n\tuserobj.DBId = vDBId.String\n\tuserobj.Name = vName.String\n\tuserobj.Email = vEmail.String\n\tuserobj.Token = vToken.String\n\tuserobj.IsActive = vIsActive.Bool\n\tuserobj.Password = \"\"\n\treturn\n}", "func UserErrNotFound(props ...*userActionProps) *userError {\n\tvar e = &userError{\n\t\ttimestamp: time.Now(),\n\t\tresource: \"system:user\",\n\t\terror: \"notFound\",\n\t\taction: \"error\",\n\t\tmessage: \"user not found\",\n\t\tlog: \"user not found\",\n\t\tseverity: actionlog.Warning,\n\t\tprops: func() *userActionProps {\n\t\t\tif len(props) > 0 {\n\t\t\t\treturn props[0]\n\t\t\t}\n\t\t\treturn nil\n\t\t}(),\n\t}\n\n\tif len(props) > 0 {\n\t\te.props = props[0]\n\t}\n\n\treturn e\n\n}", "func getUser(username string) (*sqlx.Rows, error) {\n rows, err := model.Database.Queryx(\"SELECT * FROM users WHERE username = ?\", username)\n if err != nil {\n return nil, err\n }\n return rows, nil\n}", "func getUser(req 
*http.Request) *grepbook.User {\n\tif rv := context.Get(req, UserKeyName); rv != nil {\n\t\tres := rv.(*grepbook.User)\n\t\treturn res\n\t}\n\treturn nil\n}", "func TestGetUserNotFoundInDatabase(t *testing.T) {\n\t// customize the return value for getUserFunction\n\tgetUserFunction = func(userID int64) (*domain.User, *utils.ApplicationError) {\n\t\treturn nil, &utils.ApplicationError{\n\t\t\tMessage: fmt.Sprintf(\"user %v was not found\", userID),\n\t\t\tStatusCode: http.StatusNotFound,\n\t\t\tCode: \"not_found\",\n\t\t}\n\t}\n\n\t// mock domain.UserDao layer\n\tuser, err := UserService.GetUser(0)\n\tassert.Nil(t, user)\n\tassert.NotNil(t, err)\n\tassert.EqualValues(t, http.StatusNotFound, err.StatusCode)\n\tassert.EqualValues(t, \"user 0 was not found\", err.Message)\n}", "func (a *noAuth) GetUser(userGUID string) (*interfaces.ConnectedUser, error) {\n\tvar scopes []string\n\tscopes = make([]string, 1)\n\tscopes[0] = \"stratos.noauth\"\n\n\tconnectdUser := &interfaces.ConnectedUser{\n\t\tGUID: noAuthUserID,\n\t\tName: interfaces.DefaultAdminUserName,\n\t\tAdmin: true,\n\t\tScopes: scopes,\n\t}\n\n\treturn connectdUser, nil\n}", "func GetUser(db *sqlx.DB, pu *mUsers) (*mUsers, error) {\n\tvar err2 error\n\tif pu.ID == 0 && pu.Username == \"\" {\n\t\terr2 = errors.New(\"insufficient data\")\n\t\treturn pu, err2\n\t}\n\tvar stmt SelectStatement\n\tif pu.ID != 0 {\n\t\tstmt = SELECT(Users.AllColumns).WHERE(\n\t\t\tUsers.ID.EQ(Int(int64(pu.ID))),\n\t\t).FROM(Users).LIMIT(1)\n\t} else {\n\t\tstmt = SELECT(Users.AllColumns).\n\t\t\tFROM(Users).\n\t\t\tWHERE(Users.Username.EQ(String(pu.Username)))\n\t}\n\terr := stmt.Query(db, pu)\n\tif err != nil {\n\t\tlog.Println(\"func GetUser:\", err)\n\t\tlog.Println(stmt.DebugSql())\n\t\treturn pu, err\n\t}\n\n\treturn pu, err2\n}", "func (u UserResource) findUser(request *restful.Request, response *restful.Response) {\n\tid, _ := strconv.Atoi(request.PathParameter(\"user-id\"))\n\tusr := User{ID: id}\n\n\tif has, err := db.Engine.Get(&usr); err != nil {\n\t\tresponse.WriteHeaderAndEntity(http.StatusInternalServerError, UsersResponse{Error: err.Error()})\n\t} else if !has {\n\t\tresponse.WriteHeaderAndEntity(http.StatusNotFound, UsersResponse{Error: \"User could not be found.\"})\n\t} else {\n\t\tresponse.WriteEntity(UsersResponse{IsExist: true, User: usr})\n\t}\n}", "func checkUserByEmail(email string) bool {\n\trows, err := db.Query(\"SELECT * FROM users WHERE email = $1;\", email)\n\tif err != nil {\n\t\tfmt.Println(\"error in checkUserByEmail\")\n\t}\n\tdefer rows.Close()\n\n\t// return true if user is found\n\treturn rows.Next()\n}", "func GetUserOrUnexpected(c appengine.Context, w http.ResponseWriter,\n\tr *http.Request) (*user.User, bool) {\n\n\t// Get the current user.\n\tu := user.Current(c)\n\tif u == nil {\n\t\tLogAndUnexpected(c, w, r,\n\t\t\tfmt.Errorf(\"no user found, but auth is required.\"))\n\t\treturn nil, false\n\t}\n\n\treturn u, true\n\n}", "func GetUser(userID int64) (*models.User, *utils.ApplicationError) {\n\tif user := users[userID]; user != nil {\n\t\treturn user, nil\n\t}\n\n\treturn nil, &utils.ApplicationError{\n\t\tMessage: fmt.Sprintf(\"User with ID %v was not found\", userID),\n\t\tStatusCode: http.StatusNotFound,\n\t\tCode: \"Not found\",\n\t}\n}", "func userExists(username string) bool {\n\tif _, err := user.Lookup(username); err == nil {\n\t\treturn true\n\t}\n\treturn false\n}", "func (p *UserStoreClient) GetUser(ctx context.Context, authenticationToken string) (r *User, err error) {\n var _args13 UserStoreGetUserArgs\n 
_args13.AuthenticationToken = authenticationToken\n var _result14 UserStoreGetUserResult\n if err = p.Client_().Call(ctx, \"getUser\", &_args13, &_result14); err != nil {\n return\n }\n switch {\n case _result14.UserException!= nil:\n return r, _result14.UserException\n case _result14.SystemException!= nil:\n return r, _result14.SystemException\n }\n\n return _result14.GetSuccess(), nil\n}", "func CheckUserExists(username string, table string, session *r.Session) bool {\n\tvar u interface{}\n\tdb := os.Getenv(\"DB\")\n\t// userTable := os.Getenv(\"USERTABLE\")\n\tcur, _ := r.DB(db).Table(table).GetAllByIndex(\"username\", username).Run(session)\n\t_ = cur.One(&u)\n\tcur.Close()\n\t// fmt.Println(u)\n\tif u == nil {\n\t\t// fmt.Println(\"NO\")\n\t\treturn false\n\t}\n\t// fmt.Println(\"YES\")\n\treturn true\n}", "func GetUser(c *gin.Context) {\n\tnID := c.Param(\"user_id\")\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_user(?)\", nID)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\n\tuser := User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, &accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func CheckUserExists(email string) (models.User, bool, string) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"cardinal\")\n\tusers := db.Collection(\"users\")\n\n\tcondition := bson.M{\"email\": email}\n\tvar result models.User\n\terr := users.FindOne(ctx, condition).Decode(&result)\n\tID := 
result.ID.Hex()\n\tif err != nil {\n\t\treturn result, false, ID\n\t}\n\treturn result, true, ID\n}", "func (u *User) FindOrCreateFromOrglessAuthUser(tx *pop.Connection, authUser *auth.User, authType string) error {\n\tif err := tx.Where(\"email = ?\", authUser.Email).First(u); domain.IsOtherThanNoRows(err) {\n\t\treturn errors.WithStack(err)\n\t}\n\n\treturn u.hydrateFromAuthUser(tx, authUser, authType)\n}", "func sqlUserExists(db *sql.DB, username string) (userID, userStatus int) {\n\tsqlUserQuery := `SELECT user_id, status FROM public.users WHERE username=$1;`\n\trow := db.QueryRow(sqlUserQuery, username)\n\tswitch err := row.Scan(&userID, &userStatus); err {\n\tcase sql.ErrNoRows:\n\t\tfmt.Println(\"User not found, attempting insert\")\n\t\tuserID = sqlUserInsert(db, username)\n\t\tuserStatus = 3\n\t\treturn\n\tcase nil:\n\t\tfmt.Println(\"User found, checking hash\")\n\t\treturn\n\tdefault:\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n}", "func (o *UserPasswordCredentials) GetUserOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.User, true\n}", "func (app *application) getUser(w http.ResponseWriter, r *http.Request) {\n\tid, err := uuid.Parse(r.URL.Query().Get(\":uuid\"))\n\tif err != nil || id == uuid.Nil {\n\t\tapp.notFound(w)\n\t\treturn\n\t}\n\n\tuser, err := app.users.GetByUUID(id)\n\n\tif err == models.ErrNoRecord {\n\t\tapp.notFound(w)\n\t\treturn\n\t} else if err != nil {\n\t\tapp.serverError(w, err)\n\t\treturn\n\t}\n\n\tapp.jsonResponse(w, user)\n}", "func isExistingUser(userName string, req *http.Request) bool {\nctx := appengine.NewContext(req)\nitem, err := memcache.Get(ctx, userName)\nif err != nil {\nlogError(err)\nreturn false\n}\nlog.Println(\"item: \" + item.Key)\nif item.Key == \"\" {\nreturn false\n}\nreturn true\n}", "func (mi *MixtapeIndex) userExists(id string) bool {\n\tif uid, ok := mi.Users[id]; !ok {\n\t\tfmt.Println(\"User DNE ->\", uid)\n\t\treturn false\n\t}\n\treturn true\n}", "func (s *searcher) getUser() (user *git.User, reload bool, err error) {\n\t// read a user from database.\n\tvar userData []byte\n\tsuberr := s.db.Update(func(tx *bolt.Tx) error {\n\t\tvar inerr error\n\t\tbucket := tx.Bucket([]byte(userBucketName))\n\t\tif bucket == nil {\n\t\t\tbucket, inerr = tx.CreateBucket([]byte(userBucketName))\n\t\t\tif inerr != nil {\n\t\t\t\treturn inerr\n\t\t\t}\n\t\t}\n\t\tuserData = bucket.Get([]byte(s.gitToken))\n\t\treturn nil\n\t})\n\tif suberr != nil { // maybe collapse db file.\n\t\tClearAll()\n\t\tcolor.Yellow(\"[err] collapse db file, so delete db file\")\n\t\terr = fmt.Errorf(\"[err] getUser %w\", suberr)\n\t\treturn\n\t}\n\n\t// if a user doesn't exist.\n\tif userData == nil || len(userData) == 0 {\n\t\tnewUser, suberr := s.git.User()\n\t\tif suberr != nil {\n\t\t\terr = fmt.Errorf(\"[err] createIndex %w\", suberr)\n\t\t\treturn\n\t\t}\n\t\tuser = newUser\n\t\treload = true\n\t\treturn\n\t}\n\n\t// unmarshal user\n\tif suberr := json.Unmarshal(userData, &user); suberr != nil {\n\t\tcolor.Yellow(\"[err] collapse user data, so delete user data\")\n\t\tcolor.Red(\"[err] retry again!\")\n\t\ts.db.Update(func(tx *bolt.Tx) error {\n\t\t\tb := tx.Bucket([]byte(userBucketName))\n\t\t\tb.Delete([]byte(s.gitToken))\n\t\t\treturn nil\n\t\t})\n\t\terr = fmt.Errorf(\"[err] createIndex %w\", suberr)\n\t\treturn\n\t}\n\n\t// check whether reload or not.\n\tif user.CachedAt.Unix() < time.Now().Add(-1*time.Hour).Unix() {\n\t\treload = true\n\t\tnewUser, suberr := s.git.User()\n\t\tif suberr != nil {\n\t\t\tcolor.Yellow(\"[err] a user 
doesn't reload %s\", suberr.Error())\n\t\t} else {\n\t\t\tuser = newUser\n\t\t}\n\t}\n\treturn\n}", "func GetForLogin(db *pg.DB, email string) (*User, error) {\n\tuser := &User{}\n\tcount, err := db.Model(user).Where(\"email = ?\", email).Count()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif count < 1 {\n\t\treturn nil, errors.New(\"User does not exist\")\n\t}\n\n\terr = db.Model(user).Where(\"email = ?\", email).Select()\n\n\treturn user, nil\n}", "func TestGetByEmailInvalid(t *testing.T) {\n\tdb := database.Connect()\n\tu := User{\n\t\tEmail: \"[email protected]\",\n\t}\n\tu.GetByEmail(db)\n\tif u.ID != 0 {\n\t\tt.Errorf(\"Expected no result, got %v\", u)\n\t}\n}", "func (test *Test) GetUser(username string) (models.User, error) {\n\tif tests.NormalUser.Name == username {\n\t\treturn tests.NormalUser, nil\n\t}\n\treturn models.User{}, errors.New(\"User not found\")\n}", "func userExist(name string) User {\n\n\tu := User{}\n\tfmt.Println(name)\n\tfmt.Println(reflect.TypeOf(name))\n\n\tvar theQuery = \"SELECT * FROM users WHERE name=$1\"\n\n\trow := db.QueryRow(theQuery, name)\n\terr := row.Scan(&u.ID, &u.Name, &u.Score);\n\n\tif err != nil && err != sql.ErrNoRows {\n\t\tfmt.Println(err.Error())\t\n\t}\n\n\treturn u\n\n}", "func GetUser(username string, session *r.Session) ct.User {\n\tvar u ct.User\n\t// var user ct.User\n\tdb := os.Getenv(\"DB\")\n\ttable := os.Getenv(\"USERTABLE\")\n\t// userTable := os.Getenv(\"USERTABLE\")\n\tcur, _ := r.DB(db).Table(table).GetAllByIndex(\"username\", username).Run(session)\n\t_ = cur.One(&u)\n\tcur.Close()\n\t// fmt.Println(u)\n\t// mapstructure.Decode(u, &user)\n\treturn u\n}", "func getUserFromUsername(c appengine.Context, username string) (User, error) {\n\tuser := User{}\n\n\t// get user based on Username\n\tquery := datastore.NewQuery(\"Users\").Filter(\"Username =\", username)\n\n\tvar users []User\n\tkeys, err := query.GetAll(c, &users)\n\tif err != nil {\n\t\treturn user, err\n\t}\n\tif len(users) > 1 {\n\t\treturn user, errors.New(\"More than one user is returned, something bad happened\")\n\t}\n\n\tif len(users) == 0 {\n\t\treturn User{}, nil\n\t}\n\tuser = users[0]\n\tuser.ID = keys[0].IntID()\n\treturn user, nil\n}", "func TestGetUser(t *testing.T) {\n\ttests := []struct {\n\t\tid int64\n\t\texceptResult *schedule.User\n\t}{\n\t\t{1, &schedule.User{ID: 1}},\n\t}\n\n\tfor _, test := range tests {\n\t\tr, err := ss.GetUser(ctx, test.id)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"GetUser(%d) fail, err %s, result %+v\", test.id, err, r)\n\t\t\treturn\n\t\t}\n\n\t\tif r.ID != test.exceptResult.ID {\n\t\t\tt.Errorf(\"GetUser(%d) = %+v, except %+v\", test.id, r, test.exceptResult)\n\t\t\treturn\n\t\t}\n\t}\n}", "func (v *Validator) GetUser(username string) (*model.User, int, error) {\n\tif len(username) < 2 || len(username) > 16 {\n\t\treturn nil, http.StatusBadRequest, errors.ErrInvalidUsername\n\t}\n\n\t// Check empty id.\n\tkey := internal.GetKey(internal.KeyEmptyUser, username)\n\tif v.isEmptyID(key) {\n\t\treturn nil, http.StatusNotFound, errors.ErrNot200\n\t}\n\n\t// Parse.\n\tdata, code, err := v.api.GetUser(username)\n\n\t// Save empty id.\n\tv.saveEmptyID(code, key)\n\n\treturn data, code, err\n}", "func Exists(email string, pass string) (user *models.User, status int, err error) {\n\tif email == \"\" || pass == \"\" {\n\t\treturn user, http.StatusNotFound, errors.New(\"no_username_password\")\n\t}\n\tvar userExist = &models.User{}\n\t// search by email or username\n\tif userValidator.EmailValidation(email) {\n\t\tif 
models.ORM.Where(\"email = ?\", email).First(userExist).RecordNotFound() {\n\t\t\tstatus, err = http.StatusNotFound, errors.New(\"user_not_found\")\n\t\t\treturn\n\t\t}\n\t} else if models.ORM.Where(\"username = ?\", email).First(userExist).RecordNotFound() {\n\t\tstatus, err = http.StatusNotFound, errors.New(\"user_not_found\")\n\t\treturn\n\t}\n\tuser = userExist\n\terr = bcrypt.CompareHashAndPassword([]byte(userExist.Password), []byte(pass))\n\tif err != nil {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"incorrect_password\")\n\t\treturn\n\t}\n\tif userExist.IsBanned() {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"account_banned\")\n\t\treturn\n\t}\n\tif userExist.IsScraped() {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"account_need_activation\")\n\t\treturn\n\t}\n\tstatus, err = http.StatusOK, nil\n\treturn\n}", "func (this *SSNDB) GetUser() User {\n\tvar user User\n\tthis.First(&user)\n\tlogger.AssertError(user.Id != 0, \"main user does not exist in db!\")\n\treturn user\n}", "func (UserDao) GetUser(account string) (*models.User, error) {\n\tuser := new(models.User)\n\thas, err := X.Table(\"sys_user\").Where(\"account = ?\", account).Get(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !has {\n\t\treturn nil, errors.New(\"user not found\")\n\t}\n\treturn user, nil\n}", "func getUser(client *chef.Client, name string) chef.User {\n\tuserList, err := client.Users.Get(name)\n\tif err != nil {\n\t\tfmt.Fprintln(os.Stderr, \"Issue listing user\", err)\n\t}\n\treturn userList\n}", "func (user *User) Get() *errors.RestErr {\n\tstmt, err := usersdb.Client.Prepare(queryGetUser)\n\tif err != nil {\n\t\tlogger.Error(\"error when trying to prepaire get user statment\", err)\n\t\treturn errors.NewInternalServerError(\"database error\")\n\t}\n\tdefer stmt.Close()\n\n\tresult := stmt.QueryRow(user.ID)\n\n\tif err := result.Scan(&user.ID, &user.FirstName, &user.LastName, &user.Email, &user.DateCreated); err != nil {\n\t\tlogger.Error(\"error attempting to get user by id\", err)\n\t\treturn errors.NewInternalServerError(\"database error\")\n\t\t//fmt.Println(err)\n\t\t/*UNSURE\n\t\tif strings.Contains(err.Error(), errorNoRows) {\n\t\t\treturn errors.NewNotFoundError(fmt.Sprintf(\"user %d not found\", user.ID))\n\n\t\t}\n\n\t\treturn errors.NewInternalServerError(\n\t\t\tfmt.Sprintf(\"error attempting to get user %d: %s\", user.ID, err.Error()))\n\t\t*/\n\t}\n\n\treturn nil\n\n}", "func (h *Handler) GetUser(w http.ResponseWriter, r *http.Request) {\n\tid, err := strconv.Atoi(r.URL.Query().Get(\"id\"))\n\tif err != nil {\n\t\trender.BadRequest(w, r, \"id must be an integer greater zero\")\n\t\treturn\n\t}\n\n\t// Query user details from userID\n\tuser, err := h.Client.User.\n\t\tQuery().\n\t\tWhere(usr.ID(id)).\n\t\tOnly(r.Context())\n\tif err != nil {\n\t\tswitch {\n\t\tcase ent.IsNotFound(err):\n\t\t\trender.NotFound(w, r, \"Email Doesn't exists\")\n\t\tdefault:\n\t\t\trender.InternalServerError(w, r, \"Server Error\")\n\t\t}\n\t\treturn\n\t}\n\trender.OK(w, r, user)\n}", "func (r *PostgresUserRepository) GetUser(id uuid.UUID) (models.User, error) {\n\tuser := new(models.User)\n\n\trow := r.db.QueryRow(\"SELECT * FROM user WHERE id=?\", id)\n\terr := row.Scan(&user.Id, &user.Email, &user.Name, &user.NotificationEndpoint)\n\n\tswitch {\n\tcase err == sql.ErrNoRows:\n\t\treturn *user, err\n\tcase err != nil:\n\t\tlog.Fatal(err)\n\t\treturn *user, err\n\t}\n\n\tif !user.Validate() {\n\t\terr = errors.New(\"User is Malformed\")\n\t}\n\n\treturn *user, nil\n}", 
"func getVerifyUser(user *models.User, code string) bool {\n\tif len(code) <= utils.TimeLimitCodeLength {\n\t\treturn false\n\t}\n\n\t// use tail hex username query user\n\thexStr := code[utils.TimeLimitCodeLength:]\n\tif b, err := hex.DecodeString(hexStr); err == nil {\n\t\tuser.UserName = string(b)\n\t\tif user.Read(\"UserName\") == nil {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func ExistingUser(email string) (User, bool) {\n\n\tvar user User\n\tDb.Debug().Where(\"email = ?\", email).Find(&user)\n\tif user == (User{}) {\n\t\treturn User{}, false\n\t}\n\treturn user, true\n}", "func (repo mockRepository) GetByCreds(email, password string) (*User, error) {\n\tfor _, u := range repo {\n\t\tif u.Email == email && u.Password == password {\n\t\t\treturn &u, nil\n\t\t}\n\t}\n\n\treturn nil, ErrNoSuchUser\n}", "func (u *UsersStoreMemory) GetUser(id string) (*common.User, error) {\n\tuser, exist := u.usersMap[id]\n\tif exist {\n\t\treturn &user, nil\n\t}\n\treturn nil, errors.New(\"No existe el usuario\")\n}", "func checkUserIsRegistered(email string, password string) string {\n\tdb, err := config.GetMongoDB()\n\n\tif err != nil {\n\t\tfmt.Println(\"Gagal menghubungkan ke database!\")\n\t\tos.Exit(2)\n\t}\n\n\tvar userRepository repository.UserRepository\n\n\tuserRepository = repository.NewUserRepositoryMongo(db, \"pengguna\")\n\n\tuserData, err1 := userRepository.FindAll()\n\n\tif err1 != nil {\n\t\treturn \"invalid_email_login\"\n\t} else {\n\t\tfor _, user := range userData {\n\t\t\tif email == user.Email {\n\t\t\t\tif components.CheckPasswordHash(password, user.Password) == true {\n\t\t\t\t\treturn \"login_success\"\n\t\t\t\t} else {\n\t\t\t\t\treturn \"invalid_password_login\"\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn \"invalid_email_login\"\n\t\t\t}\n\t\t}\n\t}\n\n\treturn \"invalid_email_login\"\n}", "func GetUser(id string) *User {\n\tif user, ok := Users[id]; ok {\n\t\treturn user\n\t}\n\treturn nil\n}", "func (h *Handler) Get(_ context.Context, in *usersapi.GetPayload) (res *usersapi.User, err error) {\n\tfmt.Println(\"xxxxxxxx22222222333333Ali\")\n\tfmt.Printf(\"user isssss %s\\n\", *in.ID)\n\tusr, err := h.provider.Get(*in.ID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &usersapi.User{Username: usr.Username, Password: usr.Password}, nil\n}", "func (s *Database) GetUser(id string) (*UserPartner, error) {\n\tuser := &UserPartner{Id: id}\n\tc, err := s.Engine.Get(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !c {\n\t\treturn nil, errors.New(\"Khong tim thay\")\n\t}\n\treturn user, nil\n}", "func UserExist(db *gorm.DB, tel string) bool {\n\tvar user model.User\n\tdb.First(&user, \"telephone = ?\", tel)\n\tif user.ID != 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func (u *User) GetUser(db *sql.DB) (*User, error){\n\taccount := &User{}\n\tvar err error\n\tdefer func() {\n\t\tlog.Printf(\"get user: err %v\", err)\n\t}()\n\t//log.Printf(u.Email)\n\terr = db.QueryRow(\"select first_name, last_name, id, password from users where email = $1\", u.Email).\n\t\tScan(&u.FirstName, &u.LastName, &u.ID, &u.Password)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn account, nil\n}", "func ExistingUser(email, password string) bool {\n\tvar u User\n\tDb.Where(\"email = ? 
AND password = ?\", email, password).First(&u)\n\tif email != u.Email && password != u.Password {\n\t\treturn false\n\t}\n\treturn true\n}", "func TryToGetUser(c *gin.Context) {\n\ttokenStr := c.Request.Header.Get(\"Authorization\")\n\tusername, _, _ := model.ParseToken(tokenStr[7:])\n\tif username != \"\" {\n\t\tc.Set(\"username\", username)\n\t}\n\tc.Next()\n}", "func (b *NaiveUserManager) GetUserByToken(_ context.Context, token string) (gimlet.User, error) {\n\tfor i, user := range b.users {\n\t\t//check to see if token exists\n\t\tpossibleToken := fmt.Sprintf(\"%v:%v:%v\", i, user.Email, md5.Sum([]byte(user.Username+user.Password)))\n\t\tif token == possibleToken {\n\t\t\treturn &simpleUser{\n\t\t\t\tUserId: user.Username,\n\t\t\t\tName: user.DisplayName,\n\t\t\t\tEmailAddress: user.Email,\n\t\t\t}, nil\n\t\t}\n\t}\n\treturn nil, errors.New(\"No valid user found\")\n}", "func GetUserUsername(c *gin.Context) {\n\tuserName := c.Param(\"user_name\")\n\tdb := dbConn()\n\tselDB, err := db.Query(\"CALL read_user_username(?)\", userName)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\n\tuser := User{}\n\tusers := []User{}\n\tfor selDB.Next() {\n\t\tvar id, username, useremail, fname, lname, password, passwordchange, passwordexpired, lastlogon, accountlocked string\n\t\terr = selDB.Scan(&id, &username, &useremail, &fname, &lname, &password, &passwordchange, &passwordexpired, &lastlogon, &accountlocked)\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t}\n\t\tuser.ID = id\n\t\tuser.UserName = username\n\t\tuser.UserEmail = useremail\n\t\tuser.FName = fname\n\t\tuser.LName = lname\n\t\tuser.Password = password\n\t\tuser.PasswordChange = passwordchange\n\t\tuser.PasswordExpired = passwordexpired\n\t\tuser.LastLogon = lastlogon\n\t\tuser.AccountLocked = accountlocked\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\tselDB02, err := db.Query(\"CALL read_access_userid(?)\", iid)\n\t\tif err != nil {\n\t\t\tpanic(err.Error)\n\t\t}\n\t\taccess := Access{}\n\t\taccessList := []Access{}\n\t\tfor selDB02.Next() {\n\t\t\tvar accessid, userid, courtid, caseaccess, personaccess, accountingaccess, juryaccess, attorneyaccess, configaccess, securitylevel, sealedcase string\n\t\t\terr := selDB02.Scan(&accessid, &userid, &courtid, &caseaccess, &personaccess, &accountingaccess, &juryaccess, &attorneyaccess, &configaccess, &securitylevel, &sealedcase)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t\tc.JSON(500, gin.H{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\taccess.AccessID = accessid\n\t\t\taccess.IDUser = userid\n\t\t\taccess.IDCourt = courtid\n\t\t\taccess.CaseAccess = caseaccess\n\t\t\taccess.PersonAccess = personaccess\n\t\t\taccess.AccountingAccess = accountingaccess\n\t\t\taccess.JuryAccess = juryaccess\n\t\t\taccess.AttorneyAccess = attorneyaccess\n\t\t\taccess.ConfigAccess = configaccess\n\t\t\taccess.SecurityLevel = securitylevel\n\t\t\taccess.SealedCase = sealedcase\n\t\t\taccessList = append(accessList, access)\n\t\t}\n\t\tuser.AccessList = accessList\n\t\tusers = append(users, user)\n\t}\n\n\tc.JSON(200, gin.H{\n\t\t\"result\": users,\n\t})\n\n\tdefer db.Close()\n}", "func (u *User) GetUser(db *pg.DB) (*User, error) {\n\tcount, err := db.Model(u).WherePK().Count()\n\n\tif count < 1 {\n\t\treturn nil, errors.New(\"User does not exist\")\n\t}\n\terr = db.Model(u).WherePK().Select()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn u, nil\n}", "func 
GetUser(db *gorm.DB, w http.ResponseWriter, r *http.Request) {\n\trepo := users.NewUserRepository(db)\n\tvars := mux.Vars(r)\n\n\ttarget, _ := strconv.Atoi(vars[\"id\"])\n\tuser, changed := repo.GetUser(users.User{}, target)\n\n\tif !changed {\n\t\tutils.JsonResponse(w, utils.ErrorResponse{Error: \"User matching query not found\"}, http.StatusNotFound)\n\t} else {\n\t\tutils.JsonResponse(w, user, http.StatusOK)\n\t}\n}", "func (repo *UserRepo) GetUser(username string) (user *models.User) {\n\tuser = &models.User{}\n\terr := repo.db.Where(\"user_name = ?\", username).Find(user)\n\tif err.RecordNotFound() {\n\t\tlog.Println(\"Username not correct\")\n\t\treturn nil\n\t}\n\tlog.Println(\"Correct username and password\")\n\treturn\n}", "func checkUserResponse(user, resp User) (err error) {\n\tif user.Name != resp.Name {\n\t\terr = errors.New(\"Name isn't equal\")\n\t\treturn\n\t}\n\tif user.Username != resp.Username {\n\t\terr = errors.New(\"Username isn't equal\")\n\t\treturn\n\t}\n\tif user.Phone != resp.Phone {\n\t\terr = errors.New(\"Phone isn't equal\")\n\t\treturn\n\t}\n\tif user.Password != \"\" {\n\t\terr = errors.New(\"Password isn't empty\")\n\t\treturn\n\t}\n\treturn\n}", "func (f *Fs) getUser(ctx context.Context) (user *api.User, err error) {\n\tvar resp *http.Response\n\topts := rest.Opts{\n\t\tMethod: \"GET\",\n\t\tPath: \"/user\",\n\t}\n\terr = f.pacer.Call(func() (bool, error) {\n\t\tresp, err = f.srv.CallXML(ctx, &opts, nil, &user)\n\t\treturn shouldRetry(ctx, resp, err)\n\t})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get user: %w\", err)\n\t}\n\treturn user, nil\n}", "func getUser(s *sessions.Session) User {\n\tval := s.Values[\"user\"]\n\tvar user = User{}\n\tuser, ok := val.(User)\n\tif !ok {\n\t\treturn User{Authenticated: false}\n\t}\n\treturn user\n}", "func (db *BotDB) GetUser(id uint64) (*discordgo.User, time.Time, *time.Location, *uint64) {\n\tu := &discordgo.User{}\n\tvar lastseen time.Time\n\tvar loc sql.NullString\n\tvar guild sql.NullInt64\n\tvar discriminator int\n\terr := db.sqlGetUser.QueryRow(id).Scan(&u.ID, &u.Username, &discriminator, &lastseen, &loc, &guild)\n\tif discriminator > 0 {\n\t\tu.Discriminator = strconv.Itoa(discriminator)\n\t}\n\tif err == sql.ErrNoRows || db.CheckError(\"GetUser\", err) != nil {\n\t\treturn nil, lastseen, nil, nil\n\t}\n\tif !guild.Valid {\n\t\treturn u, lastseen, evalTimeZone(loc), nil\n\t}\n\tg := uint64(guild.Int64)\n\treturn u, lastseen, evalTimeZone(loc), &g\n}", "func (c APIClient) GetUser(username string) (User, error) {\n\tvar u User\n\tstatus, err := c.doHTTPUnmarshal(\"GET\", fmt.Sprintf(\"https://api.nsone.net/v1/account/users/%s\", username), nil, &u)\n\tif status == 404 {\n\t\tu.Username = \"\"\n\t\tu.Name = \"\"\n\t\treturn u, nil\n\t}\n\treturn u, err\n}", "func (rd *RedisHelper)GetUser(userName string) (_user user.User,err error) {\n\tif rd==nil{\n\t\tlog4go.Error(\"RedisHelper instance is nil\")\n\t\treturn user.User{}, errors.New(\"Nil point receivcer\")\n\t}\n\tresult, err := redis.Strings(rd.conn.Do(\"LRANGE\", userName, 0, -1))\n\tif err != nil{\n\t\tlog4go.Error(err)\n\t\treturn\n\t}\n\tif len(result) == 0{\n\t\tlog4go.Error(fmt.Sprintf(\"No such user %v in redis\", userName))\n\t\treturn user.User{}, errors.New(fmt.Sprintf(\"No such user %v in redis\", userName))\n\t}\n\t_user.SetUserName(userName)\n\t_user.SetPassword(result[0])\n\tif len(result)>1 {\n\t\t_user.SetToken(result[1])\n\t}\n\treturn\n}", "func (s *Service) GetUser(c context.Context, username string) (usr *user.User, err 
error) {\n\tusr = &user.User{}\n\terr = s.DB.Where(\"username = ?\", username).First(usr).Error\n\tif err == gorm.ErrRecordNotFound {\n\t\tusr.UserName = username\n\t\tusr.NickName = username\n\t\terr = s.DB.Create(usr).Error\n\t}\n\tif err != nil {\n\t\tlog.Error(\"apmSvc.GetUser error(%v)\", err)\n\t\treturn\n\t}\n\ts.ranksCache.Lock()\n\tif s.ranksCache.Map[username] != nil {\n\t\tusr.AvatarURL = s.ranksCache.Map[username].AvatarURL\n\t} else {\n\t\tusr.AvatarURL, _ = s.dao.GitLabFace(c, username)\n\t}\n\ts.ranksCache.Unlock()\n\treturn\n}", "func getUser(c *gin.Context, db *gorm.DB) *models.User {\n\tuserID, err := strconv.Atoi(c.Param(\"userID\"))\n\tif err != nil {\n\t\tjsonError(c, \"could not parse user id\", err)\n\t\treturn nil\n\t}\n\n\tuser := &models.User{}\n\tq := db.Where(\"id = ?\", userID).First(user)\n\tif err := q.Error; err != nil {\n\t\tjsonError(c, \"error looking up user\", err)\n\t\treturn nil\n\t} else if q.RecordNotFound() {\n\t\tjsonError(c, \"user not found\", nil)\n\t\treturn nil\n\t}\n\treturn user\n}", "func getGeneratedKeycloakUser(ctx context.Context, serverClient k8sclient.Client, ns string, tsUser *User) (*keycloak.KeycloakUser, error) {\n\n\tvar users keycloak.KeycloakUserList\n\n\tlistOptions := []k8sclient.ListOption{\n\t\tk8sclient.MatchingLabels(rhsso.GetInstanceLabels()),\n\t\tk8sclient.InNamespace(ns),\n\t}\n\terr := serverClient.List(ctx, &users, listOptions...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i := range users.Items {\n\t\tkcUser := users.Items[i]\n\t\tif tsUserIDInKc(tsUser, &kcUser) {\n\t\t\treturn &kcUser, nil\n\t\t}\n\t}\n\n\treturn nil, fmt.Errorf(\"genrated Keycloak user was not found\")\n}", "func findUser(id int) (int,error) {\n\n\tif id != 0 {\n\t\tfor i:=0; i<len(userList.UserList); i++ {\n\n\t\t\tif userList.UserList[i].Id == id{\n\t\t\t\treturn i, nil\n\n\t\t\t}\n\t\t}\n\t}\n\treturn -1, errors.New(\"User not found or invalid id\")\n}", "func getUser(uid string, token string) (*User, error) {\n\t// declarations\n\tuser := User{}\n\tvar user_name, real_name, email sql.NullString\n\n\t// fetch user and verify token\n\tif err := db.QueryRow(\"SELECT * FROM users WHERE id=$1 AND token=$2\", uid,\n\t\ttoken).Scan(&user.Id, &user.GH_Id, &user_name, &real_name, &email,\n\t\t&user.Token, &user.Worker_token, &user.Admin); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// set remaining fields\n\tif user_name.Valid {\n\t\tuser.User_name = user_name.String\n\t}\n\tif real_name.Valid {\n\t\tuser.Real_name = real_name.String\n\t}\n\tif email.Valid {\n\t\tuser.Email = email.String\n\t}\n\n\treturn &user, nil\n}" ]
[ "0.68073165", "0.66821694", "0.6365818", "0.63615876", "0.63530475", "0.6308443", "0.63007885", "0.6273276", "0.62538654", "0.6250142", "0.6225184", "0.6201736", "0.6161349", "0.61523443", "0.61411387", "0.61315435", "0.6121934", "0.61152124", "0.609519", "0.6061417", "0.60569686", "0.60522777", "0.6041733", "0.60393935", "0.60281205", "0.602787", "0.6011983", "0.6011193", "0.59904176", "0.59886265", "0.59771174", "0.596547", "0.59532756", "0.5943059", "0.594271", "0.59399676", "0.59330404", "0.59323126", "0.59224766", "0.5916699", "0.591377", "0.59016156", "0.58740425", "0.5866848", "0.5861335", "0.58602524", "0.58573693", "0.5855638", "0.58545846", "0.58499", "0.5848581", "0.58401304", "0.583852", "0.58277524", "0.5827711", "0.5820395", "0.58195955", "0.581564", "0.5811952", "0.5810577", "0.5806578", "0.5795704", "0.5795394", "0.5791303", "0.57902646", "0.5789731", "0.578958", "0.5788431", "0.57806677", "0.5779053", "0.5771651", "0.576909", "0.5751398", "0.5737465", "0.573158", "0.57296664", "0.5729227", "0.57260466", "0.5725955", "0.5722714", "0.5720174", "0.5712484", "0.57082134", "0.57068634", "0.570557", "0.5703608", "0.57018095", "0.57017106", "0.56986815", "0.5683094", "0.56826925", "0.5681886", "0.5679234", "0.56756204", "0.56748146", "0.56739706", "0.5673458", "0.5670003", "0.5658692", "0.5657122", "0.5652168" ]
0.0
-1
8 Try to delete 4 users
func TestDeleteUserService(t *testing.T) {
	err := DeleteUserService(user_01.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)

	err = DeleteUserService(user_02.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)

	err = DeleteUserService(user_03.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)

	err = DeleteUserService(user_04.SocialNumber)
	assert.Equal(t, 200, err.HTTPStatus)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (api *API) deleteUserHandler() service.Handler {\n\treturn func(ctx context.Context, w http.ResponseWriter, r *http.Request) error {\n\t\tvars := mux.Vars(r)\n\t\tusername := vars[\"permUsernamePublic\"]\n\n\t\tconsumer := getUserConsumer(ctx)\n\n\t\ttx, err := api.mustDB().Begin()\n\t\tif err != nil {\n\t\t\treturn sdk.WrapError(err, \"cannot start transaction\")\n\t\t}\n\t\tdefer tx.Rollback() // nolint\n\n\t\tvar u *sdk.AuthentifiedUser\n\t\tif username == \"me\" {\n\t\t\tu, err = user.LoadByID(ctx, tx, consumer.AuthConsumerUser.AuthentifiedUserID)\n\t\t} else {\n\t\t\tu, err = user.LoadByUsername(ctx, tx, username)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// We can't delete the last admin\n\t\tif u.Ring == sdk.UserRingAdmin {\n\t\t\tcount, err := user.CountAdmin(tx)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif count < 2 {\n\t\t\t\treturn sdk.NewErrorFrom(sdk.ErrForbidden, \"can't remove the last admin\")\n\t\t\t}\n\t\t}\n\n\t\t// We can't delete a user if it's the last admin in a group\n\t\tvar adminGroupIDs []int64\n\t\tgus, err := group.LoadLinksGroupUserForUserIDs(ctx, tx, []string{u.ID})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor i := range gus {\n\t\t\tif gus[i].Admin {\n\t\t\t\tadminGroupIDs = append(adminGroupIDs, gus[i].GroupID)\n\t\t\t}\n\t\t}\n\t\tif len(adminGroupIDs) > 0 {\n\t\t\tgus, err := group.LoadLinksGroupUserForGroupIDs(ctx, tx, adminGroupIDs)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tadminLeftCount := make(map[int64]int)\n\t\t\tfor _, id := range adminGroupIDs {\n\t\t\t\tadminLeftCount[id] = 0\n\t\t\t}\n\t\t\tfor i := range gus {\n\t\t\t\tif gus[i].AuthentifiedUserID != u.ID && gus[i].Admin {\n\t\t\t\t\tadminLeftCount[gus[i].GroupID] += 1\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor _, count := range adminLeftCount {\n\t\t\t\tif count < 1 {\n\t\t\t\t\treturn sdk.NewErrorFrom(sdk.ErrForbidden, \"cannot remove user because it is the last admin of a group\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif err := user.DeleteByID(tx, u.ID); err != nil {\n\t\t\treturn sdk.WrapError(err, \"cannot delete user\")\n\t\t}\n\n\t\tif err := tx.Commit(); err != nil {\n\t\t\treturn sdk.WithStack(err)\n\t\t}\n\n\t\treturn service.WriteJSON(w, nil, http.StatusOK)\n\t}\n}", "func (d *database) deleteUsers() (err error) {\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\tquery := \"DELETE FROM letters WHERE sender IN (SELECT sender FROM letters WHERE letter_purpose == '\" + purpose.ActionErase + \"');\"\n\tlogger.Log.Debug(query)\n\tstmt, err := tx.Prepare(query)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\treturn\n}", "func doDelete(w http.ResponseWriter, r *http.Request) {\n\n\t// get the user ID from the path\n\tfields := strings.Split(r.URL.String(), \"/\")\n\tid, err := strconv.ParseUint(fields[len(fields)-1], 10, 64)\n\tif nil != err {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Printf(\"Request to delete user %v\", id)\n\n\t// start of protected code changes\n\tlock.Lock()\n\tvar tmp = []*User{}\n\tfor _, u := range db {\n\t\tif id == u.ID {\n\t\t\tcontinue\n\t\t}\n\t\ttmp = append(tmp, u)\n\t}\n\tdb = tmp\n\t// end protected code changes\n\tlock.Unlock()\n}", "func UserDelete(w http.ResponseWriter, r *http.Request, ps 
httprouter.Params) {\n\n}", "func delete_specific_user(c client, id uint16, delete_thumb bool) uint8 {\n\tlogger_id(PRINT_NORMAL, c.deviceId, \"deleting user id = \", id)\n\tresponse_chan := make(chan Ipc_packet)\n\tsite_mux_reg_cmd(c.deviceId, response_chan)\n\n\tvar ipc Ipc_packet\n\n\tif delete_thumb {\n\t\tipc = create_ipc_cmd_dlt_usr_helper(c.deviceId, id, true)\n\t} else {\n\t\tipc = create_ipc_cmd_dlt_usr_helper(c.deviceId, id, false)\n\t}\n\tipc.ClientId = c.ClientId\n\tvar resp Ipc_packet\n\n\tgo be_handle_command(ipc)\n\n\tselect {\n\tcase resp = <-response_chan:\n\t\tbreak\n\tcase <-time.After(time.Second * COMMAND_TIMEOUT_TIME):\n\t\tsite_mux_unreg_cmd(c.deviceId)\n\t\tlogger(PRINT_FATAL, \"Timed out delting a user\")\n\t}\n\n\tsite_mux_unreg_cmd(c.deviceId)\n\n\tcmd_rsp := packet_cmd_response_unpack(resp.P.Data)\n\treturn cmd_rsp.Cmd_status\n}", "func (uc UserController) deleteUsers(response http.ResponseWriter, request *http.Request, p httprouter.Params) {\n\tresponse.Header().Add(\"content-type\", \"application/json\")\n\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tuc.collection.Drop(ctx)\n}", "func deleteTest(t *testing.T, creds client.Credentials, deleteData []testDeleteData) {\n\tserver := setup(t, testMultiUserFilenameJSON)\n\tdefer teardown(t, server)\n\ttokenCookie := login(t, server, creds)\n\tdefer logout(t, server, tokenCookie)\n\n\tfor _, d := range deleteData {\n\t\tt.Run(fmt.Sprintf(\"ID-%s\", d.id),\n\t\t\tfunc(t *testing.T) {\n\t\t\t\trequest := httptest.NewRequest(http.MethodDelete, \"http://user/Delete/\"+d.id, nil)\n\t\t\t\trequest.AddCookie(tokenCookie)\n\t\t\t\tresponse := httptest.NewRecorder()\n\t\t\t\tps := httprouter.Params{\n\t\t\t\t\thttprouter.Param{\n\t\t\t\t\t\tKey: \"id\",\n\t\t\t\t\t\tValue: d.id},\n\t\t\t\t}\n\t\t\t\tserver.DeleteUser(response, request, ps)\n\t\t\t\tassert.Equalf(t, d.expectedResponse, response.Code,\n\t\t\t\t\t\"%s attempted to delete user ID %s, expected '%s' got '%s'\", creds.Username, d.id,\n\t\t\t\t\thttp.StatusText(d.expectedResponse), http.StatusText(response.Code))\n\t\t\t})\n\t}\n}", "func (c *SQLiteConn) authUserDelete(username string) int {\n\t// NOOP\n\treturn 0\n}", "func DeleteUsers(c *gin.Context) {\n\tclient := mongoconn.Client\n\tcollection := client.Database(\"demo\").Collection(\"users\")\n\tctx, err := context.WithTimeout(context.Background(), 5*time.Second)\n\tif err != nil {\n\t}\n\tusername := c.Query(\"username\")\n\tvar dbquery dictionary\n\tif username != \"\" {\n\t\tdbquery = dictionary{\"name\": username}\n\t} else {\n\t\tdbquery = dictionary{}\n\t}\n\tresult, err2 := collection.DeleteMany(ctx, dbquery)\n\tif err2 != nil {\n\t\tfmt.Println(\"Error deleting items\")\n\t}\n\trecordsDeleted := result.DeletedCount\n\tif recordsDeleted == 0 {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"message\": \"No records to delete\",\n\t\t})\n\t\treturn\n\t}\n\tc.JSON(200, gin.H{\n\t\t\"message\": \"Successfully deleted users\",\n\t})\n}", "func DeleteUser(userid int64) error {\n _, err := model.Database.Exec(\"DELETE FROM users WHERE userid = ? 
AND isadmin = ?\", userid, false)\n if err != nil {\n return err\n }\n return nil\n}", "func (_obj *WebApiAuth) SysUser_BatchDelete(id []int32, req *SysUser, res *bool, _opt ...map[string]string) (err error) {\n\n\tvar length int32\n\tvar have bool\n\tvar ty byte\n\t_os := codec.NewBuffer()\n\terr = _os.WriteHead(codec.LIST, 1)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = _os.Write_int32(int32(len(id)), 0)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, v := range id {\n\n\t\terr = _os.Write_int32(v, 0)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t}\n\n\terr = req.WriteBlock(_os, 2)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = _os.Write_bool((*res), 3)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar _status map[string]string\n\tvar _context map[string]string\n\tif len(_opt) == 1 {\n\t\t_context = _opt[0]\n\t} else if len(_opt) == 2 {\n\t\t_context = _opt[0]\n\t\t_status = _opt[1]\n\t}\n\t_resp := new(requestf.ResponsePacket)\n\ttarsCtx := context.Background()\n\n\terr = _obj.s.Tars_invoke(tarsCtx, 0, \"SysUser_BatchDelete\", _os.ToBytes(), _status, _context, _resp)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_is := codec.NewReader(tools.Int8ToByte(_resp.SBuffer))\n\terr = _is.Read_bool(&(*res), 3, true)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif len(_opt) == 1 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t} else if len(_opt) == 2 {\n\t\tfor k := range _context {\n\t\t\tdelete(_context, k)\n\t\t}\n\t\tfor k, v := range _resp.Context {\n\t\t\t_context[k] = v\n\t\t}\n\t\tfor k := range _status {\n\t\t\tdelete(_status, k)\n\t\t}\n\t\tfor k, v := range _resp.Status {\n\t\t\t_status[k] = v\n\t\t}\n\n\t}\n\t_ = length\n\t_ = have\n\t_ = ty\n\treturn nil\n}", "func (d *database) deleteUsersEdits(publicKey string) (err error) {\n\tids, err := d.getIDs(publicKey)\n\tif err != nil {\n\t\treturn\n\t}\n\tfor _, id := range ids {\n\t\tidVersions, err := d.getAllVersions(id)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tfor i, idVersion := range idVersions {\n\t\t\tif i == 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\td.deleteLetterFromID(idVersion)\n\t\t}\n\t}\n\treturn\n}", "func deleteUser_u(client *chef.Client, name string) (err error) {\n\terr = client.Users.Delete(name)\n\tif err != nil {\n\t\tfmt.Fprintln(os.Stderr, \"Issue deleting user:\", err)\n\t}\n\treturn\n}", "func (d *webData) deleteUserWeb(w http.ResponseWriter, r *http.Request) {\n\tp := storage.QueryAllUserInfo(d.PDB)\n\terr := d.tpl.ExecuteTemplate(w, \"deleteUserCompletePage\", p)\n\tif err != nil {\n\t\tlog.Println(\"showUsersWeb: template execution error = \", err)\n\t}\n\n\t//parse the html form and get all the data\n\tr.ParseForm()\n\tfn, _ := strconv.Atoi(r.FormValue(\"users\"))\n\tstorage.DeleteUser(d.PDB, fn)\n}", "func (d *database) deleteUsersOldActions(publicKey string, purpose string) (err error) {\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldActions\")\n\t}\n\tlogger.Log.Debug(publicKey, purpose)\n\tquery := \"DELETE FROM letters WHERE id in (SELECT id FROM letters WHERE opened == 1 AND letter_purpose == ? AND sender == ? 
ORDER BY time DESC LIMIT 1000000000 OFFSET 1);\"\n\tlogger.Log.Debug(query)\n\tstmt, err := tx.Prepare(query)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldActions\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(purpose, publicKey)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldActions\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldActions\")\n\t}\n\treturn\n}", "func Delete() error {\n\ti, err := orm.SetTable(\"tb_user\").SetPK(\"uid\").Where(\"name=$1 and uid>$2\", \"viney\", 3).DeleteRow()\n\tif err == nil {\n\t\tfmt.Println(i)\n\t\treturn nil\n\t}\n\treturn err\n}", "func DeleteUser(c *gin.Context) {}", "func DeleteUser(id int) {\n\tvar i int\n\ti = GetIndexOfUser(id)\n\tDeleteUserFromDatabase(i)\n}", "func delete_user(uname string) (string, bool) {\n\t//delete user from server memory\n\tuser_map_lock.Lock()\n\tdelete(user_map, uname)\n\tuser_map_lock.Unlock()\n\terr := rewrite_userlist() //delete user from user list file\n\tif err != nil {\n\t\treturn fmt.Sprintf(\"error: Server rewrite uselist error%s\\n\", END_TAG), false\n\t}\n\n\t//delete user message file\n\tfilename := uname + \".txt\"\n\tcreate_and_lock(filename) // lock the file we want to delete\n\tdefer lock_for_files_map[filename].Unlock()\n\tos.Remove(filename)\n\t//repond sucess\n\treturn fmt.Sprintf(\"success: Deleted user %s.%s\\n\", uname, END_TAG), true\n}", "func deleteUser(w http.ResponseWriter, r *http.Request) {\r\n\tparams := mux.Vars(r)\r\n\tstmt, err := db.Prepare(\"DELETE FROM users WHERE id = ?\")\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\t_, err = stmt.Exec(params[\"id\"])\r\n\tif err != nil {\r\n\t\tpanic(err.Error())\r\n\t}\r\n\tfmt.Fprintf(w, \"User with id = %s was deleted\", params[\"id\"])\r\n}", "func DeleteUserController(c echo.Context) error {\n\tid, _ := strconv.Atoi(c.Param(\"id\"))\n\tif id == -1 {\n\t\treturn c.JSON(http.StatusBadRequest, map[string]interface{}{\n\t\t\t\"message\": \"Invalid id\",\n\t\t})\n\t}\n\tfor i := 0; i < len(users); i++ {\n\t\tif users[i].Id == id {\n\t\t\tif i == len(users)-1 {\n\t\t\t\tusers = users[:len(users)-1]\n\t\t\t\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t\t\t\t\t\"messages\": \"success get all users\",\n\t\t\t\t\t\"users\": users,\n\t\t\t\t})\n\t\t\t}\n\t\t\tusers = users[i+1:]\n\t\t\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t\t\t\t\"messages\": \"success get all users\",\n\t\t\t\t\"users\": users,\n\t\t\t})\n\t\t}\n\n\t}\n\treturn c.JSON(http.StatusBadRequest, map[string]interface{}{\n\t\t\"message\": \"invalid id\",\n\t})\n}", "func _delete(context echo.Context, user *User) error {\n\tdeleteErr := Remove(user.Key)\n\tif deleteErr != nil {\n\t\tlog.Printf(\"Cannot delete user %v\", deleteErr)\n\t\treturn context.JSON(http.StatusInternalServerError, errors.New(\"Cannot delete user with ID: \"+user.ID))\n\t}\n\treturn context.NoContent(http.StatusNoContent)\n}", "func DeleteUserHandler(w http.ResponseWriter, r *http.Request) {\n\tparams := mux.Vars(r)\n\tk := params[\"id\"]\n\n\tif _, ok := Listusers[k]; ok {\n\n\t\tdelete(Listusers, k)\n\n\t} else {\n\t\tlog.Printf(\"No encontramos el id %s\", k)\n\t}\n\tw.WriteHeader(http.StatusNoContent)\n\n}", "func (s *GodrorStorage) DeleteUsers(db XODB, us []*User) error {\n\tvar err error\n\n\tif len(us) == 0 {\n\t\treturn nil\n\t}\n\n\tvar args []interface{}\n\tvar placeholder string\n\tfor i, u := range us {\n\t\targs = append(args, u.ID)\n\t\tif i != 0 {\n\t\t\tplaceholder = placeholder 
+ \", \"\n\t\t}\n\t\tplaceholder += fmt.Sprintf(\":%d\", i+1)\n\t}\n\n\t// sql query\n\tvar sqlstr = `DELETE FROM \"AC\".\"user\" WHERE \"id\" in (` + placeholder + `)`\n\n\t// run query\n\ts.Logger.Info(sqlstr, args)\n\t_, err = db.Exec(sqlstr, args...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func DeleteUsers(id int64) (err error) {\n\to := orm.NewOrm()\n\tv := Users{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&Users{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func db_delete_user(username string) {\n file_path := path.Join(\"db/users\", strings.ToLower(username) + \".json\")\n\n err := os.Remove(file_path)\n \n if err != nil {\n fmt.Println(err.Error())\n return\n }\n fmt.Println(\"User Removed: \", username)\n}", "func DeleteUserProfileHandler(w http.ResponseWriter, r *http.Request) {\n\n}", "func (r *Repository) UsersDelete(id int64) error {\n\treturn r.db.QueryRow(\"DELETE FROM users WHERE id = $1 RETURNING id\", id).Scan(&id)\n}", "func DeleteUser(w http.ResponseWriter, r *http.Request) {\r\n\tdefer r.Body.Close()\r\n\tuser := r.Context().Value(\"user\").(string)\r\n\r\n\tif err := dao.DBConn.RemoveUserByEmail(user); err != nil {\r\n\t\tlog.Println(err)\r\n\t\tu.RespondWithError(w, http.StatusBadRequest, \"User doesn't exist or has already been deleted\")\r\n\t\treturn\r\n\t}\r\n\r\n\tif err := dao.DBConn.RemoveUserExpenses(user); err != nil {\r\n\t\tlog.Println(err)\r\n\t\tu.RespondWithError(w, http.StatusBadRequest, \"User doesn't exist or has already been deleted\")\r\n\t\treturn\r\n\t}\r\n\r\n\tu.RespondWithJSON(w, http.StatusOK, \"User deleted\")\r\n}", "func userDeleteCommandFunc(cmd *cobra.Command, args []string) {\n\tif len(args) != 1 {\n\t\tExitWithError(ExitBadArgs, fmt.Errorf(\"user delete command requires user name as its argument\"))\n\t}\n\n\tfmt.Println(\"删除用户成功\", args[0])\n}", "func ViewDeleteUserUnderHim(w http.ResponseWriter, r *http.Request) { \n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n t, err := template.ParseFiles(\"templates/viewUsers.html\")\n\n userDetails := getSession(r)\n\n if err != nil {\n fmt.Println(err) // Ugly debug output\n w.WriteHeader(http.StatusInternalServerError) // Proper HTTP response\n return\n }\n \n if err != nil {\n fmt.Println(err)\n }\n\n userId := UserIds{\n UserId: r.FormValue(\"userId\"),\n }\n\n var userList []helpers.User\n var successMessage string\n var isShow bool \n\n if (userId.UserId != \"\" ) {\n if (dbquery.DeleteManagerUser(\"User\",userId.UserId)){\n isShow = true\n successMessage = \"User Deleted Successfully\"\n }\n }\n \n userList = dbquery.GetUserByMngrList(userDetails.UserId)\n t.Execute(w, AllUsersResponse{Users: userList, SuccessMessage: successMessage, IsShow: isShow}) \n}", "func DeleteAdminUsers(id int64) (err error) {\n\n\tv := AdminUsers{Id: id}\n\t// ascertain id exists in the database\n\t_, err = common.Engine.Id(v.Id).Get(&v)\n\tif err == nil {\n\t\tif _, err = common.Engine.ID(v.Id).Delete(&v); err == nil {\n\t\t\tfmt.Println(\"Number of records updated in database:\", v)\n\t\t}\n\t}\n\n\treturn\n}", "func (chat *Chat) DeleteUser(id string) {\n\tchat.lock.Lock()\n\tdefer chat.lock.Unlock()\n\n\tusers := []*User{}\n\tfor _, chatUser := range chat.users {\n\t\tif chatUser.Id == id {\n\t\t\t//close ws\n\t\t\tchatUser.Ws.Close()\n\t\t\tchatUser.Dt = time.Since(chatUser.OnlineAt) / 
1e9\n\n\t\t\t//进行数据跟踪\n\t\t\tgo httpPostForm(chatUser)\n\n\t\t\tcontinue\n\t\t}\n\t\tusers = append(users, chatUser)\n\t}\n\n\tchat.users = users\n}", "func deleteUser(username string) int {\n\tif isServerAlive() {\n\t\tctx, cancel := context.WithTimeout(context.Background(), time.Second)\n\t\tdefer cancel()\n\t\treply, err := rpcCaller.DeleteUser(ctx, &pb.Credentials{Uname: username, Broadcast: true})\n\t\tif err == nil {\n\t\t\tfmt.Println(\"Delete User RPC successful\", reply)\n\t\t\treturn 0\n\t\t} else {\n\t\t\tfmt.Println(\"Delete User RPC failed\", reply, err)\n\t\t\treturn -1\n\t\t}\n\t} else {\n\t\tdebugPrint(\"Debug: Primary server down, cant process requests\")\n\t\treturn -1\n\t}\n}", "func (env *Env) DeleteUser(c *gin.Context) {\n\n\t//Convert ID Parameter into int32\n\ttmp, err := strconv.Atoi(c.Param(\"id\"))\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"handler\").WithError(err)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.RQST001)\n\t\treturn\n\t}\n\tuserid := int32(tmp)\n\n\treqUserId, _ := c.Get(\"userid\")\n\n\t//Check if UserID\n\tvar exists int64\n\tresult := env.db.Model(mysql.User{}).Where(\"id = ?\", userid).Count(&exists)\n\tif result.Error != nil {\n\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\treturn\n\t}\n\n\tif exists == 0 {\n\t\tLog.WithField(\"module\", \"handler\").Error(\"User not Found in Database\")\n\t\tc.AbortWithStatusJSON(http.StatusNotFound, errs.DBSQ006)\n\t\treturn\n\t}\n\n\tif userid != reqUserId {\n\t\tvar user mysql.User\n\n\t\tresult := env.db.Where(\"id = ?\", reqUserId).First(&user)\n\t\tif result.Error != nil {\n\t\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\n\t\tLog.Debug(user)\n\n\t\terr = env.db.Model(&user).Association(\"Permissions\").Find(&user.Permissions)\n\t\tif err != nil {\n\t\t\tLog.WithField(\"module\", \"sql\").WithError(err)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\n\t\tif !user.Permissions.Admin {\n\t\t\tLog.WithField(\"module\", \"handler\").Error(\"User not Authorized for this Action\")\n\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, errs.AUTH009)\n\t\t\treturn\n\t\t}\n\t}\n\n\tresult = env.db.Delete(mysql.User{}, userid)\n\tif result.Error != nil {\n\t\tif errors.Is(result.Error, gorm.ErrRecordNotFound) {\n\t\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusNotFound, errs.DBSQ006)\n\t\t\treturn\n\t\t} else {\n\t\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\t}\n\n}", "func Duser(w http.ResponseWriter, r *http.Request) {\n\tid := r.PostFormValue(\"id\")\n\tidint, _ := strconv.Atoi(id)\n\tstmt := datastorage.GetDataRouter().GetStmt(\"delete_user\")\n\t_, err := stmt.Exec(idint)\n\tif err != nil {\n\t\tmessages.SetMessage(r, \"Σφάλμα κατά την διαγραφή του χρήστη\")\n\t\tlog.Println(err)\n\t\thttp.Redirect(w, r, \"/retrieveuser?id=\"+id, http.StatusMovedPermanently)\n\t\treturn\n\t}\n\thttp.Redirect(w, r, \"/listusers\", http.StatusMovedPermanently)\n}", "func (c CvpRestAPI) DeleteUsers(userIds []string) error {\n\tif len(userIds) == 0 {\n\t\treturn errors.New(\"DeleteUsers: no user specified for deletion\")\n\t}\n\tresp, err := 
c.client.Post(\"/user/deleteUsers.do\", nil, userIds)\n\tif err != nil {\n\t\treturn errors.Errorf(\"DeleteUsers: %s\", err)\n\t}\n\tvar msg struct {\n\t\tResponseMessage string `json:\"data\"`\n\t\tErrorResponse\n\t}\n\tif err = json.Unmarshal(resp, &msg); err != nil {\n\t\treturn errors.Errorf(\"DeleteUsers: JSON unmarshal error: \\n%v\", err)\n\t}\n\tvar retErr error\n\tif err = msg.Error(); err != nil {\n\t\tswitch msg.ErrorCode {\n\t\tcase SUPERUSER_DELETE_ATTEMPT:\n\t\t\tretErr = errors.Errorf(\"DeleteUsers: cannot delete superuser '%s'\", defaultUser)\n\t\tcase INVALID_USER:\n\t\t\tretErr = errors.Errorf(\"DeleteUsers: one of the users in %v does not exist\", userIds)\n\t\tdefault:\n\t\t\tretErr = errors.Errorf(\"DeleteUsers: Unexpected error: %v\", err)\n\t\t}\n\t} else {\n\t\tlowerCaseResp := strings.ToLower(msg.ResponseMessage)\n\t\tif !strings.Contains(lowerCaseResp, successMsg) {\n\t\t\tretErr = errors.New(\"DeleteUsers: Successful deletion response not found\")\n\t\t}\n\t}\n\treturn retErr\n}", "func deleteUser(res http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\t_, err := db.Exec(`\n\t\tDELETE FROM accounts\n\t\tWHERE username = $1;`, p.ByName(\"username\"),\n\t)\n\tif err != nil {\n\t\tlog.Println(\"deleteUser:\", err)\n\t}\n\n\twriteJSON(res, 200, jsMap{\"status\": \"OK\"})\n}", "func DeleteUserHandler(connection *sql.DB, cnf config.Config) negroni.HandlerFunc {\n\treturn negroni.HandlerFunc(func(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) {\n\t\tvar queryToken = r.URL.Query().Get(\"token\")\n\n\t\tif len(queryToken) < 1 {\n\t\t\tqueryToken = r.Header.Get(\"token\")\n\t\t}\n\n\t\tif len(queryToken) < 1 {\n\t\t\tutil.SendBadRequest(w, errors.New(\"token is mandatory\"))\n\t\t\treturn\n\t\t}\n\n\t\tuser := &models.UserResponse{}\n\t\terr := util.RequestToJSON(r, user)\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, errors.New(\"Bad json\"))\n\t\t\treturn\n\t\t}\n\n\t\tsecretKey := cnf.SecretKey\n\t\ttok, err := jwt.Parse(queryToken, func(t *jwt.Token) (interface{}, error) {\n\t\t\treturn []byte(secretKey), nil\n\t\t})\n\n\t\tclaims := tok.Claims.(jwt.MapClaims)\n\t\tvar ID = claims[\"sub\"].(float64)\n\n\t\tif int64(ID) != user.ID {\n\t\t\tutil.SendBadRequest(w, errors.New(\"you can only delete your own user object\"))\n\t\t\treturn\n\t\t}\n\n\t\tdb.DeleteUser(connection, user)\n\t\tif err != nil {\n\t\t\tutil.SendBadRequest(w, err)\n\t\t\treturn\n\t\t}\n\t\tutil.SendOK(w, string(\"\"))\n\n\t})\n}", "func Delete(w http.ResponseWriter, r *http.Request) {\n\tuserID := context.Get(r, \"userID\").(int)\n\n\t// Excluindo usuário logado\n\terr := ServiceUser.Delete(userID)\n\n\tif err != nil {\n\t\tw.Write(util.MessageInfo(\"message\", err.Error()))\n\t\treturn\n\t}\n\n\tw.Write(util.MessageInfo(\"message\", \"Excluído com sucesso\"))\n}", "func DeleteUser(c *gin.Context) {\n\tuuid := c.Params.ByName(\"uuid\")\n\tvar user models.User\n\tdb := db.GetDB()\n\tif uuid != \"\" {\n\n\t\tjwtClaims := jwt.ExtractClaims(c)\n\t\tauthUserAccessLevel := jwtClaims[\"access_level\"].(float64)\n\t\tauthUserUUID := jwtClaims[\"uuid\"].(string)\n\t\tif authUserAccessLevel != 1 {\n\t\t\tif authUserUUID != uuid {\n\t\t\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{\n\t\t\t\t\t\"error\": \"Sorry but you can't delete user, ONLY admins can\",\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\t// DELETE FROM users WHERE uuid= user.uuid\n\t\t// exemple : UPDATE users SET deleted_at=date.now WHERE uuid = user.uuid;\n\t\tif err := db.Where(\"uuid = ?\", 
uuid).Delete(&user).Error; err != nil {\n\t\t\t// error handling...\n\t\t\tc.AbortWithStatusJSON(http.StatusBadRequest, gin.H{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\t// Display JSON result\n\t\t// c.JSON(200, gin.H{\"success\": \"User #\" + uuid + \" deleted\"})\n\t\tc.JSON(200, gin.H{\"success\": \"User successfully deleted\"})\n\t} else {\n\t\t// Display JSON error\n\t\tc.JSON(404, gin.H{\"error\": \"User not found\"})\n\t}\n\n}", "func deleteUser(userID int) error {\n\tdb, err := sql.Open(\"mysql\", DB_USER_NAME+\":\"+DB_PASSWORD+\"@unix(/var/run/mysql/mysql.sock)/\"+DB_NAME)\n\tif err != nil {\n\t\treturn errors.New(\"No connection\")\n\t}\n\n\tres, err := db.Exec(\"delete from Users where UserID=? and not exists(select 1 from StudentCourses where Student=? limit 1)\", userID, userID)\n\n\tif err != nil {\n\t\treturn errors.New(\"User is currently enrolled in a class. Please remove the student from the class before deleting the user.\")\n\t}\n\trowsAffected, err := res.RowsAffected()\n\n\tif rowsAffected != 1 {\n\t\treturn errors.New(\"Query didn't match any users.\")\n\t}\n\n\treturn nil\n}", "func DeleteUserController(c echo.Context) error {\n\tid, _ := strconv.Atoi(c.Param(\"id\"))\n\tusers = append(users[:id], users[id+1:]...)\n\treturn c.NoContent(http.StatusNoContent)\n}", "func DeleteLpBrookUser(id int) (err error) {\n\to := orm.NewOrm()\n\tv := LpBrookUser{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&LpBrookUser{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func delWebUser(webUserId string) error {\n\n\n\n\terr:=db.Update(func(tx *bolt.Tx) error {\n\t\twebUsersBucket :=tx.Bucket([]byte(\"webUsers\"))\n\t\tif webUsersBucket ==nil{\n\t\t\treturn errors.New(\"warning:trying to del non-exist user\")\n\t\t}\n\n\t\terr:=webUsersBucket.Delete([]byte(webUserId))\n\t\treturn err\n\t})\n\n\treturn err\n}", "func Delete() error {\n\tuser := &Users{}\n\ti, err := engine.Id(1).Delete(user)\n\tif err == nil {\n\t\treturn nil\n\t} else if i <= 0 {\n\t\treturn errors.New(\"删除失败\")\n\t}\n\n\treturn nil\n}", "func (d *database) deleteUsersOldestPost(publicKey string) (err error) {\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestPost\")\n\t}\n\tlogger.Log.Debug(publicKey)\n\tquery := \"DELETE from letters WHERE id in (SELECT id FROM letters WHERE letter_purpose IN ('share-text','share-image/png','share-image/jpg','') AND sender == ? 
ORDER BY time LIMIT 1);\"\n\tlogger.Log.Debug(query)\n\tstmt, err := tx.Prepare(query)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestPost\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(publicKey)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestPost\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestPost\")\n\t}\n\treturn\n}", "func DeleteUser(person *Person, id string) (err error) {\n\tConfig.DB.Where(\"id = ?\", id).Delete(person)\n\treturn nil\n}", "func DeleteUser(w http.ResponseWriter, r *http.Request) {\n\n\thttpext.SuccessAPI(w, \"ok\")\n}", "func (pc UserController) Delete(c *gin.Context) {\n\tid := c.Params.ByName(\"id\")\n\tvar u repository.UserRepository\n\tidInt, _ := strconv.Atoi(id)\n\tif err := u.DeleteByID(idInt); err != nil {\n\t\tc.AbortWithStatus(403)\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tc.JSON(200, gin.H{\"success\": \"ID\" + id + \"のユーザーを削除しました\"})\n\treturn\n}", "func DeleteUser(user *entity.User, id string, client *statsd.Client) (err error) {\n\tt := client.NewTiming()\n\tif config.DB.Where(\"id = ?\", id).First(&user); user.ID == \"\" {\n\t\treturn errors.New(\"the user doesn't exist!!!\")\n\t}\n\tconfig.DB.Where(\"id = ?\", id).Delete(&user)\n\tt.Send(\"delete_user.query_time\")\n\treturn nil\n}", "func UserDelete(w http.ResponseWriter, r *http.Request) {\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\turlUser := urlVars[\"user\"]\n\n\tuserUUID := auth.GetUUIDByName(urlUser, refStr)\n\n\terr := auth.RemoveUser(userUUID, refStr)\n\tif err != nil {\n\t\tif err.Error() == \"not found\" {\n\t\t\terr := APIErrorNotFound(\"User\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write empty response if anything ok\n\trespondOK(w, output)\n\n}", "func db_unfollow_deleted_users(username string) {\n file_path := path.Join(\"db/users\", strings.ToLower(username)+\".json\")\n \n if _, err := os.Stat(file_path); os.IsNotExist(err) {\n return\n }\n user := db_JSON_to_user(username)\n\n follows := user.Follows\n \n offset := 0\n for i, followed := range follows {\n if !db_check_user_exists(followed) {\n // unfollow user if account doesn't exist\n user.Follows = append(user.Follows[:i-offset], user.Follows[i+1-offset:]...)\n offset = offset + 1\n }\n }\n updated_user := db_user_to_JSON(user)\n \n writeerr := ioutil.WriteFile(file_path, updated_user, 0644)\n \n if writeerr != nil {\n panic(writeerr)\n }\n}", "func deleteUser(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tparams := mux.Vars(r)\n\tresult := delete.DeleteUserData(params[\"id\"])\n\tjson.NewEncoder(w).Encode(map[string]string{\n\t\t\"result\": result,\n\t})\n}", "func DeleteRelative(c *gin.Context) {\n\tid_admin, _ := strconv.Atoi(c.Param(\"id\"))\n\tid_relative, _ := strconv.Atoi(c.Param(\"id_relative\"))\n\n\tadminUser, db := selectUserID(id_admin, c)\n\trelativeUser, _ := selectUserID(id_relative, c)\n\tdefer 
db.Close()\n\n\tdb.Model(&adminUser).Association(\"UsersFamily\").Delete(&relativeUser)\n\tdb.Save(&adminUser)\n\n\tc.JSON(200, adminUser)\n}", "func (s *Server) deleteUser(request *restful.Request, response *restful.Response) {\n\t// Authorize\n\tif !s.auth(request, response) {\n\t\treturn\n\t}\n\t// get user-id and put into temp\n\tuserId := request.PathParameter(\"user-id\")\n\tif err := s.dataStore.DeleteUser(userId); err != nil {\n\t\tinternalServerError(response, err)\n\t\treturn\n\t}\n\tok(response, Success{RowAffected: 1})\n}", "func (r *RepositoryUsersCRUD) Delete(uid uint32) (int64, error) {\n\tvar rs *gorm.DB\n\tdone := make(chan bool)\n\tgo func(ch chan<- bool) {\n\t\tdefer close(ch)\n\t\trs = r.model.Where(\"id = ?\", uid).Take(&models.User{}).Delete(&models.User{})\n\t\tch <- true\n\t}(done)\n\n\tif channels.OK(done) {\n\t\tif rs.Error != nil {\n\t\t\treturn 0, rs.Error\n\t\t}\n\n\t\treturn rs.RowsAffected, nil\n\t}\n\treturn 0, rs.Error\n}", "func deleteUser(c *gin.Context) {\n\tvar user user\n\tuserID := c.Param(\"id\")\n\n\tdb.First(&user, userID)\n\n\tif user.Id == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No user found!\"})\n\t\treturn\n\t}\n\n\tdb.Delete(&user)\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": \"User deleted successfully!\"})\n}", "func (r *TCData) ForceDeleteTestUsers(t *testing.T) {\n\n\t// NOTE: Special circumstances! This should *NOT* be done without a really good reason!\n\t// Connects directly to the DB to remove users rather than going thru the client.\n\t// This is required here because the DeleteUser action does not really delete users, but disables them.\n\tdb, err := r.OpenConnection()\n\tif err != nil {\n\t\tt.Error(\"cannot open db\")\n\t}\n\tdefer db.Close()\n\n\tvar usernames []string\n\tfor _, user := range r.TestData.Users {\n\t\tusernames = append(usernames, `'`+*user.Username+`'`)\n\t}\n\n\t// there is a constraint that prevents users from being deleted when they have a log\n\tq := `DELETE FROM log WHERE NOT tm_user = (SELECT id FROM tm_user WHERE username = 'admin')`\n\terr = execSQL(db, q)\n\tif err != nil {\n\t\tt.Errorf(\"cannot execute SQL: %s; SQL is %s\", err.Error(), q)\n\t}\n\n\tq = `DELETE FROM tm_user WHERE username IN (` + strings.Join(usernames, \",\") + `)`\n\terr = execSQL(db, q)\n\tif err != nil {\n\t\tt.Errorf(\"cannot execute SQL: %s; SQL is %s\", err.Error(), q)\n\t}\n}", "func Delete(ctx context.Context, dbConn *db.DB, userID string) error {\n\tif !bson.IsObjectIdHex(userID) {\n\t\treturn errors.Wrapf(web.ErrInvalidID, \"bson.IsObjectIdHex: %s\", userID)\n\t}\n\n\tq := bson.M{\"user_id\": userID}\n\n\tf := func(collection *mgo.Collection) error {\n\t\treturn collection.Remove(q)\n\t}\n\tif err := dbConn.MGOExecute(ctx, usersCollection, f); err != nil {\n\t\tif err == mgo.ErrNotFound {\n\t\t\treturn web.ErrNotFound\n\t\t}\n\t\treturn errors.Wrap(err, fmt.Sprintf(\"db.users.remove(%s)\", db.Query(q)))\n\t}\n\n\treturn nil\n}", "func DeleteUser(id int) (err error) {\n\to := orm.NewOrm()\n\tv := User{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&User{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func (s *Server) deleteUser(request *restful.Request, response *restful.Response) {\n\t// Authorize\n\tif !s.auth(request, response) {\n\t\treturn\n\t}\n\t// get user-id and put into temp\n\tuserId 
:= request.PathParameter(\"user-id\")\n\tif err := s.DataStore.DeleteUser(userId); err != nil {\n\t\tinternalServerError(response, err)\n\t\treturn\n\t}\n\tok(response, Success{RowAffected: 1})\n}", "func DeleteUsersOld(id int) (err error) {\n\to := orm.NewOrm()\n\tv := UsersOld{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&UsersOld{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func (uh *UserHandler) Delete(c echo.Context) error {\n\tid_, err := strconv.Atoi(c.Param(\"id\"))\n\tid := uint(id_)\n\n\terr = uh.UserUseCase.Delete(id)\n\n\tif err != nil {\n\t\treturn c.JSON(GetStatusCode(err), ResponseError{Message: err.Error()})\n\t}\n\n\treturn c.NoContent(http.StatusNoContent)\n}", "func (uv *userValidator) Delete(id uint) error {\n\tu := User{Model: gorm.Model{ID: id}}\n\n\tif err := runUserValFuncs(&u, uv.isGreaterThan(0)); err != nil {\n\t\treturn err\n\t}\n\n\treturn uv.UserDB.Delete(id)\n}", "func (s *Service) Delete(r *http.Request, args *DeleteEntryArgs, result *DeleteResponse) error {\n\tif args.UserID == \"\" {\n\t\tresult.Error = uidMissing\n\t\tresult.Deleted = -1\n\t\treturn nil\n\t}\n\tcoll := s.Session.DB(MentatDatabase).C(args.UserID)\n\tUUIDsToDelete := args.UUIDs\n\tif len(UUIDsToDelete) > 0 {\n\t\tif len(UUIDsToDelete) > BatchDeleteThreshold {\n\t\t\tchanged, err := coll.RemoveAll(bson.M{\"type\": bson.M{\"$in\": args.UUIDs}})\n\t\t\tif err != nil {\n\t\t\t\tresult.Error = fmt.Sprintf(\"cleanup failed: %s\", err)\n\t\t\t\tresult.Deleted = -1\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tresult.Deleted = changed.Removed\n\t\t\treturn nil\n\t\t}\n\t\tdeletedCount := 0\n\t\tvar failedEntries []string\n\t\tfor _, uuid := range UUIDsToDelete {\n\t\t\terr := coll.Remove(bson.M{\"uuid\": uuid})\n\t\t\tif err == nil {\n\t\t\t\tdeletedCount++\n\t\t\t} else {\n\t\t\t\tfailedEntries = append(failedEntries, uuid)\n\t\t\t}\n\t\t}\n\t\tif len(failedEntries) > 0 {\n\t\t\tresult.Error = fmt.Sprintf(\"failed to delete entries: %s\", strings.Join(failedEntries, \", \"))\n\t\t}\n\t\tresult.Deleted = deletedCount\n\t\treturn nil\n\t}\n\tresult.Error = \"No UUIDs provided\"\n\tresult.Deleted = -1\n\treturn nil\n}", "func DeleteActionsByUserID(c *gin.Context) {\n\tvar (\n\t\tuserID = c.Param(\"user_id\")\n\t\taction = []models.Action{}\n\t)\n\n\tresult := models.DB.Find(&action, \"created_by = ?\", userID)\n\n\tif result.RowsAffected == 0 {\n\t\tc.JSON(http.StatusOK, helpers.NoResults())\n\t\treturn\n\t}\n\n\tmodels.DB.Exec(\"DELETE FROM actions WHERE created_by = ?\", userID)\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": helpers.Results{\n\t\tCount: len(action),\n\t\tResults: action,\n\t}})\n}", "func (d *database) deleteUsersOldestLargestPost(publicKey string) (err error) {\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestLargestPost\")\n\t}\n\tlogger.Log.Debug(publicKey)\n\tquery := \"DELETE from letters WHERE id in (SELECT id FROM letters WHERE LENGTH(sealed_letter) > 5000 AND sender == ? 
ORDER BY time LIMIT 1);\"\n\tlogger.Log.Debug(query)\n\tstmt, err := tx.Prepare(query)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestLargestPost\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(publicKey)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestLargestPost\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUsersOldestLargestPost\")\n\t}\n\treturn\n}", "func (h *UserHandler) Delete(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tidStr := vars[\"id\"]\n\tid, err := strconv.ParseUint(idStr, 10, 64)\n\tif err != nil {\n\t\tlog.Println(errors.Wrapf(err, \"error parse uint:%v\", idStr))\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Printf(\"/users/%d DELETE handled\", id)\n\n\tif err := h.model.Delete(id); err != nil {\n\t\tlog.Println(err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusNoContent)\n}", "func (a *Server) DeleteUser(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"delete a user\")\n}", "func DeleteUser(id int64) error {\n\tdb.Exec(\"DELETE FROM members WHERE user_id = ?\", id)\n\tdb.Exec(\"DELETE FROM users WHERE id = ?\", id)\n\t// TODO delete all projects\n\treturn nil\n}", "func DeleteUserLoginIps(id int) (err error) {\n\to := orm.NewOrm()\n\tv := UserLoginIps{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&UserLoginIps{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func DeleteUser(w http.ResponseWriter, r *http.Request) {\n\tparams := mux.Vars(r)\n\n\tuserID, err := strconv.ParseInt(params[\"id\"], 10, 64)\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusBadRequest, err)\n\t\treturn\n\t}\n\n\tuserIDToken, err := authentication.ExtractUserId(r)\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusUnauthorized, err)\n\t\treturn\n\t}\n\n\tif userIDToken != userID {\n\t\tresponses.Error(w, http.StatusForbidden, errors.New(\"não é possível manipular usuário de terceiros\"))\n\t\treturn\n\t}\n\n\tdb, err := database.Connect()\n\tif err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\tdefer db.Close()\n\n\trepository := repository.NewRepositoryUser(db)\n\n\tif err := repository.DeleteUser(userID); err != nil {\n\t\tresponses.Error(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\n\tresponses.JSON(w, http.StatusNoContent, nil)\n}", "func DeleteUserHandler(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\n\tname := vars[\"name\"]\n\tselector := r.URL.Query().Get(\"selector\")\n\tnamespace := r.URL.Query().Get(\"namespace\")\n\tclientVersion := r.URL.Query().Get(\"version\")\n\n\tlog.Debugf(\"DeleteUserHandler parameters selector [%s] namespace [%s] version [%s] name [%s]\", selector, namespace, clientVersion, name)\n\n\tusername, err := apiserver.Authn(apiserver.DELETE_USER_PERM, w, r)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tw.Header().Set(\"WWW-Authenticate\", `Basic realm=\"Restricted\"`)\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\n\tresp := msgs.DeleteUserResponse{}\n\n\tvar ns string\n\tns, err = apiserver.GetNamespace(apiserver.Clientset, username, namespace)\n\tif err != nil {\n\t\tresp.Status.Code = msgs.Error\n\t\tresp.Status.Msg = err.Error()\n\t\tjson.NewEncoder(w).Encode(resp)\n\t\treturn\n\t}\n\n\tif 
clientVersion != msgs.PGO_VERSION {\n\t\tresp.Status.Code = msgs.Error\n\t\tresp.Status.Msg = apiserver.VERSION_MISMATCH_ERROR\n\t\tjson.NewEncoder(w).Encode(resp)\n\t\treturn\n\t}\n\n\tresp = DeleteUser(name, selector, ns)\n\tjson.NewEncoder(w).Encode(resp)\n\n}", "func DeleteUser(clients *common.ClientContainer, handler common.HandlerInterface) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tuserID := chi.URLParam(r, \"userID\")\n\t\tID, err := strconv.Atoi(userID)\n\t\tif err != nil {\n\t\t\tcommon.WriteErrorToResponse(w, http.StatusUnprocessableEntity,\n\t\t\t\thttp.StatusText(http.StatusUnprocessableEntity),\n\t\t\t\t\"userID provided is not integer\")\n\t\t\treturn\n\t\t}\n\n\t\t// check if the ID exists\n\t\t_, err = handler.GetUserID(clients, ID)\n\t\tif err != nil {\n\t\t\tswitch err.(type) {\n\t\t\t// grafanaclient.NotFound means, that user provided the\n\t\t\t// ID of non existent user. We return 404\n\t\t\tcase grafanaclient.NotFound:\n\t\t\t\terrMsg := fmt.Sprintf(\"User Not Found\")\n\t\t\t\tcommon.WriteErrorToResponse(w, http.StatusNotFound,\n\t\t\t\t\terrMsg, err.Error())\n\t\t\t\treturn\n\t\t\t// If any other error happened -> return 500 error\n\t\t\tdefault:\n\t\t\t\tlog.Logger.Error(err)\n\t\t\t\tcommon.WriteErrorToResponse(w, http.StatusInternalServerError,\n\t\t\t\t\thttp.StatusText(http.StatusInternalServerError),\n\t\t\t\t\t\"Internal server error occured\")\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\t// if ID exists then delete that user\n\t\terr = handler.DeleteUser(clients, ID)\n\t\tif err != nil {\n\t\t\tlog.Logger.Error(err)\n\t\t\tcommon.WriteErrorToResponse(w, http.StatusInternalServerError,\n\t\t\t\thttp.StatusText(http.StatusInternalServerError),\n\t\t\t\t\"Internal server error occured\")\n\t\t\treturn\n\t\t}\n\t}\n}", "func deleteUser(uid string) {\n\tif err := client.DeleteUser(context.Background(), uid); err != nil {\n\t\tlog.Printf(\"WARN: Failed to delete user %q on tear down: %v\", uid, err)\n\t}\n}", "func (uv *userValidator) Delete(id uint) error{\n\tvar user User\n\tuser.ID = id\n\terr := runUserValidatorFunction(&user, uv.idGreaterThan(0))\n\tif err != nil{\n\t\treturn err\n\t}\n\treturn uv.UserDB.Delete(id)\n}", "func (serv *AppServer) DeleteUser(delID int) {\n\tserv.ServerRequest([]string{\"DeleteUser\", strconv.Itoa(delID)})\n}", "func TestDeleteAdminPrivileges(t *testing.T) {\n\tcreds := client.Credentials{Username: testAdmin1Username, Password: testAdmin1UserPassword}\n\ttestData := []testDeleteData{\n\t\ttestDeleteData{testAdmin1ID, http.StatusBadRequest}, // Can't delete yourself\n\t\ttestDeleteData{testStaff1ID, http.StatusOK},\n\t\ttestDeleteData{testBasic1ID, http.StatusOK},\n\t\ttestDeleteData{testBasic1ID, http.StatusBadRequest}, // Attempt to delete the same user\n\t\ttestDeleteData{\"\", http.StatusBadRequest}, // Attempt to delete without specifying user\n\t}\n\tdeleteTest(t, creds, testData)\n}", "func DeleteSecUser(id int64) (err error) {\n\to := orm.NewOrm()\n\tv := SecUser{Id: id}\n\t// ascertain id exists in the database\n\tif err = o.Read(&v); err == nil {\n\t\tvar num int64\n\t\tif num, err = o.Delete(&SecUser{Id: id}); err == nil {\n\t\t\tfmt.Println(\"Number of records deleted in database:\", num)\n\t\t}\n\t}\n\treturn\n}", "func (server Server) DeleteUser(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r) //mux params\n\tid, err := strconv.Atoi(vars[\"id\"]) // convert the id in string to int\n\tvar res models.APIResponse // make a response\n\n\tif err != nil 
{\n\t\tlog.Printf(\"Unable to convert the string into int. %v\", err)\n\t\tres = models.BuildAPIResponseFail(\"Unable to convert the string into int\", nil)\n\t} else {\n\t\tdeletedRows := deleteUser(int64(id), server.db) // call the deleteUser, convert the int to int64\n\t\tres = models.BuildAPIResponseSuccess(\"User updated successfully.\", deletedRows)\n\t}\n\t// send the response\n\tjson.NewEncoder(w).Encode(res)\n}", "func (c *SQLiteConn) AuthUserDelete(username string) error {\n\t// NOOP\n\treturn nil\n}", "func AllUsers() error {\n\tstmt, err := mysqlBus.DB.Prepare(\"DELETE FROM Person\")\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = stmt.Exec()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (self Users) Delete() {\n\tsqlStatement := `DELETE FROM users WHERE id = $1`\n\t_, err := self.DB.Exec(sqlStatement, self.Id)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}", "func DeleteUser(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"Delete user endpoint hit\")\n\t\n\tvars := mux.Vars(r)\n\n\tid := vars[\"id\"]\n\n\tvar user models.User\n\n\tmessage := user.Destroy(id)\n\n json.NewEncoder(w).Encode(message)\n}", "func (h *Handler) delete() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tvars := mux.Vars(r)\n\t\tid := vars[userID]\n\t\terr := h.UserDAO.Delete(r.Context(), id)\n\t\tswitch {\n\t\tcase errors.Is(err, errorx.ErrNoUser):\n\t\t\tmsg := &errorMessage{\n\t\t\t\tMessage: fmt.Sprintf(\"user %s does not exist\", id),\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusNotFound, msg)\n\t\t\treturn\n\t\tcase errors.Is(err, errorx.ErrDeleteUser):\n\t\t\tmsg := &errorMessage{\n\t\t\t\tMessage: fmt.Sprintf(\"user %s has been deleted\", id),\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusGone, msg)\n\t\t\treturn\n\t\tcase err != nil:\n\t\t\tmsg := &errorMessage{\n\t\t\t\tError: err.Error(),\n\t\t\t\tMessage: \"user datastore error\",\n\t\t\t}\n\t\t\tresponse.JSON(w, http.StatusInternalServerError, msg)\n\t\t\treturn\n\t\tdefault:\n\t\t\tresponse.JSON(w, http.StatusNoContent, nil)\n\t\t}\n\t}\n\n}", "func deleteUser(username string) bool {\n\tlog.Printf(\"Deleting user: %s\", username)\n\tcmd := exec.Command(\"/bin/sh\", \"-c\", fmt.Sprintf(delUserCommand, username))\n\tif _, err := cmd.CombinedOutput(); err != nil {\n\t\tlog.Printf(\"Error: Can't delete user: %s: %s\", username, err)\n\t\treturn false\n\t}\n\treturn true\n}", "func (d *database) deleteUser(publicKey string) (err error) {\n\ttx, err := d.db.Begin()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\tquery := \"DELETE FROM letters WHERE sender == ?;\"\n\tlogger.Log.Debug(query)\n\tstmt, err := tx.Prepare(query)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\tdefer stmt.Close()\n\n\t_, err = stmt.Exec(publicKey)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\n\terr = tx.Commit()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"deleteUser\")\n\t}\n\treturn\n}", "func (user *User) Delete() *errors.RestErr {\n\t//prepare and execute the delete query\n\tstmt, err := usersdb.Client.Prepare(queryDeleteUser)\n\tif err != nil {\n\t\treturn errors.NewInternalServerError(err.Error())\n\t}\n\tdefer stmt.Close()\n\n\t//\n\tif _, err = stmt.Exec(user.ID); err != nil {\n\t\treturn errors.ParseError(err)\n\t}\n\n\treturn nil\n\n}", "func Delete(c *gin.Context) {\n\tuserId, idErr := getUserID(c.Param(\"user_id\"))\n\tif idErr != nil {\n\t\tc.JSON(idErr.Status, idErr)\n\t\treturn\n\t}\n\n\tif err := 
services.UsersService.DeleteUser(userId); err != nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, map[string]string{\"status\": \"deleted\"})\n}", "func uniaryDelete(client pbclient.MySpannerClient) error {\n\tfmt.Println(\"performing uniaryDelete\")\n\t_, err := client.UniaryDelete(ctx, &pb.ExampleTableRange{StartId: int64(0), EndId: int64(5)})\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Println(\"deleted using uniary delete\")\n\treturn nil\n}", "func DeleteUserController(c echo.Context) error {\n\t// user, err := strconv.Atoi(c.Param())\n\tuser := User{}\n\tid, err := strconv.Atoi(c.Param(\"id\"))\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn c.String(http.StatusBadRequest, \"Invalid ID\")\n\t}\n\tif err := DB.First(&user, id).Error; err != nil {\n\t\treturn c.String(http.StatusInternalServerError, \"Internal Server Error\")\n\t}\n\tif err := DB.Delete(&user).Error; err != nil {\n\t\tfmt.Println(err)\n\t\treturn c.String(http.StatusInternalServerError, \"Internal Server Error\")\n\t}\n\treturn c.JSON(http.StatusOK, map[string]interface{}{\n\t\t\"message\": \"success delete by user id\",\n\t\t\"user\": user,\n\t})\n}", "func (uv *userValidator) Delete(id uint) error {\r\n\tvar user User\r\n\tuser.ID = id\r\n\terr := runUserValFns(&user, uv.idGreaterThan(0))\r\n\tif err != nil {\r\n\t\treturn err\r\n\t}\r\n\treturn uv.UserDB.Delete(id)\r\n}", "func DeleteUser(\n\tctx context.Context,\n\ttx *sql.Tx,\n\trequest *models.DeleteUserRequest) error {\n\tdeleteQuery := deleteUserQuery\n\tselectQuery := \"select count(uuid) from user where uuid = ?\"\n\tvar err error\n\tvar count int\n\tuuid := request.ID\n\tauth := common.GetAuthCTX(ctx)\n\tif auth.IsAdmin() {\n\t\trow := tx.QueryRowContext(ctx, selectQuery, uuid)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"not found\")\n\t\t}\n\t\trow.Scan(&count)\n\t\tif count == 0 {\n\t\t\treturn errors.New(\"Not found\")\n\t\t}\n\t\t_, err = tx.ExecContext(ctx, deleteQuery, uuid)\n\t} else {\n\t\tdeleteQuery += \" and owner = ?\"\n\t\tselectQuery += \" and owner = ?\"\n\t\trow := tx.QueryRowContext(ctx, selectQuery, uuid, auth.ProjectID())\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"not found\")\n\t\t}\n\t\trow.Scan(&count)\n\t\tif count == 0 {\n\t\t\treturn errors.New(\"Not found\")\n\t\t}\n\t\t_, err = tx.ExecContext(ctx, deleteQuery, uuid, auth.ProjectID())\n\t}\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"delete failed\")\n\t}\n\n\terr = common.DeleteMetaData(tx, uuid)\n\tlog.WithFields(log.Fields{\n\t\t\"uuid\": uuid,\n\t}).Debug(\"deleted\")\n\treturn err\n}", "func Delete(c *gin.Context) {\n\tuserID, err := getUserID(c.Param(\"user_id\"))\n\tif err != nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\n\tif err := services.UserServ.DeleteUser(userID); err != nil {\n\t\tc.JSON(err.Status, err)\n\t\treturn\n\t}\n\tc.JSON(http.StatusOK, map[string]string{\"status\": \"deleted\"})\n}", "func (app *App) deleteUser(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tid, err := strconv.Atoi(vars[\"id\"])\n\tif err != nil {\n\t\trespondWithError(w, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\n\tuser := &users.User{ID: int64(id)}\n\terr = user.DeleteUser(app.Db)\n\tif err != nil {\n\t\trespondWithError(w, http.StatusNotFound, err.Error())\n\t\treturn\n\t}\n\n\trespondWithJSON(w, http.StatusOK, map[string]string{\"message\": \"User deleted successfully\"})\n}", "func Delete(c *gin.Context) {\n\ttokenStr := c.Request.Header.Get(\"Authorization\")\n\tif tokenStr 
== \"\" || len(tokenStr) < 7 {\n\t\tfailUpdate(c, http.StatusUnauthorized, \"Unauthorized\")\n\t\treturn\n\t}\n\t_, admin, valid, err := ParseToken(tokenStr[7:])\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\tif !valid || !admin {\n\t\tfailUpdate(c, http.StatusUnauthorized, \"Unauthorized\")\n\t\treturn\n\t}\n\n\tuserid := c.Param(\"userid\")\n\n\terr = model.Delete(userid)\n\tif err != nil {\n\t\tfailUpdate(c, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tc.JSON(http.StatusNoContent, gin.H{\n\t\t\"message\": \"Deleted successfully\",\n\t\t\"status\": http.StatusNoContent,\n\t})\n}", "func (uv *userValidator) Delete(id uint) error {\n\tvar user User\n\tuser.ID = id\n\terr := runUserValFuncs(&user, uv.idGreaterThan(0))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn uv.UserDB.Delete(id)\n}", "func (uv *userValidator) Delete(id uint) error {\n\tvar user User\n\tuser.ID = id\n\terr := runUserValFuncs(&user, uv.idGreaterThan(0))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn uv.UserDB.Delete(id)\n}", "func DeleteAccount(userName string, password string, customCols map[string]interface{}) error {\n\tif len(userName) == 0 {\n\t\treturn errors.New(\"A user name is required to delete an account\")\n\t} else if len(password) == 0 {\n\t\treturn errors.New(\"A password is required to delete an account\")\n\t} else if checkStringSQLInjection(userName) {\n\t\treturn errors.New(\"Malicious characters detected\")\n\t} else if !checkCustomRequirements(customCols, customDeleteAccountRequirements) {\n\t\treturn errors.New(\"Incorrect data supplied\")\n\t}\n\n\t//FIRST TWO ARE id, password IN THAT ORDER\n\tvar vals []interface{} = []interface{}{new(int), new([]byte)}\n\n\t//CONSTRUCT SELECT QUERY\n\tselectQuery := \"Select \" + usersColumnID + \", \" + usersColumnPassword + \", \"\n\tif customCols != nil {\n\t\tfor key := range customCols {\n\t\t\tselectQuery = selectQuery + key + \", \"\n\t\t\t//MAINTAIN THE ORDER IN WHICH THE COLUMNS WERE DECLARED VIA A SLICE\n\t\t\tvals = append(vals, new(interface{}))\n\t\t}\n\t}\n\tselectQuery = selectQuery[0:len(selectQuery)-2] + \" FROM \" + tableUsers + \" WHERE \" + usersColumnName + \"=\\\"\" + userName + \"\\\" LIMIT 1;\"\n\n\t//EXECUTE SELECT QUERY\n\tcheckRows, err := database.Query(selectQuery)\n\tif err != nil {\n\t\treturn err\n\t}\n\t//\n\tcheckRows.Next()\n\tif scanErr := checkRows.Scan(vals...); scanErr != nil {\n\t\tcheckRows.Close()\n\t\treturn errors.New(\"Login or password is incorrect\")\n\t}\n\tcheckRows.Close()\n\n\t//\n\tdbIndex := *(vals[0]).(*int) // USE FOR SERVER CALLBACK & MAKE DATABASE RESPONSE MAP\n\tdbPass := *(vals[1]).(*[]byte)\n\n\t//COMPARE HASHED PASSWORDS\n\tif !helpers.CheckPasswordHash(password, dbPass) {\n\t\treturn errors.New(\"Login or password is incorrect\")\n\t}\n\n\t//REMOVE INSTANCES FROM friends TABLE\n\tdatabase.Exec(\"DELETE FROM \" + tableFriends + \" WHERE \" + friendsColumnUser + \"=\" + strconv.Itoa(dbIndex) + \" OR \" + friendsColumnFriend + \"=\" + strconv.Itoa(dbIndex) + \";\")\n\n\t//DELETE THE ACCOUNT\n\t_, deleteErr := database.Exec(\"DELETE FROM \" + tableUsers + \" WHERE \" + usersColumnID + \"=\" + strconv.Itoa(dbIndex) + \" LIMIT 1;\")\n\tif deleteErr != nil {\n\t\treturn deleteErr\n\t}\n\n\t//\n\treturn nil\n}" ]
[ "0.6807376", "0.6699899", "0.6681681", "0.6653365", "0.66240597", "0.6448112", "0.63996196", "0.63583326", "0.6317065", "0.6312442", "0.62865543", "0.62536424", "0.62300503", "0.61849064", "0.61762595", "0.61591995", "0.6146587", "0.61251163", "0.6084995", "0.60626435", "0.60480094", "0.6042493", "0.6006642", "0.597093", "0.5969919", "0.5961346", "0.5959116", "0.59565246", "0.5940453", "0.59376556", "0.5904366", "0.59011155", "0.58962816", "0.58943516", "0.5886039", "0.58808786", "0.5879691", "0.5872912", "0.58408606", "0.58373183", "0.5832077", "0.58260095", "0.5824359", "0.5789557", "0.57772493", "0.57466114", "0.5744892", "0.57446754", "0.5743857", "0.5740797", "0.57337826", "0.57293385", "0.5713552", "0.5710714", "0.57040566", "0.5697097", "0.569427", "0.5691452", "0.5687184", "0.5660753", "0.5652672", "0.5651637", "0.5651147", "0.56378037", "0.5636236", "0.5627941", "0.5627866", "0.5616231", "0.561268", "0.56119037", "0.5606414", "0.56028813", "0.55933344", "0.55919385", "0.55888325", "0.55875176", "0.55859023", "0.55745023", "0.5571722", "0.5568953", "0.5561966", "0.5560899", "0.554881", "0.5542087", "0.55370265", "0.55269647", "0.5526047", "0.55150825", "0.55096924", "0.5508688", "0.5504214", "0.550089", "0.5500878", "0.5488266", "0.54861253", "0.5481781", "0.5474758", "0.54740584", "0.54740584", "0.54653794" ]
0.64888215
5
9 Try to delete a user that does not exist
func TestDeleteUserServiceDoesntExist(t *testing.T) {
	err := DeleteUserService(user_01.SocialNumber)
	assert.Equal(t, 404, err.HTTPStatus)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func does_user_exist(uname string) (string, bool) {\n\tuser_map_lock.Lock()\n\tdefer user_map_lock.Unlock()\n\tif _, is_exist := user_map[uname]; is_exist {\n\t\treturn fmt.Sprintf(\"success: user exists %s\\n\", END_TAG), false\n\t} else {\n\t\treturn fmt.Sprintf(\"error: no such user %s\\n\", END_TAG), false\n\t}\n}", "func (u *User) checkExistUser() error {\n\tif u.Id == \"\" && u.Name == \"\" {\n\t\treturn fmt.Errorf(\"invalid user\")\n\t}\n\n\tif u.Id != \"\" {\n\t\t_,err := GetUserById(u.Id)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\tif u.Name != \"\" {\n\t\t_,err := GetUserByUserName(u.Name)\n\t\tif err == nil {\n\t\t\treturn fmt.Errorf(\"user exist\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func UserExistDb(email string) bool {\n\tlog.Println(\"call db func\")\n\tif _, ok := db[email]; !ok {\n\t\treturn false\n\t}\n\treturn true\n}", "func ErrUserDoesntExist() error {\n\treturn fmt.Errorf(UserDoesntExist)\n}", "func existsUser(gh_id int64) bool {\n\terr := db.QueryRow(\"SELECT gh_id FROM users WHERE gh_id = $1\", gh_id).\n\t\tScan(&gh_id)\n\treturn err != sql.ErrNoRows\n}", "func userExists(username string) bool {\n\tif _, err := user.Lookup(username); err == nil {\n\t\treturn true\n\t}\n\treturn false\n}", "func isUserExist(usernameQuery string) bool {\n\tvar user User_DB\n\terr := mysql_client.QueryRow(\"SELECT username, password, kind FROM User WHERE username=?\", usernameQuery).Scan(&user.Username, &user.Password, &user.Kind)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false\n\t} else {\n\t\treturn true\n\t}\n}", "func IsUserExist(qr db.Queryer, email string) bool {\n\tstr := \"SELECT count(*) as cnt FROM users WHERE email = ?\"\n\tuid := int64(0)\n\terr := qr.Get(&uid, str, email)\n\tif err != nil {\n\t\tlog.Println(\"err\", err)\n\t\treturn false\n\t}\n\tlog.Println(\"uid\", err)\n\tif uid > 0 {\n\t\treturn true\n\t}\n\n\treturn false\n\n}", "func (mi *MixtapeIndex) userExists(id string) bool {\n\tif uid, ok := mi.Users[id]; !ok {\n\t\tfmt.Println(\"User DNE ->\", uid)\n\t\treturn false\n\t}\n\treturn true\n}", "func getUserOrNullLogin(db *gorm.DB, appUserID string, w http.ResponseWriter, r *http.Request) *models.AppUser {\n\tuser := models.AppUser{}\n\tif err := db.Where(\"app_user_status = ?\", true).First(&user, models.AppUser{AppUserID: appUserID}).Error; err != nil {\n\t\treturn nil\n\t}\n\treturn &user\n}", "func CheckExistUser(email string) (models.User, bool, string) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\n\t//When end instruction remove timeout operation and liberate context\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"socialnetwork\")\n\tcollection := db.Collection(\"Users\")\n\n\tobject := bson.M{\"Email\": email}\n\n\tvar result models.User\n\n\terr := collection.FindOne(ctx, object).Decode(&result)\n\n\tID := result.ID.Hex()\n\n\tif err != nil {\n\t\treturn result, false, ID\n\t}\n\n\treturn result, true, ID\n\n}", "func UserNotFoundException() error {\n\treturn fmt.Errorf(\"user does not exist or wrong credentials\")\n}", "func sqlUserExists(db *sql.DB, username string) (userID, userStatus int) {\n\tsqlUserQuery := `SELECT user_id, status FROM public.users WHERE username=$1;`\n\trow := db.QueryRow(sqlUserQuery, username)\n\tswitch err := row.Scan(&userID, &userStatus); err {\n\tcase sql.ErrNoRows:\n\t\tfmt.Println(\"User not found, attempting insert\")\n\t\tuserID = sqlUserInsert(db, username)\n\t\tuserStatus = 3\n\t\treturn\n\tcase nil:\n\t\tfmt.Println(\"User found, 
checking hash\")\n\t\treturn\n\tdefault:\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n}", "func NoUserWrapper(err error) error {\n\treturn errors.New(\"User not found. More: \" + err.Error())\n}", "func CheckUserExists(username string, table string, session *r.Session) bool {\n\tvar u interface{}\n\tdb := os.Getenv(\"DB\")\n\t// userTable := os.Getenv(\"USERTABLE\")\n\tcur, _ := r.DB(db).Table(table).GetAllByIndex(\"username\", username).Run(session)\n\t_ = cur.One(&u)\n\tcur.Close()\n\t// fmt.Println(u)\n\tif u == nil {\n\t\t// fmt.Println(\"NO\")\n\t\treturn false\n\t}\n\t// fmt.Println(\"YES\")\n\treturn true\n}", "func db_check_user_exists(username string) bool {\n file_path := path.Join(\"db/users\", strings.ToLower(username) + \".json\")\n \n if _, err := os.Stat(file_path); !os.IsNotExist(err) {\n return true\n }\n return false\n}", "func checkForUid(session *mgo.Session, userId string, w http.ResponseWriter) (c *mgo.Collection, user User) {\n\tif len(userId) != 24 {\n\t\tErrorWithJSON(w, \"Error in uid format. Lenght must be 24\", http.StatusNotFound)\n\t\treturn\n\t}\n\tc = session.DB(DBNAME).C(USERSCOLL)\n\terr1 := c.FindId(bson.ObjectIdHex(userId)).One(&user)\n\tif err1 != nil {\n\t\tswitch err1 {\n\t\tcase mgo.ErrNotFound:\n\t\t\tErrorWithJSON(w, \"UidNotFound\", http.StatusNotFound)\n\t\t\treturn\n\t\tdefault :\n\t\t\tErrorWithJSON(w, \"Database error\", http.StatusInternalServerError)\n\t\t\tlog.Println(\"Failed find user: \", err1)\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func ERROR_AUTH_USER_NOT_FOUND(w http.ResponseWriter, pl string) {\n\tbuildForeignError(w, http.StatusForbidden, \"ERROR_AUTH_USER_NOT_FOUND\", pl)\n}", "func (a *Api) findExistingUser(identifier, token string) *schema.UserData {\n\tif usr, err := a.sl.GetUser(identifier, token); err != nil {\n\t\tlog.Printf(\"Error [%s] trying to get existing users details\", err.Error())\n\t\treturn nil\n\t} else {\n\t\tlog.Printf(\"User found at shoreline using token %s\", token)\n\t\treturn usr\n\t}\n}", "func TestGetUserIDInvalid(t *testing.T) {\n\tts := initAPITestServer(t)\n\tdefer test.CloseServer(ts)\n\n\tinvalidUsername := \"not_\" + username\n\tid, err := GetUserID(invalidUsername)\n\tif err == nil || err.Error() != \"Username not found\" {\n\t\tt.Fatalf(\"Expected error\")\n\t}\n\tif id != \"\" {\n\t\tt.Fatalf(\"Expected empty userID\")\n\t}\n}", "func createUser(u *User) error {\n\tif u.Status == 0 {\n\t\treturn errors.New(\"Invalid user value\")\n\t}\n\n\treturn nil\n}", "func (db *Database) UserExists(name string) (bool, error) {\n\trow := db.db.QueryRow(`\n\t\tSELECT id FROM melodious.accounts WHERE username=$1 LIMIT 1;\n\t`, name)\n\n\tvar id int // this is unused though\n\terr := row.Scan(&id)\n\tif err == sql.ErrNoRows {\n\t\treturn false, nil\n\t} else if err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func getUserOrNull(db *gorm.DB, appUserID string, w http.ResponseWriter, r *http.Request) *models.AppUser {\n\tuser := models.AppUser{}\n\tif err := db.First(&user, models.AppUser{AppUserID: appUserID}).Error; err != nil {\n\t\treturn nil\n\t}\n\treturn &user\n}", "func checkUserIsRegistered(email string, password string) string {\n\tdb, err := config.GetMongoDB()\n\n\tif err != nil {\n\t\tfmt.Println(\"Gagal menghubungkan ke database!\")\n\t\tos.Exit(2)\n\t}\n\n\tvar userRepository repository.UserRepository\n\n\tuserRepository = repository.NewUserRepositoryMongo(db, \"pengguna\")\n\n\tuserData, err1 := userRepository.FindAll()\n\n\tif err1 != nil {\n\t\treturn \"invalid_email_login\"\n\t} 
else {\n\t\tfor _, user := range userData {\n\t\t\tif email == user.Email {\n\t\t\t\tif components.CheckPasswordHash(password, user.Password) == true {\n\t\t\t\t\treturn \"login_success\"\n\t\t\t\t} else {\n\t\t\t\t\treturn \"invalid_password_login\"\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\treturn \"invalid_email_login\"\n\t\t\t}\n\t\t}\n\t}\n\n\treturn \"invalid_email_login\"\n}", "func checkUserID(userID string) error {\n\tif userID == \"\" {\n\t\tfmt.Println(\"UserID not found\")\n\t\treturn fmt.Errorf(\"Something went wrong\")\n\t}\n\treturn nil\n}", "func withArgs1(name string) (user, error) {\n\tfor _, user := range users {\n\t\tif user.name == name {\n\t\t\treturn user, nil\n\t\t}\n\t}\n\n\treturn user{}, errors.New(\"user is not found\")\n}", "func migrateNonexistUserTest(ctx context.Context, s *testing.State, cryptohome *hwsec.CryptohomeClient) {\n\t// Migrating the key of non-exist user should fail.\n\tif err := cryptohome.ChangeVaultPassword(ctx, username, oldPassword, util.PasswordLabel, newPassword); err == nil {\n\t\ts.Fatal(\"Password was successfully changed for non-existent user; want: should have failed\")\n\t}\n}", "func checkUserExists(userN string, lock *Lock) bool {\r\n\t\r\n\t//acquires shared lock tp read file\r\n\tacquireLock(\"read\", lock)\r\n\r\n\tuserFile, err := os.Open(usersFileName)\r\n\r\n\tif err != nil {\r\n\t\tlog.Println(\"Could not open file properly.\")\r\n\t\tlog.Fatal(err)\r\n\t}\r\n\r\n\tuserScanner := bufio.NewScanner(userFile)\r\n\r\n\tdefer releaseLock(\"read\",lock)\r\n\tdefer userFile.Close()\r\n\r\n\t//goes through users.txt file, parses each file line into user, checks\r\n\t//if user exists, if yes return true, else false\r\n\tfor userScanner.Scan() {\r\n\t\tcurUser := userScanner.Text()\r\n\t\tuserArr := strings.SplitAfter(curUser,\",\")\r\n\t\tif len(userArr) >= 1 && userArr[0] != \"\" {\r\n\t\t\tcurUsername := string(userArr[0][0:len(userArr[0])-1])\r\n\t\t\tif userN == curUsername {\r\n\t\t\t\treturn true\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\treturn false\r\n}", "func TestGetUserByIDUserNotFound(t *testing.T) {\n\tuser, err := GetUserByID(0)\n\tassert.Nil(t, user, \"Id nao esperado\")\n\tassert.NotNil(t, err)\n\n\tassert.EqualValues(t, http.StatusNotFound, err.StatusCode)\n\tassert.EqualValues(t, \"User not found\", err.Message)\n}", "func CheckUserExists(email string) (models.User, bool, string) {\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\n\tdb := MongoConnection.Database(\"cardinal\")\n\tusers := db.Collection(\"users\")\n\n\tcondition := bson.M{\"email\": email}\n\tvar result models.User\n\terr := users.FindOne(ctx, condition).Decode(&result)\n\tID := result.ID.Hex()\n\tif err != nil {\n\t\treturn result, false, ID\n\t}\n\treturn result, true, ID\n}", "func (r userRepository) newErrExisted() error {\n\treturn apperror.New(\"USER_EXISTED\", \"user is existed\", \"用户已存在\")\n}", "func UserExist(db *gorm.DB, tel string) bool {\n\tvar user model.User\n\tdb.First(&user, \"telephone = ?\", tel)\n\tif user.ID != 0 {\n\t\treturn true\n\t}\n\treturn false\n}", "func (this *managerStruct) UserExists(name string) bool {\n\tthis.mutex.RLock()\n\tid := this.getUserId(name)\n\tthis.mutex.RUnlock()\n\texists := id >= 0\n\treturn exists\n}", "func ExistingUser(email, password string) bool {\n\tvar u User\n\tDb.Where(\"email = ? 
AND password = ?\", email, password).First(&u)\n\tif email != u.Email && password != u.Password {\n\t\treturn false\n\t}\n\treturn true\n}", "func getUserOrCreate(bot *eb.Bot, username string, latest bool) (*model.EtternaUser, error) {\n\tuser, err := bot.Users.GetUsername(username)\n\texists := user != nil\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else if user == nil {\n\t\tetternaUser, err := bot.API.GetByUsername(username)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tid, err := bot.API.GetUserID(username)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tuser = &model.EtternaUser{\n\t\t\tUsername: etternaUser.Username,\n\t\t\tEtternaID: id,\n\t\t\tAvatar: etternaUser.AvatarURL,\n\t\t\tMSDOverall: etternaUser.MSD.Overall,\n\t\t\tMSDStream: etternaUser.MSD.Stream,\n\t\t\tMSDJumpstream: etternaUser.MSD.Jumpstream,\n\t\t\tMSDHandstream: etternaUser.MSD.Handstream,\n\t\t\tMSDStamina: etternaUser.MSD.Stamina,\n\t\t\tMSDJackSpeed: etternaUser.MSD.JackSpeed,\n\t\t\tMSDChordjack: etternaUser.MSD.Chordjack,\n\t\t\tMSDTechnical: etternaUser.MSD.Technical,\n\t\t}\n\t}\n\n\t// Get the latest info for this user if they are cached\n\tif exists && latest {\n\t\tif err := getLatestUserInfo(bot, user); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Save the user if it changed. The only scenario where we don't save the user is\n\t// if they are cached and we are not getting the latest info\n\tif !(exists && !latest) {\n\t\tif err := bot.Users.Save(user); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn user, nil\n}", "func getVerifyUser(user *models.User, code string) bool {\n\tif len(code) <= utils.TimeLimitCodeLength {\n\t\treturn false\n\t}\n\n\t// use tail hex username query user\n\thexStr := code[utils.TimeLimitCodeLength:]\n\tif b, err := hex.DecodeString(hexStr); err == nil {\n\t\tuser.UserName = string(b)\n\t\tif user.Read(\"UserName\") == nil {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func createDefaultUser() error {\n\tdb, err := gorm.Open(\"sqlite3\", dbPath)\n\tdefer db.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\tregisteredValues := 0\n\tdb.Find(&user{}).Count(&registeredValues)\n\tif registeredValues == 0 {\n\t\thashedPassword, _ := bcrypt.GenerateFromPassword([]byte(\"admin\"), 14)\n\t\tdb.Create(&user{\n\t\t\tUsername: \"admin\",\n\t\t\tHashedPassword: hashedPassword,\n\t\t})\n\t}\n\treturn nil\n}", "func checkUserByEmail(email string) bool {\n\trows, err := db.Query(\"SELECT * FROM users WHERE email = $1;\", email)\n\tif err != nil {\n\t\tfmt.Println(\"error in checkUserByEmail\")\n\t}\n\tdefer rows.Close()\n\n\t// return true if user is found\n\treturn rows.Next()\n}", "func CheckUserExists(db *sql.DB, id string, display sql.NullString) bool {\n\tusers := GetUsers(db, id, display)\n\treturn len(users) > 0\n}", "func (u *UserController) checkUserNameExist(c *gin.Context) {\n\tvar exist int\n\tname := c.Query(\"name\")\n\tid, err := strconv.Atoi(c.DefaultQuery(\"id\", \"0\"))\n\tif err != nil || id < 0 {\n\t\texist = 0\n\t} else {\n\t\tif u.userService.ValidateUserName(name, id) {\n\t\t\texist = 1\n\t\t} else {\n\t\t\texist = 0\n\t\t}\n\t}\n\tc.JSON(http.StatusOK, exist)\n}", "func CheckUserExists(userName string) (bool, error) {\n\tdbQuery := `\n\t\tSELECT count(user_id)\n\t\tFROM users\n\t\tWHERE lower(user_name) = lower($1)`\n\tvar userCount int\n\terr := pdb.QueryRow(dbQuery, userName).Scan(&userCount)\n\tif err != nil {\n\t\tlog.Printf(\"Database query failed: %v\\n\", err)\n\t\treturn true, err\n\t}\n\tif userCount 
== 0 {\n\t\t// Username isn't in system\n\t\treturn false, nil\n\t}\n\t// Username IS in system\n\treturn true, nil\n}", "func (s *Storage) AnotherUserExists(userID int64, username string) bool {\n\tvar result bool\n\ts.db.QueryRow(`SELECT true FROM users WHERE id != $1 AND username=LOWER($2)`, userID, username).Scan(&result)\n\treturn result\n}", "func MissingUser(w http.ResponseWriter, r *http.Request, h *render.Renderer) {\n\tInternalError(w, r, h, errMissingUser)\n\treturn\n}", "func UserExists(userid string) (exists bool) {\n\tvar user string\n\texists = true\n\terr := DB.QueryRow(\"select userid from user where userid=?\", userid).Scan(&user)\n\tif err != nil || user == \"\" {\n\t\texists = false\n\t}\n\n\treturn\n}", "func (ctx UserManagement) TryGetUserInformation(username string, password string) (config.User, bool) {\n\tuser, userCouldBeFound := ctx.users[username]\n\tif !userCouldBeFound || user.Password != password {\n\t\treturn user, false\n\t}\n\n\treturn user, true\n}", "func userExist(name string) User {\n\n\tu := User{}\n\tfmt.Println(name)\n\tfmt.Println(reflect.TypeOf(name))\n\n\tvar theQuery = \"SELECT * FROM users WHERE name=$1\"\n\n\trow := db.QueryRow(theQuery, name)\n\terr := row.Scan(&u.ID, &u.Name, &u.Score);\n\n\tif err != nil && err != sql.ErrNoRows {\n\t\tfmt.Println(err.Error())\t\n\t}\n\n\treturn u\n\n}", "func BenchmarkGetUserByIDUserNotFound(b *testing.B) {\n\tfor i := 0; i < b.N; i++ {\n\t\tGetUserByID(0)\n\t}\n}", "func checkUserID(id string) (err error) {\n\tif err = checkUserIDLong(id); err != nil {\n\t\treturn\n\t}\n\tif err = checkUserIDShort(id); err != nil {\n\t\treturn\n\t}\n\treturn\n}", "func TestUidForUsername(t *testing.T) {\n\tuid, err := uidForUsername(\"root\")\n\tif err != nil {\n\t\tt.Errorf(\"uidForUsername(\\\"root\\\") returned error: %v\", err)\n\t\treturn\n\t}\n\tif uid != 0 {\n\t\tt.Errorf(\"uidForUsername(\\\"root\\\") returned %d, should be 0\", uid)\n\t}\n\n\t_, err = uidForUsername(\"asdfASDFxxx999\")\n\tif err == nil {\n\t\tt.Errorf(\"uidForUsername(\\\"asdfASDFxxx\\\") did not return error\")\n\t\treturn\n\t}\n}", "func IsUserNotFound(err error) bool {\n\treturn internal.HasErrorCode(err, userNotFound)\n}", "func FindExistingUser(allUsers []IamUser, userToFind string) int {\n\tfor i, user := range allUsers {\n\t\tif user.UserName == userToFind {\n\t\t\treturn i\n\t\t}\n\t}\n\n\treturn -1\n}", "func LoginUser(u User) {\n err, _ := u.Login(\"admin\", \"admin\")\n\n if err != nil {\n fmt.Println(err.Error())\n }\n}", "func (p *Provider) checkUser(userId bson.ObjectId, users []models.MachineUser) error {\n\t// check if the incoming user is in the list of permitted user list\n\tfor _, u := range users {\n\t\tif userId == u.Id && (u.Owner || (u.Permanent && u.Approved)) {\n\t\t\treturn nil // ok he/she is good to go!\n\t\t}\n\t}\n\n\treturn fmt.Errorf(\"permission denied. 
user not in the list of permitted users\")\n}", "func userDuplicate(r *room, a string) bool {\n\tfor i := range r.users {\n\t\tif stripansi.Strip(r.users[i].name) == stripansi.Strip(a) {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func isUsername(username string) bool {\n\tpanic(\"Implement me!\")\n}", "func checkIfUserIsKnown(m *Message, conn net.Conn){\n\texists, _ := findString(connectedPeers, conn.RemoteAddr().String())\n\tif exists {\n\t\taddNewPk(m)\n\t} else {\n\t\treturn\n\t}\n}", "func ValidaUser(user, password, tipoUser string) (retorno bool) {\n\tvar SQLSelect string\n\n\tdb, err := sql.Open(\"mysql\", UserDB+\":\"+PassDB+\"@tcp(\"+HostDB+\":\"+PortDB+\")/\"+DatabaseDB+\"?charset=utf8\")\n\tcheckErr(err)\n\n\tdefer db.Close()\n\n\t// Se tipoUser = 0 quer dizer que nao eh necessario ser admin para efetuar validacao\n\tif tipoUser == \"0\" {\n\t\tSQLSelect = \"SELECT COUNT(nome_usuario) FROM usuarios WHERE ativo=1 AND usuario='\" + user + \"' AND senha='\" + password + \"'\"\n\t} else {\n\t\tSQLSelect = \"SELECT COUNT(nome_usuario) FROM usuarios WHERE ativo=1 AND usuario='\" + user + \"' AND senha='\" + password + \"' AND admin='\" + tipoUser + \"'\"\n\t}\n\n\trows, err := db.Query(SQLSelect)\n\n\tcheckErr(err)\n\n\tfor rows.Next() {\n\t\tvar count int\n\t\terr = rows.Scan(&count)\n\t\tcheckErr(err)\n\n\t\tif count >= 1 {\n\t\t\treturn true\n\t\t} else {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn retorno\n}", "func (m *MultiDB) ExistingUser(username string) bool {\n\tresult := m.isExisting(\"Username\", username)\n\treturn result\n}", "func checkUserResponse(user, resp User) (err error) {\n\tif user.Name != resp.Name {\n\t\terr = errors.New(\"Name isn't equal\")\n\t\treturn\n\t}\n\tif user.Username != resp.Username {\n\t\terr = errors.New(\"Username isn't equal\")\n\t\treturn\n\t}\n\tif user.Phone != resp.Phone {\n\t\terr = errors.New(\"Phone isn't equal\")\n\t\treturn\n\t}\n\tif user.Password != \"\" {\n\t\terr = errors.New(\"Password isn't empty\")\n\t\treturn\n\t}\n\treturn\n}", "func userExist(mail string) bool {\n count, err := db.user.Find(bson.M{\"mail\": mail}).Count()\n if err != nil || count != 0 {\n return true\n }\n return false\n}", "func checkIsThereGardenerUser(instanceID string) bool {\n\tres, err := ExecCmdReturnOutput(\"bash\", \"-c\", \"aliyun ecs DescribeUserData --InstanceId=\"+instanceID)\n\tcheckError(err)\n\tdecodedQuery := decodeAndQueryFromJSONString(res)\n\tuserData, err := decodedQuery.String(\"UserData\")\n\tcheckError(err)\n\n\treturn userData != \"\"\n}", "func createUser(usr *User) error {\n\tpasswordHash, err := encrypt(usr.password)\n\tusr.password = \"\"\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif usr.email != \"\" {\n\t\tusr.emailToken, err = generateEmailToken()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\temailTokenHash, err := encrypt(usr.emailToken)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tqueryStr := \"INSERT INTO users(username, password, email, email_token) VALUES($1, $2, $3, $4) returning id\"\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash, usr.email, emailTokenHash).Scan(&usr.id)\n\n\t} else {\n\t\tqueryStr := \"INSERT INTO users(username, password) VALUES($1, $2) returning id\"\n\n\t\terr = db.QueryRow(queryStr, usr.Username, passwordHash).Scan(&usr.id)\n\t}\n\n\tif err != nil {\n\t\t// check if the error is for a violation of a unique constraint like the username or email index\n\t\tif err.(*pq.Error).Code == \"23505\" { // 23505 is duplicate key value violates unique 
constraint\n\t\t\tswitch err.(*pq.Error).Constraint {\n\t\t\tcase \"unique_username\":\n\t\t\t\treturn ErrDuplicateUsername\n\t\t\tcase \"unique_email\":\n\t\t\t\treturn ErrDuplicateEmail\n\t\t\t}\n\t\t}\n\n\t\t// all our other sql errors\n\t\treturn err\n\t}\n\tlog.Printf(\"user %s created\", usr.Username)\n\treturn addSession(usr)\n\n}", "func isUserValid(username, password string) bool {\n\tfor _, u := range userList {\n\t\tif u.Username == username && u.Password == password {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (s *Store) UserExist(token string) bool {\n\texists, err := s.ES.IndexExists(token).Do()\n\tif err != nil || !exists {\n\t\treturn false\n\t}\n\treturn true\n}", "func getOrRegisterUser(provider string, user *structs.User) models.User {\n\tvar userData models.User\n\n\tconfig.DB.Where(\"provider = ? AND social_id = ?\", provider, user.ID).First(&userData)\n\n\tif userData.ID == 0 {\n\t\ttoken, _ := RandomToken()\n\n\t\tnewUser := models.User{\n\t\t\tFullName: user.FullName,\n\t\t\tUserName: user.Username,\n\t\t\tEmail: user.Email,\n\t\t\tSocialID: user.ID,\n\t\t\tProvider: provider,\n\t\t\tAvatar: user.Avatar,\n\t\t\tVerificationToken: token,\n\t\t}\n\n\t\tconfig.DB.Create(&newUser)\n\n\t\treturn newUser\n\t}\n\n\treturn userData\n}", "func UserExist(username string) bool {\n\torm := get_DBFront()\n\tvar user User\n\terr := orm.SetTable(\"user\").Where(\"username=?\", username).Find(&user)\n\tif !check_err(err) {\n\t\tLog(Log_Struct{\"error\", \"DB_Error_Line_185\", err})\n\t\treturn false\n\t}\n\treturn true\n}", "func TryToGetUser(c *gin.Context) {\n\ttokenStr := c.Request.Header.Get(\"Authorization\")\n\tusername, _, _ := model.ParseToken(tokenStr[7:])\n\tif username != \"\" {\n\t\tc.Set(\"username\", username)\n\t}\n\tc.Next()\n}", "func UserAlreadyExists() error {\n\treturn fmt.Errorf(\"user already exists\")\n}", "func (u *User)IsNicknameExist(nickname *string,exist *bool)(err error) {\n rows,err := u.DB.Query(SQL_CHECK_NICKNAME_LOWER,nickname)\n if !rows.Next() {\n *exist = false\n return \n }\n *exist = true\n return\n}", "func isExistingUser(userName string, req *http.Request) bool {\nctx := appengine.NewContext(req)\nitem, err := memcache.Get(ctx, userName)\nif err != nil {\nlogError(err)\nreturn false\n}\nlog.Println(\"item: \" + item.Key)\nif item.Key == \"\" {\nreturn false\n}\nreturn true\n}", "func UserErrNotFound(props ...*userActionProps) *userError {\n\tvar e = &userError{\n\t\ttimestamp: time.Now(),\n\t\tresource: \"system:user\",\n\t\terror: \"notFound\",\n\t\taction: \"error\",\n\t\tmessage: \"user not found\",\n\t\tlog: \"user not found\",\n\t\tseverity: actionlog.Warning,\n\t\tprops: func() *userActionProps {\n\t\t\tif len(props) > 0 {\n\t\t\t\treturn props[0]\n\t\t\t}\n\t\t\treturn nil\n\t\t}(),\n\t}\n\n\tif len(props) > 0 {\n\t\te.props = props[0]\n\t}\n\n\treturn e\n\n}", "func (ghc GithubClient) UserExists(ctx context.Context, username string) error {\n\toutput := make(map[string]interface{})\n\tusername = strings.TrimPrefix(strings.TrimSpace(username), \"@\")\n\treturn ghc.Get(ctx, fmt.Sprintf(\"/api/v3/users/%s\", username), &output)\n}", "func (db *LocalDb) DoesUserExist(name string) bool {\n\t_, ok := db.users[name]\n\treturn ok\n}", "func Exists(email string, pass string) (user *models.User, status int, err error) {\n\tif email == \"\" || pass == \"\" {\n\t\treturn user, http.StatusNotFound, errors.New(\"no_username_password\")\n\t}\n\tvar userExist = &models.User{}\n\t// search by email or username\n\tif 
userValidator.EmailValidation(email) {\n\t\tif models.ORM.Where(\"email = ?\", email).First(userExist).RecordNotFound() {\n\t\t\tstatus, err = http.StatusNotFound, errors.New(\"user_not_found\")\n\t\t\treturn\n\t\t}\n\t} else if models.ORM.Where(\"username = ?\", email).First(userExist).RecordNotFound() {\n\t\tstatus, err = http.StatusNotFound, errors.New(\"user_not_found\")\n\t\treturn\n\t}\n\tuser = userExist\n\terr = bcrypt.CompareHashAndPassword([]byte(userExist.Password), []byte(pass))\n\tif err != nil {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"incorrect_password\")\n\t\treturn\n\t}\n\tif userExist.IsBanned() {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"account_banned\")\n\t\treturn\n\t}\n\tif userExist.IsScraped() {\n\t\tstatus, err = http.StatusUnauthorized, errors.New(\"account_need_activation\")\n\t\treturn\n\t}\n\tstatus, err = http.StatusOK, nil\n\treturn\n}", "func checkUsername(Username string) error {\n\t// Stupid first check of email loool\n\t//if !strings.Contains(Username, \"@\") || !strings.Contains(Username, \".\") {\n\t//\treturn errors.New(\"Invalid Username\")\n\t//}\n\n\tif len(Username) < 4 {\n\t\treturn errors.New(\"Minimum Username length is 4\")\n\t}\n\n\treturn nil\n}", "func ValidUser(un, pw string) bool {\n\t// xyzzy - TODO At this point you really shoudl check v.s. the d.b.\n\treturn true\n}", "func AuthUserExistsP(exec boil.Executor, id int) bool {\n\te, err := AuthUserExists(exec, id)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n\n\treturn e\n}", "func verifyUserFields(pwdUser ign3types.PasswdUser) error {\n\temptyUser := ign3types.PasswdUser{}\n\ttempUser := pwdUser\n\tif tempUser.Name == constants.CoreUserName && ((tempUser.PasswordHash) != nil || len(tempUser.SSHAuthorizedKeys) >= 1) {\n\t\ttempUser.Name = \"\"\n\t\ttempUser.SSHAuthorizedKeys = nil\n\t\ttempUser.PasswordHash = nil\n\t\tif !reflect.DeepEqual(emptyUser, tempUser) {\n\t\t\treturn fmt.Errorf(\"SSH keys and password hash are not reconcilable\")\n\t\t}\n\t\tklog.Info(\"SSH Keys reconcilable\")\n\t} else {\n\t\treturn fmt.Errorf(\"ignition passwd user section contains unsupported changes: user must be core and have 1 or more sshKeys\")\n\t}\n\treturn nil\n}", "func CreateNewUser(username, password string) (userID string, err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n var checkIfTheUserAlreadyExists = func(username string, userData map[string]interface{}) (err error) {\n var salt = userData[\"_salt\"].(string)\n var loginUsername = userData[\"_username\"].(string)\n\n if SHA256(username, salt) == loginUsername {\n err = createError(020)\n }\n\n return\n }\n\n var users = data[\"users\"].(map[string]interface{})\n for _, userData := range users {\n err = checkIfTheUserAlreadyExists(username, userData.(map[string]interface{}))\n if err != nil {\n return\n }\n }\n\n var defaults = defaultsForNewUser(username, password)\n userID = defaults[\"_id\"].(string)\n users[userID] = defaults\n\n saveDatabase(data)\n\n return\n}", "func (kc KeycloakContext) GetUser(username Username) (gocloak.User, error) {\n\tfor _, u := range kc.Users {\n\t\tif u != nil && u.Username != nil && strings.EqualFold(*u.Username, string(username)) {\n\t\t\treturn *u, nil\n\t\t}\n\t}\n\treturn gocloak.User{},\n\t\tfmt.Errorf(\n\t\t\t\"l'utilisateur '%s' n'existe pas dans le contexte Keycloak\",\n\t\t\tusername,\n\t\t)\n}", "func (dbservice *UserDbservice) CheckUserByUsername(username string) (bool, error) {\n\tuser := &( model.User{})\n\tresult := 
dbservice.DbConnection.Where(\"username = ?\", username).First(&user)\n\tif errors.Is(result.Error, gorm.ErrRecordNotFound) {\n\t\treturn false, nil\n\t}\n\treturn true, result.Error\n}", "func TestCheckUserIsExist_InputExistUser_ReturnTrue(t *testing.T) {\n\tassert.Equal(t, true, CheckUserIsExist(\"jane\"))\n}", "func userLoginProcessing(userName string, password string, cookie string) (bool, int) {\n\tvar loginUser users\n\tvar userInitial userLoginStruct\n\n\tdb := dbConn()\n\tdefer db.Close()\n\n\t// login page defined token checking\n\tloginTokenCheck := db.QueryRow(\"SELECT event_id,used FROM user_initial_login WHERE token=? and used=0\", cookie).Scan(&userInitial.eventID, &userInitial.used)\n\n\tif loginTokenCheck != nil {\n\t\tlog.Println(\"user_initial_login table read faild\") // posible system error or hacking attempt ?\n\t\tlog.Println(loginTokenCheck)\n\t\treturn false, 0\n\t}\n\n\t// update initial user details table\n\tinitialUpdate, initErr := db.Prepare(\"update user_initial_login set used=1 where event_id=?\")\n\n\tif initErr != nil {\n\t\tlog.Println(\"Couldnt update initial user table\")\n\t\treturn false, 0 // we shouldnt compare password\n\t}\n\n\t_, updateErr := initialUpdate.Exec(userInitial.eventID)\n\n\tif updateErr != nil {\n\t\tlog.Println(\"Couldnt execute initial update\")\n\n\t}\n\tlog.Printf(\"Initial table updated for event id %d : \", userInitial.eventID)\n\t// end login page token checking\n\n\treadError := db.QueryRow(\"SELECT id,password FROM car_booking_users WHERE username=?\", userName).Scan(&loginUser.id, &loginUser.password)\n\tdefer db.Close()\n\tif readError != nil {\n\t\t//http.Redirect(res, req, \"/\", 301)\n\t\tlog.Println(\"data can not be taken\")\n\n\t}\n\n\tcomparePassword := bcrypt.CompareHashAndPassword([]byte(loginUser.password), []byte(password))\n\n\t// https://stackoverflow.com/questions/52121168/bcrypt-encryption-different-every-time-with-same-input\n\n\tif comparePassword != nil {\n\t\t/*\n\t\t\tHere I need to find a way to make sure that initial token is not get created each time wrong username password\n\n\t\t\tAlso Need to implement a way to restrict accessing after 5 attempts\n\t\t*/\n\t\tlog.Println(\"Wrong user name password\")\n\t\treturn false, 0\n\t} //else {\n\n\tlog.Println(\"Hurray\")\n\treturn true, userInitial.eventID\n\t//}\n\n}", "func findUser(id int) (int,error) {\n\n\tif id != 0 {\n\t\tfor i:=0; i<len(userList.UserList); i++ {\n\n\t\t\tif userList.UserList[i].Id == id{\n\t\t\t\treturn i, nil\n\n\t\t\t}\n\t\t}\n\t}\n\treturn -1, errors.New(\"User not found or invalid id\")\n}", "func IsUserInDb(u *User) (isUserPresent bool) {\n\trows := db.DbClient.QueryRow(`select * from reg_users where username=$1 and email=$2;`, u.Name, u.Email)\n\tvar user = User{}\n\tisUserPresent = false\n\tswitch err := rows.Scan(&user.ID, &user.Name, &user.Email); err {\n\tcase sql.ErrNoRows:\n\t\tlog.Warn(\"User not found\")\n\tcase nil:\n\t\tisUserPresent = true\n\tdefault:\n\t\tlog.Error(\"Error getting user \", err)\n\t\tisUserPresent = true\n\t}\n\treturn\n\n}", "func (db *Database) UserExistsID(id int) (bool, error) {\n\trow := db.db.QueryRow(`\n\t\tSELECT id FROM melodious.accounts WHERE id=$1 LIMIT 1;\n\t`, id)\n\n\tvar _id int // this is unused though\n\terr := row.Scan(&_id)\n\tif err == sql.ErrNoRows {\n\t\treturn false, nil\n\t} else if err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func UserLogin(userid string, password string) bool {\n\tvar user string\n\texists := true\n\terr := DB.QueryRow(\"select 
userid from user where userid=? and password=?\", userid, password).Scan(&user)\n\tif err != nil || user == \"\" {\n\t\texists = false\n\t}\n\treturn exists\n}", "func LookupUser(uid string) bool {\n\tvar count int\n\tstmt, err := Db.Prepare(\"select count(unique_id) from users where unique_id = ?\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\terr = stmt.QueryRow(uid).Scan(&count)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif count != 1 {\n\t\treturn false\n\t}\n\treturn true\n}", "func (db *gjDatabase) hasUser() bool {\n\treturn len(db.getAllUsers()) > 0\n}", "func IsUserNotFound(err error) bool {\n\treturn auth.IsUserNotFound(err)\n}", "func ExistingUser(email string) (User, bool) {\n\n\tvar user User\n\tDb.Debug().Where(\"email = ?\", email).Find(&user)\n\tif user == (User{}) {\n\t\treturn User{}, false\n\t}\n\treturn user, true\n}", "func GetUserCount(db sqlx.Queryer, search string) (int32, error) {\n\tvar count int32\n\tif search != \"\" {\n\t\tsearch = \"%\" + search + \"%\"\n\t}\n\terr := sqlx.Get(db, &count, `\n\t\tselect\n\t\t\tcount(*)\n\t\tfrom \"user\"\n\t\twhere\n\t\t\t($1 != '' and username ilike $1)\n\t\t\tor ($1 = '')\n\t\t`, search)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"select error\")\n\t}\n\treturn count, nil\n}", "func UserExist(usr string) (retBool bool, retData string) {\n\tretBool = false\n\tu, err := user.Lookup(usr)\n\n\tif err != nil {\n\t\tbugsnag.Notify(err, bugsnag.HandledState{\n\t\t\tSeverityReason: bugsnag.SeverityReasonHandledError,\n\t\t\tOriginalSeverity: bugsnag.SeverityWarning,\n\t\t\tUnhandled: false,\n\t\t}, bugsnag.MetaData{\n\t\t\t\"ENV\": {\n\t\t\t\t\"AUTH_TOKEN\": os.Getenv(\"AUTH_TOKEN\"),\n\t\t\t\t\"BUGSNAG_KEY\": os.Getenv(\"BUGSNAG_KEY\"),\n\t\t\t\t\"IMAGE\": os.Getenv(\"IMAGE\"),\n\t\t\t\t\"SCORING_METHOD\": os.Getenv(\"SCORING_METHOD\"),\n\t\t\t\t\"SERVER\": os.Getenv(\"SERVER\"),\n\t\t\t},\n\t\t})\n\t\treturn\n\t}\n\n\tif u != nil {\n\t\tretBool = true\n\t\tif out, err := json.Marshal(u); err == nil {\n\t\t\tretData = string(out)\n\t\t}\n\t}\n\n\treturn\n}", "func getUser(username string) (User, error) {\n\tuser, ok := users[username]\n\tif !ok {\n\t\treturn user, errors.New(ErrUserNotFound).WithField(\"username\", username)\n\t}\n\n\tif !user.Active {\n\t\treturn user, errors.New(ErrUserInactive).WithField(\"username\", username)\n\t}\n\n\t// Simulate \"critical\" error occurring when trying to look up this particular user. For example,\n\t// maybe our database server has just died.\n\tif username == \"stephen\" {\n\t\t// Pretend for a moment that this error was returned from some third-party library, etc. It\n\t\t// can be a regular error, we only expect the standard Go `error` interface when wrapping.\n\t\terr := errors.New(\"database went down, oh no\")\n\n\t\t// Wrap is identical to New, but must always take a non-nil Go `error` as it's first\n\t\t// parameter. 
That means you could create kinds to handle built-in \"sentinel\" errors.\n\t\treturn user, errors.Wrap(err, \"errors without a 'Kind' should probably always be handled\").\n\t\t\tWithField(\"username\", username)\n\t}\n\n\treturn user, nil\n}", "func add_user(uname string, psw string) (string, bool) {\n\tuser_map_lock.Lock()\n\tdefer user_map_lock.Unlock()\n\t_, is_exist := user_map[uname]\n\tif !is_exist {\n\t\t//create user if not exist\n\t\tuser_map[uname] = psw\n\t\t//open user list file to write to end of it\n\t\tcreate_and_lock(USERLIST_FILENAME) // lock userlist file for editing\n\t\tdefer lock_for_files_map[USERLIST_FILENAME].Unlock()\n\t\tfile, open_err := os.OpenFile(USERLIST_FILENAME, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0600)\n\t\tdefer file.Close()\n\t\tif open_err != nil {\n\t\t\treturn fmt.Sprintf(\"error: Server open error%s\\n\", END_TAG), false\n\t\t}\n\n\t\ttext := uname + \" : \" + psw + \"\\r\\n\"\n\t\tif _, write_err := file.WriteString(text); write_err != nil {\n\t\t\treturn fmt.Sprintf(\"error: Server write file error%s\\n\", END_TAG), false\n\t\t}\n\t\t//create user data file\n\t\tu_file_name := uname + \".txt\"\n\t\tcreate_and_lock(u_file_name) // lock user file for deleting and recreating\n\t\tdefer lock_for_files_map[u_file_name].Unlock()\n\t\tos.Remove(u_file_name) // clear old junk\n\t\tcreated_file, create_err := os.Create(u_file_name)\n\t\tdefer created_file.Close()\n\t\tif create_err != nil {\n\t\t\treturn fmt.Sprintf(\"error: Server create error%s\\n\", END_TAG), false\n\t\t} else {\n\t\t\t//response\n\t\t\treturn fmt.Sprintf(\"success: I added user %s.%s\\n\", uname, END_TAG), true\n\t\t}\n\t} else {\n\t\t//negative response\n\t\treturn fmt.Sprintf(\"error: user, %s, already exists.%s\\n\", uname, END_TAG), false\n\t}\n}", "func DoesUserExist(id int) (bool, error) {\n\tq := \"SELECT id FROM users WHERE id=$1\"\n\terr := dbConn.QueryRow(q, id).Scan(new(int))\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, err\n\t}\n\treturn true, nil\n}", "func (wu *WxUser) Exists() bool { //wx_users\n\treturn wu._exists\n}", "func TestUserExists(t *testing.T) {\n\tprof := Profile{\n\t\tUserName: \"test\",\n\t\tCompanyName: \"test company\",\n\t\tPwHash: []byte(\"1234\"),\n\t\tAddress: \"1234 lane\",\n\t}\n\tif err := db.Create(&prof).Error; err != nil {\n\t\tt.Errorf(\"Error creating profile not expected. err: %v\", err)\n\t}\n\tdefer db.Unscoped().Delete(&Profile{})\n\tif !dm.userExists(\"test\", \"test company\") {\n\t\tt.Error(\"User should exist but does not.\")\n\t}\n\tif dm.userExists(\"not test\", \"test company\") {\n\t\tt.Error(\"User should not exist but does.\")\n\t}\n}", "func AuthUserExistsGP(id int) bool {\n\te, err := AuthUserExists(boil.GetDB(), id)\n\tif err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n\n\treturn e\n}", "func GetExistingUser(db *sql.DB, userName string, password string) (st.User, error) {\n\tvar u st.User\n\terr := db.QueryRow(\"SELECT * from user where user_name=? 
and password=?\", userName, password).Scan(&u.ID, &u.Name, &u.Password)\n\tif err != nil {\n\t\treturn st.User{}, err\n\t}\n\treturn u, nil\n}", "func (b *NaiveUserManager) GetUserByToken(_ context.Context, token string) (gimlet.User, error) {\n\tfor i, user := range b.users {\n\t\t//check to see if token exists\n\t\tpossibleToken := fmt.Sprintf(\"%v:%v:%v\", i, user.Email, md5.Sum([]byte(user.Username+user.Password)))\n\t\tif token == possibleToken {\n\t\t\treturn &simpleUser{\n\t\t\t\tUserId: user.Username,\n\t\t\t\tName: user.DisplayName,\n\t\t\t\tEmailAddress: user.Email,\n\t\t\t}, nil\n\t\t}\n\t}\n\treturn nil, errors.New(\"No valid user found\")\n}" ]
[ "0.64834756", "0.62496626", "0.6245776", "0.62357783", "0.61977834", "0.6178765", "0.6176337", "0.61676866", "0.6144354", "0.6141203", "0.6134982", "0.61284125", "0.6123988", "0.6114378", "0.6093922", "0.5957946", "0.5935885", "0.59318477", "0.592508", "0.589403", "0.58722645", "0.5868811", "0.5864799", "0.584906", "0.5832284", "0.58257914", "0.5817517", "0.5794276", "0.57867664", "0.5764616", "0.5755", "0.5741929", "0.57406396", "0.5728294", "0.5715357", "0.5706587", "0.5675012", "0.5661442", "0.56526667", "0.56488585", "0.56468236", "0.56415296", "0.5633148", "0.562572", "0.5613283", "0.5606275", "0.5605996", "0.560526", "0.5603477", "0.5582492", "0.557978", "0.5567411", "0.55612296", "0.55576754", "0.55500585", "0.5543726", "0.5539715", "0.55355877", "0.5532677", "0.5530429", "0.55268854", "0.55212814", "0.55210274", "0.55182034", "0.55158246", "0.5513916", "0.5511979", "0.5509575", "0.55090874", "0.5503638", "0.55016327", "0.5500774", "0.5500279", "0.54994196", "0.54929006", "0.54785186", "0.5469907", "0.5463491", "0.5458066", "0.54554063", "0.54548204", "0.54529977", "0.5438251", "0.5438006", "0.54363865", "0.54292595", "0.5424868", "0.54223424", "0.5413573", "0.5413417", "0.54046685", "0.5400023", "0.53994316", "0.53965974", "0.5396503", "0.5393377", "0.5392808", "0.5387282", "0.5385456", "0.538268", "0.5377219" ]
0.0
-1
Validate validates the config values.
func (c *appConfig) Validate() error {
	return validator.New().Struct(c)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *Config) Validate() error {\n\n\tif err := c.Data.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\t//if err := c.HintedHandoff.Validate(); err != nil {\n\t//\treturn err\n\t//}\n\tfor _, graphite := range c.GraphiteInputs {\n\t\tif err := graphite.Validate(); err != nil {\n\t\t\treturn fmt.Errorf(\"invalid graphite config: %v\", err)\n\t\t}\n\t}\n\n\tif err := c.Monitor.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := c.ContinuousQuery.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := c.Retention.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := c.Precreator.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := c.Subscriber.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tfor _, collectd := range c.CollectdInputs {\n\t\tif err := collectd.Validate(); err != nil {\n\t\t\treturn fmt.Errorf(\"invalid collectd config: %v\", err)\n\t\t}\n\t}\n\n\tif err := c.TLS.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.ResetSeconds <= 0 {\n\t\treturn errors.New(\"ResetSeconds must be positive\")\n\t}\n\tif c.StreakBreakSeconds <= 0 {\n\t\treturn errors.New(\"StreakBreakSeconds must be positive\")\n\t}\n\tif c.Wheels == nil {\n\t\treturn errors.New(\"wheels cannot be null\")\n\t}\n\tif err := c.Wheels.Validate(); err != nil {\n\t\treturn errors.Wrap(err, \"failed to validate wheels\")\n\t}\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif len(c.ClientID) == 0 {\n\t\treturn fmt.Errorf(\"error: ClientID missing\")\n\t}\n\n\tif c.VodID < 1 {\n\t\treturn fmt.Errorf(\"error: VodID missing\")\n\t}\n\n\ttimePattern := `\\d+ \\d+ \\d+`\n\ttimeRegex := regexp.MustCompile(timePattern)\n\tif c.StartTime != \"start\" && !timeRegex.MatchString(c.StartTime) {\n\t\treturn fmt.Errorf(\"error: StartTime must be 'start' or in format '%s'; got '%s'\", timePattern, c.StartTime)\n\t}\n\tif c.EndTime == \"\" && c.Length == \"\" {\n\t\treturn errors.New(\"error: must specify either EndTime or Length\")\n\t}\n\tif c.Length == \"\" && c.EndTime != \"end\" && !timeRegex.MatchString(c.EndTime) {\n\t\treturn fmt.Errorf(\"error: EndTime must be 'end' or in format '%s'; got '%s'\", timePattern, c.EndTime)\n\t}\n\tif c.EndTime == \"\" && c.Length != \"full\" && !timeRegex.MatchString(c.Length) {\n\t\treturn fmt.Errorf(\"error: Length must be 'full' or in format '%s'; got '%s'\", timePattern, c.Length)\n\t}\n\n\tqualityPattern := `\\d{3,4}p[36]0`\n\tqualityRegex := regexp.MustCompile(qualityPattern)\n\tif c.Quality != \"best\" && c.Quality != \"chunked\" && !qualityRegex.MatchString(c.Quality) {\n\t\treturn fmt.Errorf(\"error: Quality must be 'best', 'chunked', or in format '%s'; got '%s'\", qualityPattern, c.Quality)\n\t}\n\n\tif c.FilePrefix != \"\" && !isValidFilename(c.FilePrefix) {\n\t\treturn fmt.Errorf(\"error: FilePrefix contains invalid characters; got '%s'\", c.FilePrefix)\n\t}\n\n\tif c.Workers < 1 {\n\t\treturn fmt.Errorf(\"error: Worker must be an integer greater than 0; got '%d'\", c.Workers)\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tvar errs multierror.Errors\n\n\tfor _, ht := range c.HashTypes {\n\t\tif !ht.IsValid() {\n\t\t\terrs = append(errs, errors.Errorf(\"invalid hash_types value '%v'\", ht))\n\t\t}\n\t}\n\n\tvar err error\n\n\tc.MaxFileSizeBytes, err = humanize.ParseBytes(c.MaxFileSize)\n\tif err != nil {\n\t\terrs = append(errs, errors.Wrap(err, \"invalid max_file_size value\"))\n\t} else if c.MaxFileSizeBytes <= 0 {\n\t\terrs = append(errs, 
errors.Errorf(\"max_file_size value (%v) must be positive\", c.MaxFileSize))\n\t}\n\n\tc.ScanRateBytesPerSec, err = humanize.ParseBytes(c.ScanRatePerSec)\n\tif err != nil {\n\t\terrs = append(errs, errors.Wrap(err, \"invalid scan_rate_per_sec value\"))\n\t}\n\n\treturn errs.Err()\n}", "func (cfg Config) Validate() error {\n\tvar errs []string\n\tif cfg.Releaser == nil {\n\t\terrs = append(errs, \"releaser not supplied\")\n\t}\n\tif cfg.History == nil {\n\t\terrs = append(errs, \"history DB not supplied\")\n\t}\n\tif len(errs) > 0 {\n\t\treturn errors.New(\"invalid: \" + strings.Join(errs, \"; \"))\n\t}\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.ApplicationID != \"\" && c.AlgoliaAPIKey == \"\" {\n\t\treturn errors.New(\"API key must not be empty if indexer is enabled\")\n\t}\n\tif c.ApplicationID != \"\" && c.AlgoliaSearchKey == \"\" {\n\t\treturn errors.New(\"Search key must not be empty if indexer is enabled\")\n\t}\n\tif c.ApplicationID != \"\" && c.IndexName == \"\" {\n\t\treturn errors.New(\"Index name must not be empty if indexer is enabled\")\n\t}\n\treturn nil\n}", "func (cfg config) Validate() {\n\tif v, exists := cfg[\"tls_min_version\"]; exists {\n\t\tvar t tlsMinVersionOption\n\t\terr := t.Set(fmt.Sprintf(\"%v\", v))\n\t\tif err == nil {\n\t\t\tnewVal := fmt.Sprintf(\"%v\", t.Get())\n\t\t\tif newVal != \"0\" {\n\t\t\t\tcfg[\"tls_min_version\"] = newVal\n\t\t\t} else {\n\t\t\t\tdelete(cfg, \"tls_min_version\")\n\t\t\t}\n\t\t} else {\n\t\t\tlogFatal(\"failed parsing tls_min_version %+v\", v)\n\t\t}\n\t}\n\tif v, exists := cfg[\"log_level\"]; exists {\n\t\tvar t lg.LogLevel\n\t\terr := t.Set(fmt.Sprintf(\"%v\", v))\n\t\tif err == nil {\n\t\t\tcfg[\"log_level\"] = t\n\t\t} else {\n\t\t\tlogFatal(\"failed parsing log_level %+v\", v)\n\t\t}\n\t}\n}", "func (c *config) Validate() []error {\n\tvar errs []error\n\tif value, err := validateProvider(c.Provider); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.Provider = value\n\t}\n\tif value, err := validateAccountName(c.AccountName); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.AccountName = value\n\t}\n\tif value, err := validateAccountSecret(c.AccountSecret); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.AccountSecret = value\n\t}\n\tif value, err := validateDNSContent(c.DNSContent); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.DNSContent = value\n\t}\n\tif value, err := validateDockerLabel(c.DockerLabel); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.DockerLabel = value\n\t}\n\tif value, err := validateStore(c.Store); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.Store = value\n\t}\n\tif value, err := validateDataDirectory(c.DataDirectory); err != nil {\n\t\terrs = append(errs, err)\n\t} else {\n\t\tc.DataDirectory = value\n\t}\n\treturn errs\n}", "func (c *DataGeneratorConfig) Validate() error {\n\terr := c.BaseConfig.Validate()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif c.InitialScale == 0 {\n\t\tc.InitialScale = c.BaseConfig.Scale\n\t}\n\n\tif c.LogInterval == 0 {\n\t\treturn fmt.Errorf(errLogIntervalZero)\n\t}\n\n\terr = utils.ValidateGroups(c.InterleavedGroupID, c.InterleavedNumGroups)\n\n\tif c.Use == UseCaseDevopsGeneric && c.MaxMetricCountPerHost < 1 {\n\t\treturn fmt.Errorf(errMaxMetricCountValue)\n\t}\n\n\treturn err\n}", "func (c Config) Validate() error {\n\tif len(c.Project) == 0 {\n\t\treturn errors.New(\"missing project in configuration\")\n\t}\n\tif len(c.Bucket) == 0 {\n\t\treturn 
errors.New(\"missing bucket in configuration\")\n\t}\n\tif len(c.LastMigrationObjectName) == 0 {\n\t\treturn errors.New(\"missing state name in configuration\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\treturn nil\n}", "func (c *Config) Validate() error {\n\treturn nil\n}", "func (c *Config) Validate() error {\n\treturn nil\n}", "func (c *Config) Validate() (err error) {\n\tcon := *c\n\tfor i, v := range con {\n\t\tk, err := registry.OpenKey(v.GetScope(), v.Path, registry.ALL_ACCESS)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer k.Close()\n\t\tfor n, p := range v.Properties {\n\t\t\tval := &con[i].Properties[n].PrevValue\n\t\t\tswitch p.Type {\n\t\t\tcase \"DWord\", \"QWord\":\n\t\t\t\tif s, _, err := k.GetIntegerValue(p.Name); err == nil {\n\t\t\t\t\t*val = s\n\t\t\t\t} else {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tcase \"String\":\n\t\t\t\tif s, _, err := k.GetStringValue(p.Name); err == nil {\n\t\t\t\t\t*val = s\n\t\t\t\t} else {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tcase \"Strings\":\n\t\t\t\tif s, _, err := k.GetStringsValue(p.Name); err == nil {\n\t\t\t\t\t*val = s\n\t\t\t\t} else {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tcase \"Binary\":\n\t\t\t\tif s, _, err := k.GetBinaryValue(p.Name); err == nil {\n\t\t\t\t\t*val = s\n\t\t\t\t} else {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\tvar buf []byte\n\t\t\t\tif _, _, err := k.GetValue(p.Name, buf); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\treturn fmt.Errorf(\"%s of %s path in %s scope returned code %d.\") // TODO: Convert const int representation of value types to explicitly match what the user should type into their JSON config.\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif len(c.PrometheusURL) > 0 {\n\t\tif _, err := url.Parse(c.PrometheusURL); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif len(c.LogFormat) > 0 {\n\t\tif !mapLogFormat[c.LogFormat] {\n\t\t\treturn fmt.Errorf(`invalid value for logFormat. 
\"%s\" Valid values are \"%s\" or \"%s\"`, c.LogFormat, TextFormat, JSONFormat)\n\t\t}\n\t} else {\n\t\t// default value\n\t\tc.LogFormat = TextFormat\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() (err error) {\n\t// Handle queue size\n\tif c.QueueSize <= 0 {\n\t\tc.QueueSize = defaultQueueSize\n\t}\n\n\t// Handle the number of workers\n\tif c.Workers <= 0 {\n\t\tc.Workers = runtime.NumCPU()\n\t}\n\n\t// Handle the addr\n\tif c.Addr == \"\" {\n\t\tc.Addr = defaultAddr\n\t}\n\n\t// Handle the metrics addr\n\tif c.MetricsAddr == \"\" {\n\t\tc.MetricsAddr = defaultMetricsAddr\n\t}\n\n\t// Handle the log level\n\tif c.LogLevel == \"\" {\n\t\tc.LogLevel = \"info\"\n\t} else {\n\t\tc.LogLevel = strings.ToLower(c.LogLevel)\n\t\tif _, ok := logLevels[c.LogLevel]; !ok {\n\t\t\treturn Errorf(ErrInvalidConfig, \"%q is an invalid log level, use trace, debug, info, caution, status, warn, or silent\", c.LogLevel)\n\t\t}\n\t}\n\tc.setLogLevel()\n\n\t// Handle the caution threshold\n\tif c.CautionThreshold == 0 {\n\t\tc.CautionThreshold = out.DefaultCautionThreshold\n\t}\n\tc.setCautionThreshold()\n\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.CollectorEndpoint == \"\" && c.AgentEndpoint == \"\" {\n\t\treturn xerrors.New(\"jaeger: either collector endpoint or agent endpoint must be configured\")\n\t}\n\tif c.ServiceName == \"\" {\n\t\treturn xerrors.New(\"jaeger: service name must not be blank\")\n\t}\n\n\treturn nil\n}", "func (c config) validate() error {\n\tif c.MinPort <= 0 || c.MaxPort <= 0 {\n\t\treturn errors.New(\"min Port and Max Port values are required\")\n\t}\n\tif c.MaxPort < c.MinPort {\n\t\treturn errors.New(\"max Port cannot be set less that the Min Port\")\n\t}\n\treturn nil\n}", "func (mgc Config) Validate() error {\n\tif mgc.User == \"\" {\n\t\treturn errors.New(\"Config.User is required\")\n\t}\n\tif mgc.Password == \"\" {\n\t\treturn errors.New(\"Config.Password is required\")\n\t}\n\tif mgc.AuthDB == \"\" {\n\t\treturn errors.New(\"Config.AuthDB is required\")\n\t}\n\tif mgc.Host == \"\" {\n\t\treturn errors.New(\"Config.Host is required\")\n\t}\n\tif mgc.DB == \"\" {\n\t\treturn errors.New(\"Config.DB is required\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate(log log.Logger) error {\n\tif err := c.SchemaConfig.Validate(); err != nil {\n\t\treturn errors.Wrap(err, \"invalid schema config\")\n\t}\n\tif err := c.StorageConfig.Validate(); err != nil {\n\t\treturn errors.Wrap(err, \"invalid storage config\")\n\t}\n\tif err := c.QueryRange.Validate(log); err != nil {\n\t\treturn errors.Wrap(err, \"invalid queryrange config\")\n\t}\n\tif err := c.TableManager.Validate(); err != nil {\n\t\treturn errors.Wrap(err, \"invalid tablemanager config\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif c.Transport == nil {\n\t\treturn util.Error(\"Transport is required\")\n\t}\n\tif c.ElectionTimeoutTicks == 0 {\n\t\treturn util.Error(\"ElectionTimeoutTicks must be non-zero\")\n\t}\n\tif c.HeartbeatIntervalTicks == 0 {\n\t\treturn util.Error(\"HeartbeatIntervalTicks must be non-zero\")\n\t}\n\tif c.TickInterval == 0 {\n\t\treturn util.Error(\"TickInterval must be non-zero\")\n\t}\n\treturn nil\n}", "func (config *Config) Validate() error {\n\tif len(config.NsqLookupdAddress) == 0 {\n\t\treturn fmt.Errorf(\"parameter NsqLookupdAddress missing\")\n\t}\n\tif len(config.NsqdAddress) == 0 {\n\t\treturn fmt.Errorf(\"parameter NsqdAddress missing\")\n\t}\n\tif len(config.BotName) == 0 {\n\t\treturn fmt.Errorf(\"parameter BotName missing\")\n\t}\n\tif 
len(config.BambooUrl) == 0 {\n\t\treturn fmt.Errorf(\"parameter BambooUrl missing\")\n\t}\n\tif len(config.BambooUsername) == 0 {\n\t\treturn fmt.Errorf(\"parameter BambooUsername missing\")\n\t}\n\tif len(config.BambooPassword) == 0 {\n\t\treturn fmt.Errorf(\"parameter AuthUrl missing\")\n\t}\n\tif len(config.BambooPassword) == 0 {\n\t\treturn fmt.Errorf(\"parameter AuthApplicationName missing\")\n\t}\n\tif len(config.BambooPassword) == 0 {\n\t\treturn fmt.Errorf(\"parameter AuthApplicationPassword missing\")\n\t}\n\tif len(config.BambooPassword) == 0 {\n\t\treturn fmt.Errorf(\"parameter BambooPassword missing\")\n\t}\n\tif len(config.Prefix) == 0 {\n\t\treturn fmt.Errorf(\"parameter Prefix missing\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\t// TODO(felix): complete validates\n\treturn nil\n}", "func (v *Config) Validate() error {\n\tctx := v.ctx\n\n\tif v.Log.Level == \"info\" {\n\t\tWarn.Println(ctx, \"info level hurts performance\")\n\t}\n\n\tif len(v.Stat.Disks) > 0 {\n\t\tWarn.Println(ctx, \"stat disks not support\")\n\t}\n\n\tif v.Workers < 0 || v.Workers > 64 {\n\t\treturn fmt.Errorf(\"workers must in [0, 64], actual is %v\", v.Workers)\n\t}\n\tif v.Listen <= 0 || v.Listen > 65535 {\n\t\treturn fmt.Errorf(\"listen must in (0, 65535], actual is %v\", v.Listen)\n\t}\n\tif v.ChunkSize < 128 || v.ChunkSize > 65535 {\n\t\treturn fmt.Errorf(\"chunk_size must in [128, 65535], actual is %v\", v.ChunkSize)\n\t}\n\n\tif v.Go.GcInterval < 0 || v.Go.GcInterval > 24*3600 {\n\t\treturn fmt.Errorf(\"go gc_interval must in [0, 24*3600], actual is %v\", v.Go.GcInterval)\n\t}\n\n\tif v.Log.Level != \"info\" && v.Log.Level != \"trace\" && v.Log.Level != \"warn\" && v.Log.Level != \"error\" {\n\t\treturn fmt.Errorf(\"log.leve must be info/trace/warn/error, actual is %v\", v.Log.Level)\n\t}\n\tif v.Log.Tank != \"console\" && v.Log.Tank != \"file\" {\n\t\treturn fmt.Errorf(\"log.tank must be console/file, actual is %v\", v.Log.Tank)\n\t}\n\tif v.Log.Tank == \"file\" && len(v.Log.File) == 0 {\n\t\treturn errors.New(\"log.file must not be empty for file tank\")\n\t}\n\n\tfor i, p := range v.Vhosts {\n\t\tif p.Name == \"\" {\n\t\t\treturn fmt.Errorf(\"the %v vhost is empty\", i)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c configuration) Validate() error {\n\tvar errs error\n\n\terrs = errors.Append(errs, c.Auth.Validate())\n\terrs = errors.Append(errs, c.Config.Validate())\n\n\tif c.Environment == \"\" {\n\t\terrs = errors.Append(errs, errors.New(\"environment is required\"))\n\t}\n\n\t// TODO: this config is only used here, so the validation is here too. 
Either the config or the validation should be moved somewhere else.\n\tif c.Distribution.PKE.Amazon.GlobalRegion == \"\" {\n\t\terrs = errors.Append(errs, errors.New(\"pke amazon global region is required\"))\n\t}\n\n\treturn errs\n}", "func (c *Config) Validate() error {\n\t// Resolve symlinks.\n\tfor i, p := range c.Paths {\n\t\tif evalPath, err := filepath.EvalSymlinks(p); err == nil {\n\t\t\tc.Paths[i] = evalPath\n\t\t}\n\t}\n\t// Sort and deduplicate.\n\tsort.Strings(c.Paths)\n\tc.Paths = deduplicate(c.Paths)\n\n\tvar errs multierror.Errors\n\tvar err error\n\nnextHash:\n\tfor _, ht := range c.HashTypes {\n\t\tht = HashType(strings.ToLower(string(ht)))\n\t\tfor _, validHash := range validHashes {\n\t\t\tif ht == validHash {\n\t\t\t\tcontinue nextHash\n\t\t\t}\n\t\t}\n\t\terrs = append(errs, errors.Errorf(\"invalid hash_types value '%v'\", ht))\n\t}\n\n\tc.MaxFileSizeBytes, err = humanize.ParseBytes(c.MaxFileSize)\n\tif err != nil {\n\t\terrs = append(errs, errors.Wrap(err, \"invalid max_file_size value\"))\n\t} else if c.MaxFileSizeBytes <= 0 {\n\t\terrs = append(errs, errors.Errorf(\"max_file_size value (%v) must be positive\", c.MaxFileSize))\n\t}\n\n\tc.ScanRateBytesPerSec, err = humanize.ParseBytes(c.ScanRatePerSec)\n\tif err != nil {\n\t\terrs = append(errs, errors.Wrap(err, \"invalid scan_rate_per_sec value\"))\n\t}\n\treturn errs.Err()\n}", "func (cfg *Config) Validate() error {\n\tif len(cfg.Scrapers) == 0 {\n\t\treturn errors.New(\"must specify at least one scraper\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif c.ServerConf.RestAPIPort < 0 || c.ServerConf.RestAPIPort > 65535 {\n\t\treturn errors.New(\"invalid restful port: \" + strconv.Itoa(c.ServerConf.RestAPIPort))\n\t}\n\n\t// remove \"/\"\n\tc.ZeekConf.LogDir = processDirName(c.ZeekConf.LogDir)\n\tc.ZeekConf.PcapDir = processDirName(c.ZeekConf.PcapDir)\n\tc.ZeekConf.ExtractedFileDir = processDirName(c.ZeekConf.ExtractedFileDir)\n\tc.SeaweedfsConf.ExtractedFileDir = processDirName(c.SeaweedfsConf.ExtractedFileDir)\n\n\tif c.SeaweedfsConf.Retries < 1 {\n\t\treturn errors.New(\"seaweedfs post retry times < 1\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tvar errs []error\n\n\tif len(c.Hosts) == 0 {\n\t\terrs = append(errs, fmt.Errorf(\"missing hosts\"))\n\t}\n\tif c.Port == \"\" {\n\t\terrs = append(errs, fmt.Errorf(\"missing port\"))\n\t}\n\n\treturn apierrors.NewAggregate(errs)\n}", "func (c *Config) Validate() {\n\tif c.Global.TempDir == \"\" {\n\t\tc.Global.TempDir = \"/tmp/\"\n\t} else if !strings.HasSuffix(c.Global.TempDir, \"/\") {\n\t\tc.Global.TempDir += \"/\"\n\t}\n\tif c.Imessage.QueueSize < 20 {\n\t\tc.Imessage.QueueSize = 20\n\t}\n}", "func (c *config) validate() []error {\n\tvalidationErrors := make([]error, 0)\n\tif c.MinPort <= 0 || c.MaxPort <= 0 {\n\t\tvalidationErrors = append(validationErrors, errors.New(\"min Port and Max Port values are required\"))\n\t}\n\tif c.MaxPort < c.MinPort {\n\t\tvalidationErrors = append(validationErrors, errors.New(\"max Port cannot be set less that the Min Port\"))\n\t}\n\tresourceErrors := validateResource(c.SidecarCPURequest, c.SidecarCPULimit, corev1.ResourceCPU)\n\tvalidationErrors = append(validationErrors, resourceErrors...)\n\tresourceErrors = validateResource(c.SidecarMemoryRequest, c.SidecarMemoryLimit, corev1.ResourceMemory)\n\tvalidationErrors = append(validationErrors, resourceErrors...)\n\treturn validationErrors\n}", "func (c Config) Validate() error {\n\treturn 
validation.ValidateStruct(&c,\n\t\tvalidation.Field(&c.AppMode, validation.Required),\n\t\tvalidation.Field(&c.AppName, validation.Required),\n\t\tvalidation.Field(&c.DBType, validation.Required),\n\t\tvalidation.Field(&c.DSN, validation.Required),\n\t\tvalidation.Field(&c.JWTSigningKey, validation.Required),\n\t\tvalidation.Field(&c.JWTExpiration, validation.Required),\n\t\tvalidation.Field(&c.MailSmtphost, validation.Required),\n\t\tvalidation.Field(&c.MailSmtpport, validation.Required),\n\t\tvalidation.Field(&c.MailUsername, validation.Required),\n\t\tvalidation.Field(&c.MailPassword, validation.Required),\n\t\tvalidation.Field(&c.AppFqdn, validation.Required),\n\t\tvalidation.Field(&c.HttpEntrypoint, validation.Required),\n\t\tvalidation.Field(&c.WebservName, validation.Required),\n\t\tvalidation.Field(&c.GoogleCredentialFile, validation.Required),\n\t\tvalidation.Field(&c.GoogleRedirectPath, validation.Required),\n\t\tvalidation.Field(&c.AppSecretKey, validation.Required),\n\t\tvalidation.Field(&c.BizName, validation.Required),\n\t\tvalidation.Field(&c.BizShortname, validation.Required),\n\t\tvalidation.Field(&c.BizEmail, validation.Required),\n\t\tvalidation.Field(&c.BizPhone),\n\t\tvalidation.Field(&c.BizPhone2),\n\t\tvalidation.Field(&c.BizLogo, validation.Required),\n\t)\n}", "func (c DbConfig) Validate() error {\n\t// Holds empty config fields\n\tempty := []string{}\n\n\t// Host\n\tif len(c.Host) == 0 {\n\t\tempty = append(empty, \"Host\")\n\t}\n\n\t// User\n\tif len(c.User) == 0 {\n\t\tempty = append(empty, \"User\")\n\t}\n\n\t// Password\n\tif len(c.Password) == 0 {\n\t\tempty = append(empty, \"Password\")\n\t}\n\n\t// Db\n\tif len(c.Db) == 0 {\n\t\tempty = append(empty, \"Db\")\n\t}\n\n\t// If any empty\n\tif len(empty) > 0 {\n\t\treturn fmt.Errorf(\"the db config fields: %s, were empty\",\n\t\t\tstrings.Join(empty, \",\"))\n\t}\n\n\t// All good\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.Host == \"\" {\n\t\treturn errors.New(\"cadence host is required\")\n\t}\n\n\tif c.Port == 0 {\n\t\treturn errors.New(\"cadence port is required\")\n\t}\n\n\tif c.Domain == \"\" {\n\t\treturn errors.New(\"cadence domain is required\")\n\t}\n\n\treturn nil\n}", "func (m *Config) Validate() error {\n\treturn m.validate(false)\n}", "func (cfg *Config) validate() error {\n\tif cfg.Range&^rangebits != 0 && cfg.Range != 1 {\n\t\treturn ErrBadRange\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif c.Addr == \"\" {\n\t\treturn fmt.Errorf(\"Address of the curator can not be empty\")\n\t}\n\treturn nil\n}", "func (c *Config) validate() error {\n\tif len(c.Port) == 0 {\n\t\treturn errors.New(\"missing port\")\n\t}\n\n\terr := c.BackendServerConfigs.validate()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"invalid backend servers configuration\")\n\t}\n\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.ServiceName == \"\" {\n\t\treturn xerrors.New(\"ocagent: service name must not be blank\")\n\t}\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.PublicKeyPath == \"\" {\n\t\treturn errors.New(\"Public Key Path is required\")\n\t}\n\n\treturn nil\n}", "func (c Config) Validate() error {\n\t// Check DbConfig\n\tif err := c.Db.Validate(); err != nil {\n\t\treturn fmt.Errorf(\"error verifying db config: %s\", err.Error())\n\t}\n\n\t// Check AuthConfig\n\tif err := c.Auth.Validate(); err != nil {\n\t\treturn fmt.Errorf(\"error verifying auth config: %s\", err.Error())\n\t}\n\n\t// All good\n\treturn nil\n}", "func (cfg *Config) Validate() error 
{\n\tchecks := []struct {\n\t\tbad bool\n\t\terrMsg string\n\t}{\n\t\t{cfg.AuthorizeURL == \"\", \"no authorizeURL specified\"},\n\t\t{cfg.TokenURL == \"\", \"no tokenURL specified\"},\n\t\t{cfg.ClientID == \"\", \"no clientID specified\"},\n\t\t{cfg.ClientSecret == \"\" && !cfg.AllowEmptyClientSecret, \"no clientSecret specified\"},\n\t\t{cfg.RedirectURL == \"\", \"no redirectURL specified\"},\n\t\t{cfg.SessionSecurityKey == \"\", \"no SessionSecurityKey specified\"},\n\t\t{cfg.APIServerURL == \"\", \"no apiServerURL specified\"},\n\t}\n\n\tfor _, check := range checks {\n\t\tif check.bad {\n\t\t\treturn fmt.Errorf(\"invalid config: %s\", check.errMsg)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tfor _, ci := range c.Converters {\n\t\tif ci.Pattern == \"\" {\n\t\t\treturn errors.New(\"[pattern] is required for a converter\")\n\t\t}\n\t\tif ci.MetricName == \"\" {\n\t\t\treturn errors.New(\"[metricName] is required for a converter\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *Configuration) Validate() error {\n\tif c.PeekMax < 0 {\n\t\treturn errors.New(\"The maximum peek depth for PipeScript must be > 0\")\n\t}\n\tif c.SplitMax < 0 {\n\t\treturn errors.New(\"The maximum data split size in PipeScript must be > 0\")\n\t}\n\tif c.StringMax < 0 {\n\t\treturn errors.New(\"The maximum string size in PipeScript must be > 0\")\n\t}\n\n\treturn nil\n}", "func (c *Config) validate() error {\n\tif err := validation.ValidateStruct(c); err != nil {\n\t\treturn err\n\t}\n\n\tif c.EnableBuiltInFiltering != nil && !*c.EnableBuiltInFiltering {\n\t\treturn errors.New(\"enableBuiltInFiltering must be true or unset, false is no longer supported\")\n\t}\n\n\tif _, err := url.Parse(c.IngestURL); err != nil {\n\t\treturn fmt.Errorf(\"%s is not a valid ingest URL: %v\", c.IngestURL, err)\n\t}\n\n\tif _, err := url.Parse(c.APIURL); err != nil {\n\t\treturn fmt.Errorf(\"%s is not a valid API URL: %v\", c.APIURL, err)\n\t}\n\n\tif _, err := url.Parse(c.EventEndpointURL); err != nil {\n\t\treturn fmt.Errorf(\"%s is not a valid event endpoint URL: %v\", c.EventEndpointURL, err)\n\t}\n\n\tif c.TraceEndpointURL != \"\" {\n\t\tif _, err := url.Parse(c.TraceEndpointURL); err != nil {\n\t\t\treturn fmt.Errorf(\"%s is not a valid trace endpoint URL: %v\", c.TraceEndpointURL, err)\n\t\t}\n\t}\n\n\tif err := c.Collectd.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\tfor i := range c.Monitors {\n\t\tif err := c.Monitors[i].Validate(); err != nil {\n\t\t\treturn fmt.Errorf(\"monitor config for type '%s' is invalid: %v\", c.Monitors[i].Type, err)\n\t\t}\n\t}\n\n\treturn c.Writer.Validate()\n}", "func (c *Config) Validate() liberr.Error {\n\tvar e = ErrorValidatorError.Error(nil)\n\n\tif err := libval.New().Struct(c); err != nil {\n\t\tif er, ok := err.(*libval.InvalidValidationError); ok {\n\t\t\te.Add(er)\n\t\t}\n\n\t\tfor _, er := range err.(libval.ValidationErrors) {\n\t\t\t//nolint #goerr113\n\t\t\te.Add(fmt.Errorf(\"config field '%s' is not validated by constraint '%s'\", er.Namespace(), er.ActualTag()))\n\t\t}\n\t}\n\n\tif !e.HasParent() {\n\t\te = nil\n\t}\n\n\treturn e\n}", "func (c Config) Validate() (err error) {\n\tvar fi os.FileInfo\n\n\t// validate key fingerprint\n\t_, err = HexStringToFingerprint(c.MasterKeyFingerprint)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// validate TLSCert\n\tif len(c.TLSCert) == 0 {\n\t\treturn errors.New(\"Missing config param: TLSCert\")\n\t}\n\tfi, err = os.Stat(c.TLSCert)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Config error in TLSCert '%s': %s\", 
c.TLSCert, err)\n\t}\n\tif fi.IsDir() {\n\t\treturn fmt.Errorf(\"Config error in TLSCert '%s': expected file path, got directory\", c.TLSCert)\n\t}\n\n\t// validate TLSKey\n\tif len(c.TLSKey) == 0 {\n\t\treturn errors.New(\"Missing config param: TLSKey\")\n\t}\n\tfi, err = os.Stat(c.TLSKey)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Config error in TLSKey '%s': %s\", c.TLSKey, err)\n\t}\n\tif fi.IsDir() {\n\t\treturn fmt.Errorf(\"Config error in TLSKey '%s': expected file path, got directory\", c.TLSKey)\n\t}\n\n\t// validate SecRing\n\tif len(c.SecRing) == 0 {\n\t\treturn errors.New(\"Missing config param: SecRing\")\n\t}\n\tfi, err = os.Stat(c.SecRing)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Config error in SecRing '%s': %s\", c.SecRing, err)\n\t}\n\tif fi.IsDir() {\n\t\treturn fmt.Errorf(\"Config error in SecRing '%s': expected file path, got directory\", c.SecRing)\n\t}\n\n\t// validate ProdSupportPubRing\n\tif len(c.ProdSupportPubRing) == 0 {\n\t\treturn errors.New(\"Missing config param: ProdSupportPubRing\")\n\t}\n\tfi, err = os.Stat(c.ProdSupportPubRing)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Config error in ProdSupportPubRing '%s': %s\", c.ProdSupportPubRing, err)\n\t}\n\tif fi.IsDir() {\n\t\treturn fmt.Errorf(\"Config error in ProdSupportPubRing '%s': expected file path, got directory\", c.ProdSupportPubRing)\n\t}\n\n\t// validate DataRoot\n\tif len(c.DataRoot) == 0 {\n\t\treturn errors.New(\"Missing config param: DataRoot\")\n\t}\n\tfi, err = os.Stat(c.DataRoot)\n\tif err != nil {\n\t\t// doesn't exist... can we create it?\n\t\tif err = os.MkdirAll(c.DataRoot, 0744); err != nil {\n\t\t\treturn fmt.Errorf(\"Config error in DataRoot '%s': %s\", c.DataRoot, err)\n\t\t}\n\t} else {\n\t\tif !fi.IsDir() {\n\t\t\treturn fmt.Errorf(\"Config error in DataRoot '%s': expected directory, got file path\", c.DataRoot)\n\t\t}\n\t}\n\n\t// validate ProdSupportDir\n\tif len(c.ProdSupportDir) == 0 {\n\t\treturn errors.New(\"Missing config param: ProdSupportDir\")\n\t}\n\tfi, err = os.Stat(c.ProdSupportDir)\n\tif err != nil {\n\t\t// doesn't exist... can we create it?\n\t\tif err = os.MkdirAll(c.ProdSupportDir, 0744); err != nil {\n\t\t\treturn fmt.Errorf(\"Config error in ProdSupportDir '%s': %s\", c.ProdSupportDir, err)\n\t\t}\n\t} else {\n\t\tif !fi.IsDir() {\n\t\t\treturn fmt.Errorf(\"Config error in ProdSupportDir '%s': expected directory, got file path\", c.ProdSupportDir)\n\t\t}\n\t}\n\n\t// validate KeyRoot\n\tif len(c.KeyRoot) == 0 {\n\t\treturn errors.New(\"Missing config param: KeyRoot\")\n\t}\n\tfi, err = os.Stat(c.KeyRoot)\n\tif err != nil {\n\t\t// doesn't exist... can we create it?\n\t\tif err = os.MkdirAll(c.KeyRoot, 0744); err != nil {\n\t\t\treturn fmt.Errorf(\"Config error in KeyRoot '%s': %s\", c.KeyRoot, err)\n\t\t}\n\t} else {\n\t\tif !fi.IsDir() {\n\t\t\treturn fmt.Errorf(\"Config error in KeyRoot '%s': expected directory, got file path\", c.KeyRoot)\n\t\t}\n\t}\n\n\t// validate MetaRoot\n\tif len(c.MetaRoot) == 0 {\n\t\treturn errors.New(\"Missing config param: MetaRoot\")\n\t}\n\tfi, err = os.Stat(c.MetaRoot)\n\tif err != nil {\n\t\t// doesn't exist... 
can we create it?\n\t\tif err = os.MkdirAll(c.MetaRoot, 0744); err != nil {\n\t\t\treturn fmt.Errorf(\"Config error in MetaRoot '%s': %s\", c.MetaRoot, err)\n\t\t}\n\t} else {\n\t\tif !fi.IsDir() {\n\t\t\treturn fmt.Errorf(\"Config error in MetaRoot '%s': expected directory, got file path\", c.MetaRoot)\n\t\t}\n\t}\n\n\t// validate HTTPLog\n\tif len(c.HTTPLog) > 0 {\n\t\tfi, err = os.Stat(filepath.Dir(c.HTTPLog))\n\t\tif err != nil {\n\t\t\t// doesn't exist... can we create it?\n\t\t\tif err = os.MkdirAll(filepath.Dir(c.HTTPLog), 0744); err != nil {\n\t\t\t\treturn fmt.Errorf(\"Config error in HTTPLog '%s': %s\", c.HTTPLog, err)\n\t\t\t}\n\t\t}\n\t}\n\n\t// validate HtpasswdFile\n\tif len(c.HtpasswdFile) == 0 {\n\t\treturn errors.New(\"Missing config param: HtpasswdFile\")\n\t}\n\tfi, err = os.Stat(c.HtpasswdFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Config error in HtpasswdFile '%s': %s\", c.HtpasswdFile, err)\n\t}\n\tif fi.IsDir() {\n\t\treturn fmt.Errorf(\"Config error in HtpasswdFile '%s': expected file path, got directory\", c.HtpasswdFile)\n\t}\n\n\tif len(c.MasterKeyPassphrase) == 0 {\n\t\tlog.Println(\"no passphrase specified for secure keyring\")\n\t}\n\n\treturn nil\n}", "func (c *ServiceConfig) Validate() error {\n\tif c.Name == \"\" {\n\t\treturn errors.New(\"Name must be set\")\n\t}\n\n\tif c.ID == \"\" {\n\t\treturn errors.New(\"ID must be set\")\n\t}\n\n\tif c.Network == \"\" {\n\t\treturn errors.New(\"nginx.network must be set\")\n\t}\n\n\t//calculate max parameter length\n\tmax := len(c.Ports)\n\n\tif l := len(c.ListenIPs); l > max {\n\t\tmax = l\n\t}\n\tif l := len(c.ListenPorts); l > max {\n\t\tmax = l\n\t}\n\tif l := len(c.ListenProtos); l > max {\n\t\tmax = l\n\t}\n\n\t//verify parameter lengths\n\tif l := len(c.Ports); l == 1 {\n\t\tfor i := 1; i < max; i++ {\n\t\t\tc.Ports = append(c.Ports, c.Ports[0])\n\t\t}\n\t} else if l < max {\n\t\treturn fmt.Errorf(\"nginx.port length mismatch. Expected %d or 1, got %d\", max, l)\n\t}\n\n\tif l := len(c.ListenIPs); l == 1 {\n\t\tfor i := 1; i < max; i++ {\n\t\t\tc.ListenIPs = append(c.ListenIPs, c.ListenIPs[0])\n\t\t}\n\t} else if l < max {\n\t\treturn fmt.Errorf(\"nginx.listenIP length mismatch. Expected %d or 1, got %d\", max, l)\n\t}\n\n\tif l := len(c.ListenPorts); l == 1 {\n\t\tfor i := 1; i < max; i++ {\n\t\t\tc.ListenPorts = append(c.ListenPorts, c.ListenPorts[0])\n\t\t}\n\t} else if l < max {\n\t\treturn fmt.Errorf(\"nginx.listenPort length mismatch. Expected %d or 1, got %d\", max, l)\n\t}\n\n\tif l := len(c.ListenProtos); l == 1 {\n\t\tfor i := 1; i < max; i++ {\n\t\t\tc.ListenProtos = append(c.ListenProtos, c.ListenProtos[0])\n\t\t}\n\t} else if l < max {\n\t\treturn fmt.Errorf(\"nginx.listenProto length mismatch. 
Expected %d or 1, got %d\", max, l)\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif c.RNG.Seed == 0 {\n\t\tc.RNG.Seed = time.Now().Unix()\n\t}\n\n\tif c.Time.TicksPerDay < 1 {\n\t\treturn fmt.Errorf(\"time.ticksPerDay must be greater than 1\")\n\t}\n\n\tif c.Time.TotalDays < 1 {\n\t\treturn fmt.Errorf(\"time.totalDays must be greater than 1\")\n\t}\n\n\tif c.Arena.Width < 1 {\n\t\treturn fmt.Errorf(\"arena.width must be greater than 1\")\n\t}\n\n\tif c.Arena.Height < 1 {\n\t\treturn fmt.Errorf(\"arena.height must be greater than 1\")\n\t}\n\n\tif c.Arena.MajorX > c.Arena.Height {\n\t\treturn fmt.Errorf(\"arena.majorX cannot be more than arena.height (%d)\", c.Arena.Height)\n\t}\n\n\tif c.Arena.MajorY > c.Arena.Width {\n\t\treturn fmt.Errorf(\"arena.majorY cannot be more than arena.width (%d)\", c.Arena.Width)\n\t}\n\n\tmoralSum := c.Moral.MajorMajorLow + c.Moral.MajorMinorLow + c.Moral.MinorMinorLow\n\tif moralSum != 100 && (c.Arena.MajorX > 0 || c.Arena.MajorY > 0) {\n\t\treturn fmt.Errorf(\"moral context percentages must add up to 100%% (currently: %d%%)\", moralSum)\n\t}\n\n\tworkspaceSum := c.Workspace.MajorMajorLow + c.Workspace.MajorMajorHigh +\n\t\tc.Workspace.MajorMinorLow + c.Workspace.MajorMinorHigh +\n\t\tc.Workspace.MinorMinorLow + c.Workspace.MinorMinorHigh\n\tif workspaceSum != 100 && (c.Arena.MajorX > 0 || c.Arena.MajorY > 0) {\n\t\treturn fmt.Errorf(\"workspace distribution percentages must add up to 100%% (currently: %d%%)\", workspaceSum)\n\t}\n\n\tif c.Arena.MajorX == 0 && c.Arena.MajorY == 0 {\n\t\tif c.Moral.MajorMajorLow > 0 {\n\t\t\treturn fmt.Errorf(\"moral.majorMajorLow cannot be greater than 0 with no major streets\")\n\t\t}\n\t\tif c.Moral.MajorMinorLow > 0 {\n\t\t\treturn fmt.Errorf(\"moral.majorMinorLow cannot be greater than 0 with no major streets\")\n\t\t}\n\n\t\tif c.Workspace.MajorMajorLow > 0 {\n\t\t\treturn fmt.Errorf(\"workspace.majorMajorLow cannot be greater than 0 with no major streets\")\n\t\t}\n\n\t\tif c.Workspace.MajorMajorHigh > 0 {\n\t\t\treturn fmt.Errorf(\"workspace.majorMajorHigh cannot be greater than 0 with no major streets\")\n\t\t}\n\n\t\tif c.Workspace.MajorMinorLow > 0 {\n\t\t\treturn fmt.Errorf(\"workspace.majorMinorLow cannot be greater than 0 with no major streets\")\n\t\t}\n\n\t\tif c.Workspace.MajorMinorHigh > 0 {\n\t\t\treturn fmt.Errorf(\"workspace.majorMinorHigh cannot be greater than 0 with no major streets\")\n\t\t}\n\t}\n\n\tif c.Economy.Unemployment > 100 {\n\t\treturn fmt.Errorf(\"economy.unemployment cannot be more than 100%%\")\n\t}\n\n\tif c.Economy.HiringRate > 100 {\n\t\treturn fmt.Errorf(\"economy.hiringRate cannot be more than 100%%\")\n\t}\n\n\tif c.Economy.FiringRate > 100 {\n\t\treturn fmt.Errorf(\"economy.firingRate cannot be more than 100%%\")\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tvar err error\n\tif c.Endpoint == \"\" {\n\t\terr = multierr.Append(err, errors.New(\"no manager endpoint was specified\"))\n\t\treturn err\n\t}\n\n\tres, err := url.Parse(c.Endpoint)\n\tif err != nil {\n\t\terr = multierr.Append(err, fmt.Errorf(\"unable to parse url %s: %w\", c.Endpoint, err))\n\t\treturn err\n\t}\n\n\tif res.Scheme != \"http\" && res.Scheme != \"https\" {\n\t\terr = multierr.Append(err, errors.New(\"url scheme must be http or https\"))\n\t}\n\n\tif c.Username == \"\" {\n\t\terr = multierr.Append(err, errors.New(\"username not provided and is required\"))\n\t}\n\n\tif c.Password == \"\" {\n\t\terr = multierr.Append(err, errors.New(\"password not provided and is 
required\"))\n\t}\n\treturn err\n}", "func Validate(cfg *Config) error {\n\tif cfg == nil {\n\t\treturn errors.New(\"config needs to be defined\")\n\t}\n\n\tvar result *multierror.Error\n\tif cfg.HostProvider == \"\" {\n\t\tresult = multierror.Append(result, errors.New(\"a host provider needs to be provided\"))\n\t}\n\tif cfg.BaseClusterCloudprovider == \"\" {\n\t\tresult = multierror.Append(result, errors.New(\"the cloudprovider of the hostcluster needs to be defined\"))\n\t}\n\tif cfg.Shoots.DefaultTest == nil {\n\t\tresult = multierror.Append(result, errors.New(\"a default test needs to be defined\"))\n\t}\n\n\tif cfg.Shoots.Namespace == \"\" {\n\t\tresult = multierror.Append(result, errors.New(\"the shoot project namespace has to be defined\"))\n\t}\n\n\tif cfg.Gardener.Version == \"\" && cfg.Gardener.Commit == \"\" {\n\t\tresult = multierror.Append(result, errors.New(\"a gardener version or commit has to be defined\"))\n\t}\n\n\tif len(cfg.GardenerExtensions) == 0 {\n\t\tresult = multierror.Append(result, errors.New(\"the gardener extensions have to be defined\"))\n\t}\n\n\treturn util.ReturnMultiError(result)\n}", "func (config *Config) Validate() error {\n\t// We need to make sure that the Mode strings are all valid.d\n\tvalid := map[string]bool{\n\t\t\"None\": true,\n\t\t\"Day\": true,\n\t\t\"Instance\": true,\n\t}\n\tfor name, log := range config.Logging {\n\t\tif _, present := valid[log.Mode]; !present {\n\t\t\treturn errors.New(fmt.Sprintf(\"[%s] \\\"%s\\\" is not a valid mode.\", name, log.Mode))\n\t\t}\n\t\tstat, err := os.Stat(log.LogDirectory)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif !stat.IsDir() {\n\t\t\treturn errors.New(fmt.Sprintf(\"[%s] \\\"%s\\\" is not a directory.\", name, log.LogDirectory))\n\t\t}\n\t}\n\treturn nil\n}", "func (cfg Config) Validate() error {\n\treturn validation.ValidateStruct(\n\t\t&cfg,\n\t\tvalidation.Field(&cfg.NodeID, validation.Required),\n\t\tvalidation.Field(&cfg.ListenAddr, validation.Required, is.Host),\n\t\tvalidation.Field(&cfg.DataDir, validation.Required),\n\t\tvalidation.Field(&cfg.CompactionEnabled, validation.Required),\n\t\tvalidation.Field(&cfg.Peers),\n\t)\n}", "func (c AuthConfig) Validate() []error {\n\tvar errs []error\n\n\tif len(c.JwksURI) == 0 {\n\t\terrs = append(errs, errors.Errorf(\"AuthConfig requires a non-empty JwksURI config value\"))\n\t}\n\n\treturn errs\n}", "func (c *Config) validate() error {\n\t/* Metrics */\n\n\t// Users can omit the metrics block entirely to disable metrics reporting.\n\tif c.Metrics != nil && c.Metrics.Statsd != nil {\n\t\tif c.Metrics.Statsd.Address == \"\" {\n\t\t\treturn fmt.Errorf(\"config: missing metrics statsd address\")\n\t\t}\n\n\t\tif c.Metrics.Statsd.SampleRate < 0 || c.Metrics.Statsd.SampleRate > 1 {\n\t\t\treturn fmt.Errorf(\"config: statsd sample rate must be in range [0.0, 1.0]\")\n\t\t}\n\t}\n\n\t/* Listener */\n\n\tif c.Listener == nil {\n\t\treturn fmt.Errorf(\"config: missing top-level listener config key\")\n\t}\n\n\tif c.Listener.TCP == nil && c.Listener.UDP == nil {\n\t\treturn fmt.Errorf(\"config: at least one TCP or UDP listener must be specified\")\n\t}\n\n\tif c.Listener.TCP != nil && c.Listener.TCP.Address == \"\" {\n\t\treturn fmt.Errorf(\"config: missing TCP server listening address\")\n\t}\n\n\tif c.Listener.UDP != nil && c.Listener.UDP.Address == \"\" {\n\t\treturn fmt.Errorf(\"config: missing UDP server listening address\")\n\t}\n\n\t/* Upstream */\n\n\tif c.Upstream == nil {\n\t\treturn fmt.Errorf(\"config: missing top-level upstream config 
key\")\n\t}\n\n\t// Validate the load balancing policy, only if provided (empty signifies default).\n\tif c.Upstream.LoadBalancingPolicy != \"\" {\n\t\tif _, ok := network.ParseLoadBalancingPolicy(c.Upstream.LoadBalancingPolicy); !ok {\n\t\t\treturn fmt.Errorf(\n\t\t\t\t\"config: unknown load balancing policy: policy=%s\",\n\t\t\t\tc.Upstream.LoadBalancingPolicy,\n\t\t\t)\n\t\t}\n\t}\n\n\tif len(c.Upstream.Servers) == 0 {\n\t\treturn fmt.Errorf(\"config: no upstream servers specified\")\n\t}\n\n\tfor idx, server := range c.Upstream.Servers {\n\t\tif server.Address == \"\" {\n\t\t\treturn fmt.Errorf(\"config: missing server address: idx=%d\", idx)\n\t\t}\n\n\t\tif server.ServerName == \"\" {\n\t\t\treturn fmt.Errorf(\"config: missing server TLS hostname: idx=%d\", idx)\n\t\t}\n\t}\n\n\treturn nil\n}", "func Validate(config *Config) []error {\n\t// TODO: Write validation logic later\n\treturn []error{}\n}", "func (c *Config) validate() error {\n\tif c.iface == \"\" {\n\t\treturn errors.New(\"the iface must be set\")\n\t}\n\tif c.protocal != \"\" && c.protocal != \"tcp\" && c.protocal != \"udp\" {\n\t\treturn errors.New(\"the protocl must be set to tcp or udp or both\")\n\t}\n\treturn nil\n}", "func (c *Config) valid() error {\n\tif c.Score == nil {\n\t\treturn errors.New(\"Expected Score to not be nil\")\n\t}\n\tif c.Sampler == nil {\n\t\treturn errors.New(\"Expected Sampler to not be nil\")\n\t}\n\treturn nil\n}", "func (cfg *Config) Validate() error {\n\tif cfg.MetricTTL <= 0 {\n\t\treturn errors.New(\"disk.metric_ttl is invalid\")\n\t}\n\n\tif cfg.MetricType.String() == \"\" {\n\t\treturn errors.New(\"disk.metric_type is invalid\")\n\t}\n\treturn nil\n}", "func (cfg *LoggingConfig) Validate() []error {\n\treturn []error{}\n}", "func (m *StatsConfig) Validate() error {\n\treturn m.validate(false)\n}", "func (cfg *Config) Validate() error {\n\tif cfg.Key == \"\" {\n\t\treturn errors.New(\"missing DigitalOcean access key\")\n\t}\n\n\treturn nil\n}", "func (cfg *Config) Validate() error {\n\tif cfg.PushPreCheck == nil {\n\t\treturn fmt.Errorf(\"PreCheck is required\")\n\t}\n\tif cfg.PushFinalCheck == nil {\n\t\treturn fmt.Errorf(\"FinalCheck is required\")\n\t}\n\treturn nil\n}", "func (cfg *Config) Validate() error {\n\tif len(cfg.Key) == 0 {\n\t\treturn trace.BadParameter(`etcd: missing \"prefix\" parameter`)\n\t}\n\t// Make sure the prefix starts with a '/'.\n\tif cfg.Key[0] != '/' {\n\t\tcfg.Key = \"/\" + cfg.Key\n\t}\n\tif len(cfg.Nodes) == 0 {\n\t\treturn trace.BadParameter(`etcd: missing \"peers\" parameter`)\n\t}\n\tif !cfg.Insecure {\n\t\tif cfg.TLSCAFile == \"\" {\n\t\t\treturn trace.BadParameter(`etcd: missing \"tls_ca_file\" parameter`)\n\t\t}\n\t}\n\tif cfg.BufferSize == 0 {\n\t\tcfg.BufferSize = backend.DefaultBufferCapacity\n\t}\n\tif cfg.DialTimeout == 0 {\n\t\tcfg.DialTimeout = apidefaults.DefaultIOTimeout\n\t}\n\tif cfg.PasswordFile != \"\" {\n\t\tout, err := os.ReadFile(cfg.PasswordFile)\n\t\tif err != nil {\n\t\t\treturn trace.ConvertSystemError(err)\n\t\t}\n\t\t// trim newlines as passwords in files tend to have newlines\n\t\tcfg.Password = strings.TrimSpace(string(out))\n\t}\n\n\tif cfg.ClientPoolSize < 1 {\n\t\tcfg.ClientPoolSize = defaultClientPoolSize\n\t}\n\treturn nil\n}", "func (c Config) Validate() error {\n\tif c.ID == \"\" {\n\t\treturn fmt.Errorf(\"consul discovery must be given a ID\")\n\t}\n\tif strings.TrimSpace(c.Address) == \"\" {\n\t\treturn fmt.Errorf(\"consul discovery requires a server address\")\n\t}\n\treturn nil\n}", "func (cc *CollectdConfig) 
Validate() error {\n\tif !validCollectdLogLevels.Has(cc.LogLevel) {\n\t\treturn fmt.Errorf(\"invalid collectd log level %s, valid choices are %v\",\n\t\t\tcc.LogLevel, validCollectdLogLevels)\n\t}\n\n\treturn nil\n}", "func (c *Config) Valid() error {\n\tif len(c.Servers) == 0 {\n\t\treturn fmt.Errorf(\"No servers in config\")\n\t}\n\tfor _, s := range c.Servers {\n\t\terr := s.Valid()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\t// we'll first validate the structure and values of the config file\n\tfile := c.viper.GetString(configFile)\n\tvar out interface{}\n\tb, err := os.ReadFile(file)\n\tif err != nil {\n\t\treturn err\n\t}\n\tswitch filepath.Ext(file) {\n\tcase \".yaml\", \".yml\":\n\t\terr = yaml.Unmarshal(b, &out)\n\tcase \".json\":\n\t\terr = json.Unmarshal(b, &out)\n\tdefault:\n\t\treturn fmt.Errorf(\"%s is not a supported config file extension\", filepath.Ext(file))\n\t}\n\tif err != nil {\n\t\treturn fmt.Errorf(\"couldn't read the config file: %v\", err)\n\t}\n\t// validate config file content\n\tvalid, errs := validate(interpolateSchema(), out)\n\tif !valid || len(errs) > 0 {\n\t\treturn fmt.Errorf(\"invalid config: %v\", multierror.Wrap(errs...))\n\t}\n\t// now validate the Viper config flags\n\tvalid, errs = validate(interpolateSchema(), c.viper.AllSettings())\n\tif !valid || len(errs) > 0 {\n\t\treturn fmt.Errorf(\"invalid config: %v\", multierror.Wrap(errs...))\n\t}\n\treturn nil\n}", "func (t TaskConfig) validate() error {\n\tvar err error\n\tif err = t.Platform.validate(); err != nil {\n\t\treturn fmt.Errorf(`validate \"platform\": %w`, err)\n\t}\n\tif err = t.Count.validate(); err != nil {\n\t\treturn fmt.Errorf(`validate \"count\": %w`, err)\n\t}\n\tif err = t.ExecuteCommand.validate(); err != nil {\n\t\treturn fmt.Errorf(`validate \"exec\": %w`, err)\n\t}\n\tif err = t.Storage.validate(); err != nil {\n\t\treturn fmt.Errorf(`validate \"storage\": %w`, err)\n\t}\n\tfor n, v := range t.Variables {\n\t\tif err := v.validate(); err != nil {\n\t\t\treturn fmt.Errorf(`validate %q \"variables\": %w`, n, err)\n\t\t}\n\t}\n\tfor _, v := range t.Secrets {\n\t\tif err := v.validate(); err != nil {\n\t\t\treturn fmt.Errorf(`validate \"secret\": %w`, err)\n\t\t}\n\t}\n\tif t.EnvFile != nil {\n\t\tenvFile := aws.StringValue(t.EnvFile)\n\t\tif filepath.Ext(envFile) != envFileExt {\n\t\t\treturn fmt.Errorf(\"environment file %s must have a %s file extension\", envFile, envFileExt)\n\t\t}\n\t}\n\treturn nil\n}", "func validateConfig() {\n\tvalidators := []*knf.Validator{\n\t\t{MAIN_RUN_USER, knfv.Empty, nil},\n\t\t{MAIN_RUN_GROUP, knfv.Empty, nil},\n\t\t{PATHS_WORKING_DIR, knfv.Empty, nil},\n\t\t{PATHS_HELPER_DIR, knfv.Empty, nil},\n\t\t{PATHS_SYSTEMD_DIR, knfv.Empty, nil},\n\t\t{PATHS_UPSTART_DIR, knfv.Empty, nil},\n\t\t{DEFAULTS_NPROC, knfv.Empty, nil},\n\t\t{DEFAULTS_NOFILE, knfv.Empty, nil},\n\t\t{DEFAULTS_RESPAWN_COUNT, knfv.Empty, nil},\n\t\t{DEFAULTS_RESPAWN_INTERVAL, knfv.Empty, nil},\n\t\t{DEFAULTS_KILL_TIMEOUT, knfv.Empty, nil},\n\n\t\t{DEFAULTS_NPROC, knfv.Less, 0},\n\t\t{DEFAULTS_NOFILE, knfv.Less, 0},\n\t\t{DEFAULTS_RESPAWN_COUNT, knfv.Less, 0},\n\t\t{DEFAULTS_RESPAWN_INTERVAL, knfv.Less, 0},\n\t\t{DEFAULTS_KILL_TIMEOUT, knfv.Less, 0},\n\n\t\t{MAIN_RUN_USER, knfs.User, nil},\n\t\t{MAIN_RUN_GROUP, knfs.Group, nil},\n\n\t\t{PATHS_WORKING_DIR, knff.Perms, \"DRWX\"},\n\t\t{PATHS_HELPER_DIR, knff.Perms, \"DRWX\"},\n\t}\n\n\tif knf.GetB(LOG_ENABLED, true) {\n\t\tvalidators = 
append(validators,\n\t\t\t&knf.Validator{LOG_DIR, knfv.Empty, nil},\n\t\t\t&knf.Validator{LOG_FILE, knfv.Empty, nil},\n\t\t\t&knf.Validator{LOG_DIR, knff.Perms, \"DWX\"},\n\t\t)\n\t}\n\n\terrs := knf.Validate(validators)\n\n\tif len(errs) != 0 {\n\t\tprintError(\"Errors while configuration validation:\")\n\n\t\tfor _, err := range errs {\n\t\t\tprintError(\" - %v\", err)\n\t\t}\n\n\t\tos.Exit(1)\n\t}\n}", "func (cfg *Config) Validate() error {\n\tif !cfg.LogDataEnabled && !cfg.ProfilingDataEnabled {\n\t\treturn errors.New(`either \"log_data_enabled\" or \"profiling_data_enabled\" has to be true`)\n\t}\n\tif cfg.HTTPClientSettings.Endpoint == \"\" {\n\t\treturn errors.New(`requires a non-empty \"endpoint\"`)\n\t}\n\t_, err := cfg.getURL()\n\tif err != nil {\n\t\treturn fmt.Errorf(`invalid \"endpoint\": %w`, err)\n\t}\n\tif cfg.Token == \"\" {\n\t\treturn errors.New(`requires a non-empty \"token\"`)\n\t}\n\n\tif cfg.MaxContentLengthLogs > maxContentLengthLogsLimit {\n\t\treturn fmt.Errorf(`requires \"max_content_length_logs\" <= %d`, maxContentLengthLogsLimit)\n\t}\n\n\tif cfg.MaxContentLengthMetrics > maxContentLengthMetricsLimit {\n\t\treturn fmt.Errorf(`requires \"max_content_length_metrics\" <= %d`, maxContentLengthMetricsLimit)\n\t}\n\n\tif cfg.MaxContentLengthTraces > maxContentLengthTracesLimit {\n\t\treturn fmt.Errorf(`requires \"max_content_length_traces\" <= %d`, maxContentLengthTracesLimit)\n\t}\n\n\tif cfg.MaxEventSize > maxMaxEventSize {\n\t\treturn fmt.Errorf(`requires \"max_event_size\" <= %d`, maxMaxEventSize)\n\t}\n\n\tif err := cfg.QueueSettings.Validate(); err != nil {\n\t\treturn fmt.Errorf(\"sending_queue settings has invalid configuration: %w\", err)\n\t}\n\treturn nil\n}", "func (config appConfig) Validate() error {\n\treturn validator.New().Struct(&config)\n}", "func (c *Config) Validate() (err error) {\n\terr = validation.ValidateStruct(c,\n\t\tvalidation.Field(&c.GithubAPIToken, validation.Required),\n\t\tvalidation.Field(&c.GithubOrganization, validation.Required),\n\t)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Validate Github Team exists\n\tif c.GithubTeamName == \"\" && c.GithubTeamID == 0 {\n\t\terr = errors.New(\"Team name or Team id should be specified\")\n\t}\n\n\treturn\n}", "func (c *configuration) validateConfig() error {\n\tif c.Provider == \"\" {\n\t\treturn errors.New(msgConfigNoProvider)\n\t}\n\n\tif len(c.Servers) == 0 {\n\t\treturn errors.New(msgConfigNoServers)\n\t}\n\n\tfor i, srv := range c.Servers {\n\t\tif srv.FabricIface == \"\" {\n\t\t\treturn errors.Errorf(\n\t\t\t\tmsgConfigServerNoIface+\" for I/O service %d\", i)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *UnparsedConf) Validate() map[string]string {\n\terrors := make(map[string]string)\n\tif c.NameServer == \"\" {\n\t\terrors[\"name_server\"] = \"This field is required.\"\n\t}\n\treturn errors\n}", "func (c *Config) Validate() error {\n\tvar allErrs error\n\n\tif c.Endpoint == \"\" {\n\t\treturn multierr.Append(allErrs, errEmptyEndpoint)\n\t}\n\n\thost, portStr, err := net.SplitHostPort(c.Endpoint)\n\tif err != nil {\n\t\treturn multierr.Append(allErrs, fmt.Errorf(\"%w: %s\", errBadEndpoint, err.Error()))\n\t}\n\n\tif host == \"\" {\n\t\tallErrs = multierr.Append(allErrs, errBadEndpoint)\n\t}\n\n\tport, err := strconv.ParseInt(portStr, 10, 32)\n\tif err != nil {\n\t\tallErrs = multierr.Append(allErrs, fmt.Errorf(\"%w: %s\", errBadPort, err.Error()))\n\t}\n\n\tif port < 0 || port > 65535 {\n\t\tallErrs = multierr.Append(allErrs, fmt.Errorf(\"%w: %d\", errBadPort, port))\n\t}\n\n\tif 
c.Username != \"\" && c.Password == \"\" {\n\t\tallErrs = multierr.Append(allErrs, errEmptyPassword)\n\t}\n\n\tif c.Password != \"\" && c.Username == \"\" {\n\t\tallErrs = multierr.Append(allErrs, errEmptyUsername)\n\t}\n\tif c.Timeout.Milliseconds() < 0 {\n\t\tallErrs = multierr.Append(allErrs, fmt.Errorf(\"%w: must be positive\", errNegativeTimeout))\n\t}\n\n\tif c.TLS != nil {\n\t\t_, err := c.TLS.LoadTLSConfig()\n\t\tif err != nil {\n\t\t\tallErrs = multierr.Append(allErrs, fmt.Errorf(\"%w: %s\", errFailedTLSLoad, err.Error()))\n\t\t}\n\t}\n\n\tif c.TLS != nil && c.TLSName == \"\" {\n\t\tallErrs = multierr.Append(allErrs, fmt.Errorf(\"%w: when using TLS\", errEmptyEndpointTLSName))\n\t}\n\n\treturn allErrs\n}", "func (config *Config) validateConfig() error {\n\tfor _, element := range config.AllowedEntries {\n\t\tif element.Width <= 0 && element.Height <= 0 {\n\t\t\treturn fmt.Errorf(\"The width and height of the configuration element with name \\\"%s\\\" are invalid.\", element.Name)\n\t\t}\n\n\t\tif element.Name == \"\" {\n\t\t\treturn fmt.Errorf(\"Name must be set\")\n\t\t}\n\n\t\tif element.Type == \"\" {\n\t\t\treturn fmt.Errorf(\"Type must be set\")\n\t\t}\n\n\t\ttypes := paint.GetAvailableTypes()\n\t\tif _, found := types[element.Type]; !found {\n\t\t\treturn fmt.Errorf(\"Type must be either %s, %s or %s at element \\\"%s\\\"\", paint.TypeCrop, paint.TypeResize, paint.TypeFit, element.Name)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tlogrus.Trace(\"validating config configuration\")\n\n\t// verify server is provided\n\tif len(c.Server) == 0 {\n\t\treturn fmt.Errorf(\"no config server provided\")\n\t}\n\n\t// check to make sure it's a valid url\n\t_, err := url.ParseRequestURI(c.Server)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"%s is not a valid url\", c.Server)\n\t}\n\n\t// verify token is provided\n\tif len(c.Token) == 0 {\n\t\treturn fmt.Errorf(\"no config token provided\")\n\t}\n\n\treturn nil\n}", "func (c *Config) Validate() error {\n\tif c.Address == \"\" {\n\t\treturn errors.New(\"the address of mockserver is required\")\n\t}\n\treturn nil\n}", "func (c *Config) Validate() error {\n\treturn c.validateTemplates()\n}", "func (c *Config) Validate() error {\n\tvar errs error\n\tif c.Tenant.RemoteServiceAPI == \"\" {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"tenant.remote_service_api is required\"))\n\t}\n\tif c.Tenant.InternalAPI == \"\" && c.Analytics.FluentdEndpoint == \"\" && c.Analytics.CredentialsJSON == nil {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"tenant.internal_api or tenant.analytics.fluentd_endpoint is required if no service account\"))\n\t}\n\tif c.Tenant.OrgName == \"\" {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"tenant.org_name is required\"))\n\t}\n\tif c.Tenant.EnvName == \"\" {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"tenant.env_name is required\"))\n\t}\n\tif (c.Global.TLS.CertFile != \"\" || c.Global.TLS.KeyFile != \"\") &&\n\t\t(c.Global.TLS.CertFile == \"\" || c.Global.TLS.KeyFile == \"\") {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"global.tls.cert_file and global.tls.key_file are both required if either are present\"))\n\t}\n\tif (c.Analytics.TLS.CAFile != \"\" || c.Analytics.TLS.CertFile != \"\" || c.Analytics.TLS.KeyFile != \"\") &&\n\t\t(c.Analytics.TLS.CAFile == \"\" || c.Analytics.TLS.CertFile == \"\" || c.Analytics.TLS.KeyFile == \"\") {\n\t\terrs = multierror.Append(errs, fmt.Errorf(\"all analytics.tls options are required if any are present\"))\n\t}\n\treturn errs\n}", "func (s 
*Settings) Validate() error {\n\treturn nil\n}", "func (s *Settings) Validate() error {\n\treturn nil\n}", "func (s *Settings) Validate() error {\n\treturn nil\n}", "func (cfg *Config) Validate() error {\n\tif cfg.HTTPServerSettings == nil && cfg.GRPCServerSettings == nil {\n\t\treturn errAtLeastOneProtocol\n\t}\n\n\tif cfg.Source.File != \"\" && cfg.Source.Remote != nil {\n\t\treturn errTooManySources\n\t}\n\n\tif cfg.Source.File == \"\" && cfg.Source.Remote == nil {\n\t\treturn errNoSources\n\t}\n\n\treturn nil\n}", "func (cfg *Config) Validate() error {\n\tif cfg.HTTPServerSettings == nil && cfg.GRPCServerSettings == nil {\n\t\treturn errAtLeastOneProtocol\n\t}\n\n\tif cfg.Source.File != \"\" && cfg.Source.Remote != nil {\n\t\treturn errTooManySources\n\t}\n\n\tif cfg.Source.File == \"\" && cfg.Source.Remote == nil {\n\t\treturn errNoSources\n\t}\n\n\treturn nil\n}", "func (config *Config) Validate() error {\n\n\tif _, err := os.Stat(filepath.Join(config.KirdPath, config.KernelFile)); os.IsNotExist(err) {\n\t\treturn fmt.Errorf(\"kernel '%s' not found\", filepath.Join(config.KirdPath, config.KernelFile))\n\t}\n\tif _, err := os.Stat(filepath.Join(config.KirdPath, config.InitrdFile)); os.IsNotExist(err) {\n\t\treturn fmt.Errorf(\"initrd '%s' not found\", filepath.Join(config.KirdPath, config.InitrdFile))\n\t}\n\n\t// Ensure all the MappedVirtualDisks exist on the host\n\tfor _, mvd := range config.MappedVirtualDisks {\n\t\tif _, err := os.Stat(mvd.HostPath); err != nil {\n\t\t\treturn fmt.Errorf(\"mapped virtual disk '%s' not found\", mvd.HostPath)\n\t\t}\n\t\tif mvd.ContainerPath == \"\" {\n\t\t\treturn fmt.Errorf(\"mapped virtual disk '%s' requested without a container path\", mvd.HostPath)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *EtcdConfig) Validate() error {\n\t_, err := govalidator.ValidateStruct(c)\n\treturn err\n}", "func (config AppConfigStruct) Validate() error {\n\treturn nil\n}", "func (scc *ServiceCustomConfig) Validate() error {\n\n\tif len(scc.MyserviceInfo.Host) == 0 {\n\t\treturn errors.New(\"host setting for service not configured\")\n\t}\n\tif scc.MyserviceInfo.Port == 0 {\n\t\treturn errors.New(\"port setting for service not configured\")\n\t}\n\n\tif len(scc.CommandClientInfo.Host) == 0 {\n\t\treturn errors.New(\"host setting for Core Command client not configured\")\n\t}\n\tif scc.CommandClientInfo.Port == 0 {\n\t\treturn errors.New(\"port setting for Core Command client not configured\")\n\t}\n\n\tif len(scc.NotificationClientInfo.Host) == 0 {\n\t\treturn errors.New(\"host setting for Core Command client not configured\")\n\t}\n\tif scc.NotificationClientInfo.Port == 0 {\n\t\treturn errors.New(\"port setting for Core Command client not configured\")\n\t}\n\n\tif len(scc.SchedulerClientInfo.Host) == 0 {\n\t\treturn errors.New(\"host setting for Scheduler client not configured\")\n\t}\n\tif scc.SchedulerClientInfo.Port == 0 {\n\t\treturn errors.New(\"port setting for Scheduler client not configured\")\n\t}\n\n\tif len(scc.RuleEngineClientInfo.Host) == 0 {\n\t\treturn errors.New(\"host setting for Rule Engine client not configured\")\n\t}\n\tif scc.RuleEngineClientInfo.Port == 0 {\n\t\treturn errors.New(\"port setting for Rule Engine client not configured\")\n\t}\n\n\treturn nil\n}", "func (kce *KafkaConfigExecutor) Validate(config *gateways.ConfigContext) error {\n\tkafkaConfig, err := parseConfig(config.Data.Config)\n\tif err != nil {\n\t\treturn gateways.ErrConfigParseFailed\n\t}\n\tif kafkaConfig == nil {\n\t\treturn fmt.Errorf(\"%+v, configuration must be non 
empty\", gateways.ErrInvalidConfig)\n\t}\n\tif kafkaConfig.URL == \"\" {\n\t\treturn fmt.Errorf(\"%+v, url must be specified\", gateways.ErrInvalidConfig)\n\t}\n\tif kafkaConfig.Topic == \"\" {\n\t\treturn fmt.Errorf(\"%+v, topic must be specified\", gateways.ErrInvalidConfig)\n\t}\n\tif kafkaConfig.Partition == \"\" {\n\t\treturn fmt.Errorf(\"%+v, partition must be specified\", gateways.ErrInvalidConfig)\n\t}\n\treturn nil\n}", "func (c *configData) validate() error {\n\tfor _, s := range c.settings() {\n\t\tvalue := s.Parent.GetString(s.Key)\n\t\tif value == \"\" {\n\t\t\tfmt.Println(s.Description)\n\n\t\t\tif s.InstructionURL != \"\" && confirm(\"Open URL with instructions\") {\n\t\t\t\topenURL(jira.APIInstructionsURL)\n\t\t\t}\n\n\t\t\tvalue, err := promptString(s.Label)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\ts.Parent.Set(s.Key, value)\n\t\t}\n\t}\n\treturn nil\n}", "func (s SchedulerConfig) Validate() []error {\n\treturn []error{\n\t\tcheck.Contains(\n\t\t\ts.FittingPolicy, []interface{}{best, worst}, \"invalid fitting policy\",\n\t\t),\n\t}\n}", "func (c *Config) Validate() error {\n\tnames := make(map[string]struct{}, len(c.Configs))\n\tfor idx, c := range c.Configs {\n\t\tif c.Name == \"\" {\n\t\t\treturn fmt.Errorf(\"tempo config at index %d is missing a name\", idx)\n\t\t}\n\t\tif _, exist := names[c.Name]; exist {\n\t\t\treturn fmt.Errorf(\"found multiple tempo configs with name %s\", c.Name)\n\t\t}\n\t\tnames[c.Name] = struct{}{}\n\t}\n\n\treturn nil\n}", "func (cc *Config) Validate() error {\n\tif len(cc.BrokerList) == 0 {\n\t\treturn ErrConfigNoBrokers\n\t}\n\n\tif cc.ReadTimeout < time.Millisecond {\n\t\treturn ErrConfigInvalidReadTimeout\n\t}\n\n\tif cc.WriteTimeout < time.Millisecond {\n\t\treturn ErrConfigInvalidWriteTimeout\n\t}\n\n\tif cc.ConnectTimeout < time.Millisecond {\n\t\treturn ErrConfigInvalidConnectTimeout\n\t}\n\n\tif cc.KeepAliveTimeout < time.Millisecond {\n\t\treturn ErrConfigInvalidKeepAliveTimeout\n\t}\n\n\tif cc.FetchSize < 1 {\n\t\treturn ErrConfigInvalidFetchSize\n\t}\n\n\tif cc.MetadataRetries < 0 {\n\t\treturn ErrConfigInvalidMetadataRetries\n\t}\n\n\tif cc.MetadataBackoff < time.Millisecond {\n\t\treturn ErrConfigInvalidMetadataBackoff\n\t}\n\n\tif cc.MetadataTTL < time.Millisecond {\n\t\treturn ErrConfigInvalidMetadataTTL\n\t}\n\n\tif cc.CommitOffsetRetries < 0 {\n\t\treturn ErrConfigInvalidCommitOffsetRetries\n\t}\n\n\tif cc.CommitOffsetBackoff < time.Millisecond {\n\t\treturn ErrConfigInvalidCommitOffsetBackoff\n\t}\n\n\tif cc.ConsumerMetadataRetries < 0 {\n\t\treturn ErrConfigInvalidConsumerMetadataRetries\n\t}\n\n\tif cc.ConsumerMetadataBackoff < time.Millisecond {\n\t\treturn ErrConfigInvalidConsumerMetadataBackoff\n\t}\n\n\tif cc.ClientID == \"\" {\n\t\treturn ErrConfigEmptyClientID\n\t}\n\n\treturn nil\n}", "func validateConfig() {\n\tif viper.Get(\"project\") == \"\" {\n\t\tlog.Fatal(\"Error: --project is required\")\n\t}\n\tif viper.Get(\"region\") == \"\" {\n\t\tlog.Fatal(\"Error: --region is required, e.g. 
us-west1\")\n\t}\n}", "func (c HTTPConfig) Validate() []error {\n\tvar errs []error\n\tif len(c.Host) == 0 {\n\t\terrs = append(errs, errors.Errorf(\"HTTPConfig requires a non-empty Host config value\"))\n\t}\n\tif c.Port <= 0 {\n\t\terrs = append(errs, errors.Errorf(\"HTTPConfig requires a postive Port config value\"))\n\t}\n\treturn errs\n}", "func (c Config) Validate() []error {\n\terrs := c.HTTP.Validate()\n\treturn errs\n}", "func (mt *Vironsetting) Validate() (err error) {\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\tif mt.Color == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"color\"))\n\t}\n\tif mt.Theme == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"theme\"))\n\t}\n\tif mt.Pages == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"pages\"))\n\t}\n\tif mt.Tags == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"tags\"))\n\t}\n\tif mt.Thumbnail == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"thumbnail\"))\n\t}\n\tfor _, e := range mt.Pages {\n\t\tif e != nil {\n\t\t\tif err2 := e.Validate(); err2 != nil {\n\t\t\t\terr = goa.MergeErrors(err, err2)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func validateConfig(appConfig AppConfig) error {\n\n\treturn nil\n}" ]
[ "0.7404781", "0.71457064", "0.71456033", "0.7127564", "0.7125616", "0.71085715", "0.7093498", "0.70810115", "0.70758003", "0.70457476", "0.7022566", "0.7022566", "0.7022566", "0.70011973", "0.6997131", "0.6980623", "0.697848", "0.6976476", "0.69467396", "0.6946517", "0.69463277", "0.6944748", "0.693948", "0.6925477", "0.6925007", "0.691834", "0.68731624", "0.6867182", "0.6839109", "0.6815431", "0.6813599", "0.68070155", "0.6798904", "0.67836237", "0.677999", "0.6765817", "0.6762751", "0.6760416", "0.67429835", "0.674283", "0.6731933", "0.672983", "0.6696904", "0.66946", "0.66891164", "0.6688604", "0.66860807", "0.6682912", "0.66782093", "0.66652673", "0.6650771", "0.66450256", "0.66411", "0.6639496", "0.6633817", "0.6629011", "0.6619041", "0.6618918", "0.66182446", "0.6613311", "0.660906", "0.66064554", "0.6600805", "0.6598122", "0.6587337", "0.6586402", "0.6560033", "0.6554112", "0.65537024", "0.6553184", "0.65511537", "0.65508103", "0.65444946", "0.65405726", "0.6532184", "0.65273", "0.6520744", "0.6508152", "0.6508085", "0.65079147", "0.6501972", "0.64939666", "0.64939666", "0.64939666", "0.6491832", "0.6491832", "0.6489062", "0.64890057", "0.64841026", "0.6481789", "0.6479665", "0.6475229", "0.64671344", "0.6463945", "0.6454289", "0.64504033", "0.6442148", "0.6428496", "0.64247704", "0.6408075" ]
0.65942454
64
Sends user input to Amazon Lex. Client applications can use this API to send requests to Amazon Lex at runtime. Amazon Lex then interprets the user input using the machine learning model it built for the bot. In response, Amazon Lex returns the next message to convey to the user and an optional responseCard to display. Consider the following example messages: For a user input "I would like a pizza", Amazon Lex might return a response with a message eliciting slot data (for example, PizzaSize): "What size pizza would you like?" After the user provides all of the pizza order information, Amazon Lex might return a response with a message to obtain user confirmation "Proceed with the pizza order?". After the user replies to a confirmation prompt with a "yes", Amazon Lex might return a conclusion statement: "Thank you, your cheese pizza has been ordered.". Not all Amazon Lex messages require a user response. For example, a conclusion statement does not require a response. Some messages require only a "yes" or "no" user response. In addition to the message, Amazon Lex provides additional context about the message in the response that you might use to enhance client behavior, for example, to display the appropriate client user interface. These are the slotToElicit, dialogState, intentName, and slots fields in the response. Consider the following examples: If the message is to elicit slot data, Amazon Lex returns the following context information: dialogState set to ElicitSlot; intentName set to the intent name in the current context; slotToElicit set to the slot name for which the message is eliciting information; slots set to a map of slots, configured for the intent, with currently known values. If the message is a confirmation prompt, the dialogState is set to ConfirmIntent and SlotToElicit is set to null. If the message is a clarification prompt (configured for the intent) that indicates that user intent is not understood, the dialogState is set to ElicitIntent and slotToElicit is set to null. In addition, Amazon Lex also returns your application-specific sessionAttributes. For more information, see Managing Conversation Context (
func (c *Client) PostText(ctx context.Context, params *PostTextInput, optFns ...func(*Options)) (*PostTextOutput, error) {
	if params == nil {
		params = &PostTextInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "PostText", params, optFns, addOperationPostTextMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*PostTextOutput)
	out.ResultMetadata = metadata
	return out, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func askInput(message string, response interface{}) error {\n\treturn survey.AskOne(&survey.Input{Message: message}, response, survey.MinLength(1))\n}", "func chat(ctx *gin.Context) {\n\tcred := credentials.NewStaticCredentials(os.Getenv(\"ACCESS_KEY_ID\"), os.Getenv(\"SECRET_ACCESS_KEY\"), \"\")\n\tconfig := aws.NewConfig().WithCredentials(cred).WithRegion(os.Getenv(\"AWS_REGION\"))\n\tsess := session.Must(session.NewSession(config))\n\tsvc := lexruntimeservice.New(sess)\n\tinput := &lexruntimeservice.PostTextInput{\n\t\tBotName: aws.String(ctx.Query(\"bot_name\")),\n\t\tBotAlias: aws.String(ctx.Query(\"bot_alias\")),\n\t\tInputText: aws.String(ctx.Query(\"message\")),\n\t\tUserId: aws.String(ctx.Query(\"user_id\")),\n\t}\n\tresult, err := svc.PostText(input)\n\tif err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t} else {\n\t\tctx.JSON(http.StatusOK, gin.H{\"error\": nil, \"message\": \"Bot Updated with new intent.\", \"data\": result})\n\t}\n}", "func (h Response) Ask(prompt, reprompt string) {\n\th.emit(\":ask\", prompt, reprompt)\n}", "func (a *App) VoiceReq(c *gin.Context) {\n\tctx, cancel := context.WithTimeout(c, time.Second*60)\n\tdefer cancel()\n\tmsg := models.AppReq{}\n\tif err := c.BindJSON(&msg); err != nil {\n\t\tlog.Errorf(\"app.go, ERROR UNMARSHALLING APP REQUEST: %v\", err)\n\t\tc.JSON(http.StatusBadRequest, \"Bad request\")\n\t\treturn\n\t}\n\n\tlog.Infof(\"app.go,incoming nlp request: %v\", msg)\n\tenTxt := translate(ctx, msg.Msg, msg.LangCode, \"en-IN\", a.Service.GetTranslateService())\n\t// get intent responce from dialogflow service\n\tdfResp, err := a.Service.GetDFService().GetIntent(c, msg.SessionID, enTxt)\n\tif err != nil || dfResp.Intent == \"\" {\n\t\ttmp := \"Sorry I'm still learning.\\nI would like you to PLEASE REPHRASE so that I can understand it better.\\nPlease type EXIT if you want to start over again ! \"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(ctx, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\treturn\n\t}\n\n\tsrv := a.Service.GetFFService(getQueryParams(\"Param1\", *dfResp), msg.MobileNumber, msg.SessionID, msg.UserCode, *dfResp)\n\tlog.Infof(\"app.go, srv: %v\", srv)\n\tif srv == nil && dfResp.Intent == \"Fallback\" {\n\t\ttmp := \"Iyris is still learning !\\n \\nI would like you to PLEASE REPHRASE!\\n \\nI can assist you with placing an order request.\\n \\nPlease let me know how can I help you !\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\tif srv == nil && dfResp.Intent == \"EndIntent\" {\n\t\ttmp := \"Iyris is happy to help you !\\n \\nIt is my pleasure to have you on this chat today!\\n \\nI can assist you with placing an order request.\\n \\nPlease let me know how can I help you\\n \\nThank you and have a nice day!\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\tif srv == nil && dfResp.Intent == \"HowAreYouIntent\" {\n\t\ttmp := \"Hi, I am doing good. 
!\\n \\nIt is my pleasure to have you on this chat today!\\n \\nI can assist you with placing an order request.\\n \\nPlease let me know how can I help you.\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\tif srv == nil {\n\t\tlog.Infof(\"could not resolve ff service : %v\", dfResp)\n\t\ttmp := \"Dear Customer, Sorry I'm still learning.\\n \\nIn case I am not able to assist you, please reach us at [email protected] for further assistance.\\n\\nThanks !\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\n\t// get fulfillment service response\n\tr, err := srv.GetFFResp(ctx)\n\tlog.Infof(\"app.go, r: %v \", r)\n\tlog.Infof(\"app.go, err %v:\", err)\n\n\tif err != nil {\n\t\t// if err == sapmodel.ErrFallBack {\n\t\t// \tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t// \t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t// \treturn\n\t\t// }\n\t\tif r == \"\" {\n\t\t\ttmp := \"Dear User, I am facing problem in fetching the desired information currently. Please try after sometime or reach us at [email protected] for further assistance.\\n\\nThanks !\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrCustomerName {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrSerial {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrCustomerEmail {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackProduct {\n\t\t\tif r == \"I'm sorry but you have no product registered !\\nPlease type EXIT if you want to start over again ! 
\" {\n\t\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackProductType {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackRegistration {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\n\t\tif err == sapmodel.ErrFallBackIssue {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackComplaint {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackAddress {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackService {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrJobID {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrAddressLines {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrPin {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrOrderRequest {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrCreditRequest {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrWalletRequest {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrOrderConfirmCancel {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, 
fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t\t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\t// if err == sapmodel.ErrOrde{\n\t\t// \tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode,\n\t\t// \t\tmsg.LangCode, a.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t// \treturn\n\t\t// }\n\t\t// if err == sapmodel.ErrEndSession {\n\t\t// \ttmp := \"You have opted not to apply for any leave, Thanks !!!\"\n\t\t// \tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t// \t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t// \treturn\n\t\t// }\n\t\tif err == sapmodel.ErrEndSessionCustomer {\n\t\t\ttmp := \"You have opted not to register yourself, Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionProduct {\n\t\t\ttmp := \"You have opted not to provide any Product Serial Code, Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionCreateSR {\n\t\t\ttmp := \"You have opted not to raise any Service Request, Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrWrongAddress {\n\t\t\ttmp := \"Dear Customer, We are not able to raise a service request for the address option selected. Please try to raise the service request again !\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionAddressAdd {\n\t\t\ttmp := \"You have opted not to add any new address, Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionAddressUpdate {\n\t\t\ttmp := \"You have opted not to update any new address, Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndJobStatus {\n\t\t\ttmp := \"You have opted out of Job Status, Please visit again. Thanks !!!\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndRephrase {\n\t\t\ttmp := \"Sorry I'm still learning.\\nI would like you to PLEASE REPHRASE and provide all the information !\\nPlease type EXIT if you want to start over again ! 
\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: msg.SessionID})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionService {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionOrder {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionCredit {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrEndSessionWallet {\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\t\tif err == sapmodel.ErrFallBackIndex {\n\t\t\ttmp := \"You've entered INVALID INDEX NUMBER. Please try to raise the Service Request again.\"\n\t\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\t\treturn\n\t\t}\n\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r), msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\tif r == nil || r == \"\" {\n\t\ttmp := \"Dear Customer, Sorry I'm still learning.\\n\\nIn case I am not able to assist you, please reach us at [email protected] for further assistance.\\n\\nThanks !\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\tif r == \"\" {\n\t\ttmp := \"Dear Customer, Sorry I'm still learning.\\n\\nIn case I am not able to assist you, please reach us at [email protected] for further assistance.\\n\\nThanks !\"\n\t\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, tmp, msg.LangCode, msg.LangCode,\n\t\t\ta.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, &models.AppResp{Data: translate(c, fmt.Sprintf(\"%v\", r),\n\t\tmsg.LangCode, msg.LangCode, a.Service.GetTranslateService()), SessionID: getSessionID(msg.UserCode)})\n\treturn\n}", "func (p *Prompt) Ask(text string, opts *InputOptions) (string, error) {\n\tformat := p.fmtInputOptions(opts)\n\n\tresp, err := p.read(text, format)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinput := strings.TrimSpace(resp)\n\n\t// show me what you're working with\n\tswitch input {\n\tcase \"\":\n\t\t// check the opts\n\t\tswitch opts {\n\t\tcase nil:\n\t\t\t// no options and no input means we return an error\n\t\t\treturn \"\", errors.New(\"no input or default value provided\")\n\t\tdefault:\n\t\t\t// check if there is a default to return\n\t\t\tif opts.Default != \"\" {\n\t\t\t\treturn opts.Default, nil\n\t\t\t}\n\n\t\t\tif opts.Validator != nil {\n\t\t\t\t// validate in provided input - even if empty\n\t\t\t\tif err := 
opts.Validator(input); err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tswitch opts {\n\t\tcase nil:\n\t\t\t// there are no options, so just return the input\n\t\t\treturn input, nil\n\t\tdefault:\n\t\t\tif opts.Validator != nil {\n\t\t\t\t// validate in provided input\n\t\t\t\tif err := opts.Validator(input); err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn input, nil\n}", "func Askwit(textrequest string, res interface{}) error {\n\t//I prepare the request to send to wit.ai to interpret what is being said in Telegram\n\treq, err := http.NewRequest(\"GET\", \"https://api.wit.ai/message\", nil)\n\tif err != nil {\n\t\tlog.Print(err)\n\t\tos.Exit(1)\n\t}\n\tq := req.URL.Query()\n\tq.Add(\"q\", textrequest)\n\treq.URL.RawQuery = q.Encode()\n\tfmt.Println(req.URL.String())\n\treq.Header.Add(\"Authorization\", \"Bearer \")\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\treq.Header.Add(\"X-Accept\", \"application/json\")\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tfmt.Println(\"Error when sending request to the server\")\n\t}\n\tdefer resp.Body.Close()\n\n\treturn json.NewDecoder(resp.Body).Decode(res)\n}", "func EchoWhoseTurn(w http.ResponseWriter, r *http.Request) {\n\t//echoReq := context.Get(r, \"echoRequest\").(*alexa.EchoRequest)\n\techoReq := alexa.GetEchoRequest(r)\n\tlog.Println(echoReq.GetRequestType())\n\n\t// Start Mongo\n\tmongodb, err := mgo.Dial(\"localhost\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t//access the correct user from the DB\n\tcol := mongodb.DB(\"whoseTurn\").C(\"users\")\n\tdefer mongodb.Close()\n\n\t//get user's id and load into user var\n\tid := echoReq.GetUserID()\n\tuser := loadUser(col, id)\n\n\t//prints request type for debugging\n\tlog.Println(echoReq.GetRequestType())\n\tif echoReq.GetRequestType() == \"LaunchRequest\" {\n\t\tmsg := \"Welcome to Who's Next. 
What can I do for you?\"\n\t\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(false)\n\n\t\tjson, _ := echoResp.String()\n\t\tw.Header().Set(\"Content-Type\", \"application/json;charset=UTF-8\")\n\t\tw.Write(json)\n\n\t} else if echoReq.GetRequestType() == \"SessionEndedRequest\" {\n\t\tmsg := \"Goodbye\"\n\t\t//end the session in the case of a SessionWndedRequest\n\t\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(true)\n\t\tjson, _ := echoResp.String()\n\t\tw.Header().Set(\"Content-Type\", \"application/json;charset=UTF-8\")\n\t\tw.Write(json)\n\n\t} else if echoReq.GetRequestType() == \"IntentRequest\" {\n\t\t//print intent name for debugging\n\t\tlog.Println(echoReq.GetIntentName())\n\t\t//create an echoResp that will be populated by intent function\n\t\tvar echoResp *alexa.EchoResponse\n\n\t\t//call intent function depending on given intent name\n\t\tswitch echoReq.GetIntentName() {\n\t\tcase \"AMAZON.HelpIntent\":\n\t\t\techoResp = help(echoReq)\n\t\tcase \"AMAZON.StopIntent\":\n\t\t\techoResp = cancel(echoReq)\n\t\tcase \"AMAZON.CancelIntent\":\n\t\t\techoResp = cancel(echoReq)\n\t\tcase \"ListActivities\":\n\t\t\techoResp = listActivities(echoReq, user)\n\t\tcase \"ListPeopleOnActivity\":\n\t\t\techoResp = listPeopleOnActivity(echoReq, col, user)\n\t\tcase \"AddActivity\":\n\t\t\techoResp = addActivity(echoReq, col, user)\n\t\tcase \"AddPersonToActivity\":\n\t\t\techoResp = addPersonToActivity(echoReq, col, user)\n\t\tcase \"RemoveActivity\":\n\t\t\techoResp = removeActivity(echoReq, col, user)\n\t\tcase \"RemovePersonFromActivity\":\n\t\t\techoResp = removePersonFromActivity(echoReq, col, user)\n\t\tcase \"WhoseTurnForActivity\":\n\t\t\techoResp = whoseTurnForActivity(echoReq, col, user)\n\t\tcase \"CompletedActivity\":\n\t\t\techoResp = completedActivity(echoReq, col, user)\n\t\t}\n\t\tjson, _ := echoResp.String()\n\t\tw.Header().Set(\"Content-Type\", \"application/json;charset=UTF-8\")\n\t\tw.Write(json)\n\t}\n}", "func (term *Terminal) Ask(question string) (bool, error) {\n\tinput, err := term.Custom(question+\"? (y/n)\", func(input string) (string, bool) {\n\t\tif input == \"\" {\n\t\t\treturn \"\", false\n\t\t}\n\t\tinput = strings.ToLower(input)\n\n\t\tif input == \"y\" || input == \"yes\" {\n\t\t\treturn \"yes\", true\n\t\t}\n\n\t\treturn \"\", true\n\t})\n\n\tvar ok bool\n\tif input != \"\" {\n\t\tok = true\n\t}\n\n\treturn ok, err\n}", "func (adapter *Adapter) SendMessage(ctx context.Context, output sarah.Output) {\n\tvar message *webapi.PostMessage\n\tswitch content := output.Content().(type) {\n\tcase *webapi.PostMessage:\n\t\tmessage = content\n\n\tcase string:\n\t\tchannel, ok := output.Destination().(event.ChannelID)\n\t\tif !ok {\n\t\t\tlogger.Errorf(\"Destination is not instance of Channel. %#v.\", output.Destination())\n\t\t\treturn\n\t\t}\n\t\tmessage = webapi.NewPostMessage(channel, content)\n\n\tcase *sarah.CommandHelps:\n\t\tchannelID, ok := output.Destination().(event.ChannelID)\n\t\tif !ok {\n\t\t\tlogger.Errorf(\"Destination is not instance of Channel. 
%#v.\", output.Destination())\n\t\t\treturn\n\t\t}\n\n\t\tvar fields []*webapi.AttachmentField\n\t\tfor _, commandHelp := range *output.Content().(*sarah.CommandHelps) {\n\t\t\tfields = append(fields, &webapi.AttachmentField{\n\t\t\t\tTitle: commandHelp.Identifier,\n\t\t\t\tValue: commandHelp.Instruction,\n\t\t\t\tShort: false,\n\t\t\t})\n\t\t}\n\t\tattachments := []*webapi.MessageAttachment{\n\t\t\t{\n\t\t\t\tFallback: \"Here are some input instructions.\",\n\t\t\t\tPretext: \"Help:\",\n\t\t\t\tTitle: \"\",\n\t\t\t\tFields: fields,\n\t\t\t},\n\t\t}\n\t\tmessage = webapi.NewPostMessage(channelID, \"\").WithAttachments(attachments)\n\n\tdefault:\n\t\tlogger.Warnf(\"Unexpected output %#v\", output)\n\t\treturn\n\t}\n\n\tresp, err := adapter.client.PostMessage(ctx, message)\n\tif err != nil {\n\t\tlogger.Errorf(\"Something went wrong with Web API posting: %+v. %+v\", err, message)\n\t\treturn\n\t}\n\n\tif !resp.OK {\n\t\tlogger.Errorf(\"Failed to post message %#v: %s\", message, resp.Error)\n\t}\n}", "func (_BREMICO *BREMICOSession) Request() (struct {\n\tValue *big.Int\n\tConfirmAmount *big.Int\n}, error) {\n\treturn _BREMICO.Contract.Request(&_BREMICO.CallOpts)\n}", "func (c *Client) Ask(prompt string) string {\n\tfmt.Printf(\"%s \", prompt)\n\trd := bufio.NewReader(os.Stdin)\n\tline, err := rd.ReadString('\\n')\n\tif err == nil {\n\t\treturn strings.TrimSpace(line)\n\t}\n\treturn \"\"\n}", "func (_BREMICO *BREMICOCallerSession) Request() (struct {\n\tValue *big.Int\n\tConfirmAmount *big.Int\n}, error) {\n\treturn _BREMICO.Contract.Request(&_BREMICO.CallOpts)\n}", "func createBot(ctx *gin.Context) {\n\tbody := struct {\n\t\tName string `json:\"name\"`\n\t\tChildDirected bool `json:\"child_directed\"`\n\t\tLocale string `json:\"locale\"`\n\t\tAbortMessages []string `json:\"abort_messages\"`\n\t\tClarificationPrompts []string `json:\"clarification_prompts\"`\n\t}{}\n\tif err := ctx.Bind(&body); err != nil { //validation error\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error(), \"message\": \"Validation Error.\", \"data\": nil})\n\t} else {\n\t\tcred := credentials.NewStaticCredentials(os.Getenv(\"ACCESS_KEY_ID\"), os.Getenv(\"SECRET_ACCESS_KEY\"), \"\")\n\t\tconfig := aws.NewConfig().WithCredentials(cred).WithRegion(os.Getenv(\"AWS_REGION\"))\n\t\tsess := session.Must(session.NewSession(config))\n\t\tsvc := lexmodelbuildingservice.New(sess)\n\t\tvar clarificationPrompts []*lexmodelbuildingservice.Message\n\t\tfor _, val := range body.ClarificationPrompts {\n\t\t\tclarificationPrompts = append(clarificationPrompts, &lexmodelbuildingservice.Message{\n\t\t\t\tContent: aws.String(val),\n\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t})\n\t\t}\n\t\tvar abortMessages []*lexmodelbuildingservice.Message\n\t\tfor _, val := range body.AbortMessages {\n\t\t\tabortMessages = append(abortMessages, &lexmodelbuildingservice.Message{\n\t\t\t\tContent: aws.String(val),\n\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t})\n\t\t}\n\t\t_, err = svc.PutBot(&lexmodelbuildingservice.PutBotInput{\n\t\t\tName: aws.String(body.Name),\n\t\t\tChildDirected: aws.Bool(body.ChildDirected),\n\t\t\tLocale: aws.String(body.Locale),\n\t\t\tClarificationPrompt: &lexmodelbuildingservice.Prompt{Messages: clarificationPrompts, MaxAttempts: aws.Int64(5)},\n\t\t\tAbortStatement: &lexmodelbuildingservice.Statement{Messages: abortMessages},\n\t\t})\n\t\tif err != nil {\n\t\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t\t} else 
{\n\t\t\t_, err := svc.PutBotAlias(&lexmodelbuildingservice.PutBotAliasInput{\n\t\t\t\tBotName: aws.String(body.Name),\n\t\t\t\tBotVersion: aws.String(\"$LATEST\"),\n\t\t\t\tName: aws.String(body.Name),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t\t\t} else {\n\t\t\t\tctx.JSON(http.StatusOK, gin.H{\"error\": nil, \"message\": \"New Bot Created.\", \"data\": nil})\n\t\t\t}\n\t\t}\n\t}\n}", "func HelpPrompt(echoRequest *skillserver.EchoRequest) (response *skillserver.EchoResponse) {\n\tresponse = skillserver.NewEchoResponse()\n\n\tresponse.OutputSpeech(\"Welcome Guardian, I am here to help manage your Destiny in-game inventory. You can ask \" +\n\t\t\"me to equip your max light loadout, unload engrams from your inventory, or transfer items between your available \" +\n\t\t\"characters including the vault. You can also ask how many of an \" +\n\t\t\"item you have. Trials of Osiris statistics provided by Trials Report are available too.\").\n\t\tEndSession(false)\n\n\treturn\n}", "func ExampleLexModelBuildingService_PutIntent_shared00() {\n\tsvc := lexmodelbuildingservice.New(session.New())\n\tinput := &lexmodelbuildingservice.PutIntentInput{\n\t\tConclusionStatement: &lexmodelbuildingservice.Statement{\n\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t{\n\t\t\t\t\tContent: aws.String(\"All right, I ordered you a {Crust} crust {Type} pizza with {Sauce} sauce.\"),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tContent: aws.String(\"OK, your {Crust} crust {Type} pizza with {Sauce} sauce is on the way.\"),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t},\n\t\t\t},\n\t\t\tResponseCard: aws.String(\"foo\"),\n\t\t},\n\t\tConfirmationPrompt: &lexmodelbuildingservice.Prompt{\n\t\t\tMaxAttempts: aws.Int64(1),\n\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t{\n\t\t\t\t\tContent: aws.String(\"Should I order your {Crust} crust {Type} pizza with {Sauce} sauce?\"),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tDescription: aws.String(\"Order a pizza from a local pizzeria.\"),\n\t\tFulfillmentActivity: &lexmodelbuildingservice.FulfillmentActivity{\n\t\t\tType: aws.String(\"ReturnIntent\"),\n\t\t},\n\t\tName: aws.String(\"DocOrderPizza\"),\n\t\tRejectionStatement: &lexmodelbuildingservice.Statement{\n\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t{\n\t\t\t\t\tContent: aws.String(\"Ok, I'll cancel your order.\"),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tContent: aws.String(\"I cancelled your order.\"),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tSampleUtterances: []*string{\n\t\t\taws.String(\"Order me a pizza.\"),\n\t\t\taws.String(\"Order me a {Type} pizza.\"),\n\t\t\taws.String(\"I want a {Crust} crust {Type} pizza\"),\n\t\t\taws.String(\"I want a {Crust} crust {Type} pizza with {Sauce} sauce.\"),\n\t\t},\n\t\tSlots: []*lexmodelbuildingservice.Slot{\n\t\t\t{\n\t\t\t\tDescription: aws.String(\"The type of pizza to order.\"),\n\t\t\t\tName: aws.String(\"Type\"),\n\t\t\t\tPriority: aws.Int64(1),\n\t\t\t\tSampleUtterances: []*string{\n\t\t\t\t\taws.String(\"Get me a {Type} pizza.\"),\n\t\t\t\t\taws.String(\"A {Type} pizza please.\"),\n\t\t\t\t\taws.String(\"I'd like a {Type} pizza.\"),\n\t\t\t\t},\n\t\t\t\tSlotConstraint: aws.String(\"Required\"),\n\t\t\t\tSlotType: 
aws.String(\"DocPizzaType\"),\n\t\t\t\tSlotTypeVersion: aws.String(\"$LATEST\"),\n\t\t\t\tValueElicitationPrompt: &lexmodelbuildingservice.Prompt{\n\t\t\t\t\tMaxAttempts: aws.Int64(1),\n\t\t\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"What type of pizza would you like?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"Vegie or cheese pizza?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"I can get you a vegie or a cheese pizza.\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tDescription: aws.String(\"The type of pizza crust to order.\"),\n\t\t\t\tName: aws.String(\"Crust\"),\n\t\t\t\tPriority: aws.Int64(2),\n\t\t\t\tSampleUtterances: []*string{\n\t\t\t\t\taws.String(\"Make it a {Crust} crust.\"),\n\t\t\t\t\taws.String(\"I'd like a {Crust} crust.\"),\n\t\t\t\t},\n\t\t\t\tSlotConstraint: aws.String(\"Required\"),\n\t\t\t\tSlotType: aws.String(\"DocPizzaCrustType\"),\n\t\t\t\tSlotTypeVersion: aws.String(\"$LATEST\"),\n\t\t\t\tValueElicitationPrompt: &lexmodelbuildingservice.Prompt{\n\t\t\t\t\tMaxAttempts: aws.Int64(1),\n\t\t\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"What type of crust would you like?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"Thick or thin crust?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tDescription: aws.String(\"The type of sauce to use on the pizza.\"),\n\t\t\t\tName: aws.String(\"Sauce\"),\n\t\t\t\tPriority: aws.Int64(3),\n\t\t\t\tSampleUtterances: []*string{\n\t\t\t\t\taws.String(\"Make it {Sauce} sauce.\"),\n\t\t\t\t\taws.String(\"I'd like {Sauce} sauce.\"),\n\t\t\t\t},\n\t\t\t\tSlotConstraint: aws.String(\"Required\"),\n\t\t\t\tSlotType: aws.String(\"DocPizzaSauceType\"),\n\t\t\t\tSlotTypeVersion: aws.String(\"$LATEST\"),\n\t\t\t\tValueElicitationPrompt: &lexmodelbuildingservice.Prompt{\n\t\t\t\t\tMaxAttempts: aws.Int64(1),\n\t\t\t\t\tMessages: []*lexmodelbuildingservice.Message{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"White or red sauce?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tContent: aws.String(\"Garlic or tomato sauce?\"),\n\t\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tresult, err := svc.PutIntent(input)\n\tif err != nil {\n\t\tif aerr, ok := err.(awserr.Error); ok {\n\t\t\tswitch aerr.Code() {\n\t\t\tcase lexmodelbuildingservice.ErrCodeConflictException:\n\t\t\t\tfmt.Println(lexmodelbuildingservice.ErrCodeConflictException, aerr.Error())\n\t\t\tcase lexmodelbuildingservice.ErrCodeLimitExceededException:\n\t\t\t\tfmt.Println(lexmodelbuildingservice.ErrCodeLimitExceededException, aerr.Error())\n\t\t\tcase lexmodelbuildingservice.ErrCodeInternalFailureException:\n\t\t\t\tfmt.Println(lexmodelbuildingservice.ErrCodeInternalFailureException, aerr.Error())\n\t\t\tcase lexmodelbuildingservice.ErrCodeBadRequestException:\n\t\t\t\tfmt.Println(lexmodelbuildingservice.ErrCodeBadRequestException, aerr.Error())\n\t\t\tcase 
lexmodelbuildingservice.ErrCodePreconditionFailedException:\n\t\t\t\tfmt.Println(lexmodelbuildingservice.ErrCodePreconditionFailedException, aerr.Error())\n\t\t\tdefault:\n\t\t\t\tfmt.Println(aerr.Error())\n\t\t\t}\n\t\t} else {\n\t\t\t// Print the error, cast err to awserr.Error to get the Code and\n\t\t\t// Message from an error.\n\t\t\tfmt.Println(err.Error())\n\t\t}\n\t\treturn\n\t}\n\n\tfmt.Println(result)\n}", "func (h *Handler) OnIntent(ctx context.Context, request *alexa.Request, session *alexa.Session, ctxPtr *alexa.Context, response *alexa.Response) error {\n\tlog.Printf(\"OnIntent requestId=%s, sessionId=%s, intent=%s\", request.RequestID, session.SessionID, request.Intent.Name)\n\n\tswitch request.Intent.Name {\n\tcase \"NEXT_WIN_CHANCE\":\n\t\tlog.Println(\"Next win chance intent triggered\")\n\t\tspeechText := \"Hello World\"\n\n\t\tteamSlot := request.Intent.Slots[\"TEAM\"]\n\t\tlog.Printf(teamSlot.ID)\n\t\tlog.Printf(teamSlot.Name)\n\t\tlog.Printf(teamSlot.Value)\n\n\t\tresponse.SetSimpleCard(cardTitle, speechText)\n\t\tresponse.SetOutputText(speechText)\n\n\t\tlog.Printf(\"Set Output speech, value now: %s\", response.OutputSpeech.Text)\n\tdefault:\n\t\treturn errors.New(\"Invalid Intent\")\n\t}\n\n\treturn nil\n}", "func start(ctx *ext.Context) error {\r\n\t_, err := ctx.EffectiveMessage.Reply(\r\n\t\tctx.Bot,\r\n\t\tfmt.Sprintf(\r\n\t\t\t\"Hello, I'm @%s. Your <b>personal assistant</b>, how can i help you?\",\r\n\t\t\tctx.Bot.User.Username,\r\n\t\t),\r\n\t\t&gotgbot.SendMessageOpts{\r\n\t\t\tParseMode: \"html\",\r\n\t\t\tReplyMarkup: gotgbot.InlineKeyboardMarkup{\r\n\t\t\t\tInlineKeyboard: [][]gotgbot.InlineKeyboardButton{{\r\n\t\t\t\t\t{Text: \"My Telegram ID\", CallbackData: \"uid_callback\"},\r\n\t\t\t\t\t{Text: \"Please Help Me!\", CallbackData: \"help_callback\"},\r\n\t\t\t\t}},\r\n\t\t\t},\r\n\t\t},\r\n\t)\r\n\r\n\tif err != nil {\r\n\t\tfmt.Println(\"failed to send: \" + err.Error())\r\n\t}\r\n\r\n\treturn nil\r\n}", "func (e *ExtendedConnection) answer(msg string) {\n\te.Emit(\"answer\", &Answer{msg})\n}", "func handlerFunc(w http.ResponseWriter, r *http.Request) {\n\n\t// //get the string from the input box\n\t//userSent := r.Header.Get(\"userAskEliza\")\n\t userSent := r.URL.Query().Get(\"value\")\n\n\t//send the answer to the user\n\tfmt.Fprintf(w, \"\\n%s\\n\", util.ReplyQuestion(userSent))\n\n}", "func AskAquestion() {\n\tanswerService := answer.NewAnswerService(os.Getenv(\"MICRO_API_TOKEN\"))\n\trsp, err := answerService.Question(&answer.QuestionRequest{\n\t\tQuery: \"microsoft\",\n\t})\n\tfmt.Println(rsp, err)\n}", "func HandleRequest(ctx context.Context, req events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) {\n\n\t// Slack sends its parameters as url encoded data in the request body. These need to be parsed to obtain the key/values. A list of the data slack sends can be seen [here](https://api.slack.com/interactivity/slash-commands).\n\n\t// Get slack params\n\tparams, err := url.ParseQuery(req.Body)\n\tif err != nil {\n\t\treturn internalError(fmt.Errorf(\"decoding slack params: %v\", err))\n\t}\n\ttext := params.Get(\"text\")\n\n\t// Do something. 
Anything you want really\n\t// Some cool code\n\n\t// Construct response data\n\tr := Response{\n\t\tType: \"in_channel\",\n\t\tText: fmt.Sprintf(\"You said '%s'\", text),\n\t}\n\n\tdata, err := json.Marshal(r)\n\tif err != nil {\n\t\treturn events.APIGatewayProxyResponse{\n\t\t\tStatusCode: 500,\n\t\t\tBody: err.Error(),\n\t\t}, nil\n\t}\n\n\treturn events.APIGatewayProxyResponse{\n\t\tStatusCode: 200,\n\t\tHeaders: map[string]string{\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t},\n\t\tBody: string(data),\n\t}, nil\n}", "func (b *Bot) SendMessage(request axon.O) (result axon.O, err error) {\n\tvar response interface{}\n\tif response, err = b.doPost(\"sendMessage\", request); err == nil {\n\t\tresult = response.(map[string]interface{})\n\t}\n\treturn\n}", "func (cx *Context) Ask() *Context {\n\tcx.response.ShouldEndSession = false\n\treturn cx\n}", "func (user *User) Ask(other *User, question string) *Question {\n\treturn &Question{\n\t\tText: question,\n\t\tToUser: *other,\n\t\tToUserID: user.ID,\n\t\tFromUser: *user,\n\t\tFromUserID: user.ID,\n\t}\n}", "func (p *proxyALTextToSpeech) Say(stringToSay string) error {\n\tvar ret struct{}\n\targs := bus.NewParams(\"(s)\", stringToSay)\n\tresp := bus.NewResponse(\"v\", &ret)\n\terr := p.Proxy().Call2(\"say\", args, resp)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"call say failed: %s\", err)\n\t}\n\treturn nil\n}", "func Input(prompt string) string {\n\ttext, err := InputWithError(prompt)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn text\n}", "func SetInput(ctx context.Context, address, input string) *nerr.E {\n\n\turl := fmt.Sprintf(\"http://%s/aj.html?a=command&cmd=x%sAVx1\", address, input)\n\treq, err := http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\treturn nerr.Translate(err).Addf(\"error when making request: %s\", err)\n\t}\n\treq = req.WithContext(ctx)\n\tres, gerr := http.DefaultClient.Do(req)\n\tif gerr != nil {\n\t\treturn nerr.Translate(gerr).Addf(\"error when making call: %s\", gerr)\n\t}\n\tdefer res.Body.Close()\n\treturn nil\n\n}", "func UserInputHandler(client proto.KeyValueStoreServiceClient, connection *grpc.ClientConn) {\n\treader := bufio.NewReader(os.Stdin)\n\n\t// Infinite loop\n\tfor {\n\t\tfmt.Println(\"Please Enter one of the following operations: \\nPUT/GET/DELETE :\")\n\t\tuserInput, _ := reader.ReadString('\\n')\n\n\t\tswitch formatUpper(userInput) {\n\t\tcase \"PUT\":\n\t\t\t{\n\t\t\t\tfmt.Println(\"Please enter a Key:\")\n\t\t\t\tkey, _ := reader.ReadString('\\n')\n\n\t\t\t\tfmt.Println(\"Please enter a Value\")\n\t\t\t\tvalue, _ := reader.ReadString('\\n')\n\n\t\t\t\tcheckForConnection(connection)\n\t\t\t\tresponse, err := client.PUT(context.Background(), &proto.PutRequest{Key: format(key), Value: format(value)})\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Fatalf(\"Error occurred when calling PUT %v \", err)\n\t\t\t\t}\n\n\t\t\t\tlog.Printf(\"Response is received from server \")\n\t\t\t\tlog.Printf(\"Response Code : %d \", response.ResponseCode)\n\t\t\t\tlog.Printf(\"Response Message : %s \\n\", response.Message)\n\t\t\t}\n\t\tcase \"GET\":\n\t\t\t{\n\t\t\t\tfmt.Println(\"Please enter the Key:\")\n\t\t\t\tkey, _ := reader.ReadString('\\n')\n\n\t\t\t\tcheckForConnection(connection)\n\t\t\t\tresponse, err := client.GET(context.Background(), &proto.GetAndDeleteRequest{Key: format(key)})\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Fatalf(\"Error occurred when calling GET %v \", err)\n\t\t\t\t}\n\n\t\t\t\tlog.Printf(\"Response is received from server \")\n\t\t\t\tlog.Printf(\"Response Code : %d 
\", response.ResponseCode)\n\t\t\t\tlog.Printf(\"Response Message : %s \\n\", response.Message)\n\t\t\t}\n\t\tcase \"DELETE\":\n\t\t\t{\n\t\t\t\tfmt.Println(\"Please enter the Key:\")\n\t\t\t\tkey, _ := reader.ReadString('\\n')\n\n\t\t\t\tcheckForConnection(connection)\n\t\t\t\tresponse, err := client.DELETE(context.Background(), &proto.GetAndDeleteRequest{Key: format(key)})\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Fatalf(\"Error occurred when calling GET %v \", err)\n\t\t\t\t}\n\n\t\t\t\tlog.Printf(\"Response is received from server \")\n\t\t\t\tlog.Printf(\"Response Code : %d \", response.ResponseCode)\n\t\t\t\tlog.Printf(\"Response Message : %s \\n\", response.Message)\n\t\t\t}\n\t\tdefault:\n\t\t\t{\n\t\t\t\tlog.Printf(\"\\nOperation entered in invalid.\\n\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (c *JSONRPCSignalClient) Answer(answer *webrtc.SessionDescription) error {\n\tif c.jc == nil {\n\t\treturn errNotConnected\n\t}\n\n\tlog.Info(\"signal client sending answer\")\n\treturn c.jc.Notify(c.context, \"answer\", &cluster.Negotiation{Desc: *answer})\n}", "func HandleRequest(ctx context.Context, event InputEvent) (Response, error) {\n\tqueueURL := aws.String(os.Getenv(\"SQS_URL\"))\n\tbucketName := aws.String(os.Getenv(\"BUCKET_NAME\"))\n\tawsSession := session.New()\n\tpollyClient := polly.New(awsSession)\n\ts3Uploader := s3manager.NewUploader(awsSession)\n\tsqsClinet := sqs.New(awsSession)\n\tid, err := uuid.NewV1()\n\tfailureResponse := Response{}\n\n\tif err != nil {\n\t\t// Error when creating audio file name\n\t\treturn failureResponse, err\n\t}\n\n\toutput, err := synthesizeVoice(pollyClient, &event.Text, &event.Voice)\n\n\tif err != nil {\n\t\t// Failed to synthesize audio file\n\t\treturn failureResponse, err\n\t}\n\n\t_, err = uploadToS3(s3Uploader, bucketName, aws.String(id.String() + \".mp3\"), &output)\n\n\tif err != nil {\n\t\t// Failed to upload audio to S3\n\t\treturn failureResponse, err\n\t}\n\n\t_, err = sqsClinet.SendMessage(&sqs.SendMessageInput{\n\t\tMessageBody: aws.String(id.String() + \".mp3\"),\n\t\tQueueUrl: queueURL,\n\t})\n\n\tif err != nil {\n\t\t// Failed to push message to audio file name to SQS\n\t\treturn failureResponse, err\n\t}\n\n\treturn Response{Success: true, FileName: id.String(), FileType: \"mp3\"}, nil\n}", "func Ask(question string) string {\n\tfmt.Fprintln(Out, question)\n\n\treturn prompt()\n}", "func (c Client) helpRequest() {\n\terr := c.encodeRequest(messages.PlayerReq{Action: game.Help})\n\tif err != nil {\n\t\tfmt.Fprintf(c.Output, \"unexpected error: %v \\n\", err)\n\t}\n\n\tvar resp messages.HelpResp\n\terr = c.decodeResponse(&resp)\n\tif err != nil {\n\t\tfmt.Printf(\"Unexpected error: %v\", err)\n\t}\n\n\tfmt.Fprintln(c.Output, resp.Info)\n}", "func Request(pid *PID, message interface{}, respondTo *PID) {\n\tpid.ref().SendUserMessage(pid, message, respondTo)\n}", "func main() {\n\t// var whatToSay string\n\t// whatToSay = \"Hello World again!\"\n\n\t// store a string in a variable and it figure out what type this is based\n\t// whatToSay := \"Hello World again!\"\n\n\t// or\n\t// var whatToSay string = \"Hello World again!\"\n\t// sayHelloWorld(whatToSay)\n\n\treader := bufio.NewReader(os.Stdin)\n\n\tvar whatToSay string = doctor.Intro()\n\n\tfmt.Println(whatToSay)\n\n\tfor {\n\t\tfmt.Print(\"-> \")\n\t\tuserInput, _ := reader.ReadString('\\n')\n\t\tuserInput = strings.Replace(userInput, \"\\r\\n\", \"\", -1)\n\t\tuserInput = strings.Replace(userInput, \"\\n\", \"\", -1)\n\t\tif userInput == \"quit\" {\n\t\t\tbreak\n\t\t} else 
{\n\t\t\tfmt.Println(doctor.Response(userInput))\n\t\t}\n\t}\n}", "func HandleRequest(req events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) {\n\n\tmsg := Message{}\n\tlog.Printf(\"req.Body = %v\\n\", req.Body)\n\tif err := json.Unmarshal([]byte(req.Body), &msg); err != nil {\n\t\tlog.Printf(\"Executing defaultmessage lambda function\\n\")\n\t\treturn events.APIGatewayProxyResponse{\n\t\t\tStatusCode: 500,\n\t\t\tBody: \"Error parsing message\",\n\t\t}, nil\n\t}\n\n\tlog.Printf(\"Successful execution of defaultmessage lambda function\\n\")\n\treturn events.APIGatewayProxyResponse{\n\t\t//Body: msg.Content + \" (echoed)\",\n\t\tBody: \"{\\\"status\\\": 200}\",\n\t\tStatusCode: 200,\n\t}, nil\n}", "func parseInput(message string) Command {\n res := standardInputMessageRegex.FindAllStringSubmatch(message, -1)\n if (len(res) == 1) {\n // there is a command\n return Command {\n Command: res[0][1],\n Body: res[0][2],\n }\n } else {\n return Command {\n Body: util.Decode(message),\n }\n }\n}", "func RequestInput(message string, validate ValidatorFunction) (string, error) {\n\tfor {\n\t\tvalue, err := skipEOFError(getTextInput(fmt.Sprintf(\"%s: \", message)))\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\n\t\tvalue = strings.TrimSpace(value)\n\t\tif err = validate(value); err != nil {\n\t\t\tfmt.Println(strings.TrimSpace(err.Error()))\n\t\t\tcontinue\n\t\t}\n\t\treturn value, nil\n\t}\n}", "func help(echoReq *alexa.EchoRequest) *alexa.EchoResponse {\n\tmsg := \"Try adding an activity by saying add, then the name of the activity\"\n\techoResp := alexa.NewEchoResponse().OutputSpeech(msg).EndSession(false)\n\treturn echoResp\n}", "func (_Quiz *QuizTransactorSession) SendAnswer(_ans [32]byte) (*types.Transaction, error) {\n\treturn _Quiz.Contract.SendAnswer(&_Quiz.TransactOpts, _ans)\n}", "func (conn *IRODSConnection) Request(request Request, response Response) error {\n\trequestMessage, err := request.GetMessage()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not make a request message - %v\", err)\n\t}\n\n\t// translate xml.Marshal XML into irods-understandable XML (among others, replace &#34; by &quot;)\n\terr = conn.PreprocessMessage(requestMessage)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not send preprocess message - %v\", err)\n\t}\n\n\terr = conn.SendMessage(requestMessage)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not send a request message - %v\", err)\n\t}\n\n\t// Server responds with results\n\tresponseMessage, err := conn.ReadMessage()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not receive a response message - %v\", err)\n\t}\n\n\t// translate irods-dialect XML into valid XML\n\terr = conn.PostprocessMessage(responseMessage)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not send postprocess message - %v\", err)\n\t}\n\n\terr = response.FromMessage(responseMessage)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not parse a response message - %v\", err)\n\t}\n\n\treturn nil\n}", "func (_Quiz *QuizSession) Answer() ([32]byte, error) {\n\treturn _Quiz.Contract.Answer(&_Quiz.CallOpts)\n}", "func (r Response) Reprompt(textOrSSML string) Response {\n\tr.Body.Reprompt = &reprompt{\n\t\tOutputSpeech: intentResponse{\n\t\t\tSSML: wrapSSML(textOrSSML),\n\t\t},\n\t}\n\treturn r\n}", "func (_Quiz *QuizSession) SendAnswer(_ans [32]byte) (*types.Transaction, error) {\n\treturn _Quiz.Contract.SendAnswer(&_Quiz.TransactOpts, _ans)\n}", "func parseAndExecInput(input string) {\n\t// Split the line into 2 tokens (cmd and message)\n\ttokens := 
strings.SplitN(input, \" \", 2)\n\tcmd := tokens[0]\n\n\tswitch {\n\tcase cmd == \"\":\n\t\tbreak\n\tcase cmd == \"?\":\n\t\tfmt.Printf(helpStr)\n\t\tbreak\n\tcase strings.ToLower(cmd) == \"/users\":\n\t\tfmt.Println(USERS)\n\t\tbreak\n\tcase strings.ToLower(cmd) == \"/exit\":\n\t\tos.Exit(1)\n\t\tbreak\n\tcase cmd[0] == '@':\n\t\t// TODO-WORKSHOP-STEP-9: Write code to sendChat. Example\n\t\t// \"@gautam hello golang\" should send a message to handle with name \"gautam\" and message \"hello golang\"\n\t\t// Invoke sendChat to send the message\n\t\tbreak\n\tcase strings.ToLower(cmd) == \"/help\":\n\t\tfmt.Println(helpStr)\n\t\tbreak\n\tdefault:\n\t\tfmt.Println(helpStr)\n\t}\n}", "func (_Quiz *QuizCallerSession) Answer() ([32]byte, error) {\n\treturn _Quiz.Contract.Answer(&_Quiz.CallOpts)\n}", "func (c *Client) Input(i Input) error {\n\t// The hash of the _current_ input\n\t// is needed for the request\n\tcur, err := c.CurrentInput()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdata := reqInput{\n\t\tRequest: reqModify,\n\t\tHash: cur.Hash,\n\t\tValue: i.Name,\n\t}\n\n\tvar body bytes.Buffer\n\tif err := json.NewEncoder(&body).Encode(data); err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(http.MethodPut, \"/menu_native/dynamic/tv_settings/devices/current_input\", &body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// response contains no specific fields\n\treturn c.do(req, &respWrapper{})\n}", "func RespondRequest(IdentityFile, Password, RequestFile, SharesFile, ResponseFile string, NumShares int, Fake bool, verbosity int) (string, error) {\n\tvar out []byte\n\tpubk, privk, err := ReadKey(IdentityFile, Password)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\trequest, err := ReadFile(RequestFile)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tmyShares, err := ReadFile(SharesFile)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tmyData, senderPubK, err := shares.VerifyShareRequestFromList(pubk, privk, request, myShares)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif bytes.Equal(senderPubK, pubk) {\n\t\treturn \"\", errors.New(\"Will not respond to self\")\n\t}\n\tout = printVerbose(out, verbosity, 2, fmt.Sprintf(\"Sender PubKey: %s\", hex.EncodeToString(senderPubK)))\n\tout = printVerbose(out, verbosity, 2, fmt.Sprintf(\"My PubKey: %s\", hex.EncodeToString(pubk)))\n\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"Share configuration:\"))\n\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"\\tSecretShare ID: %s\", hex.EncodeToString(myData.CommonMessageHeader.SigPubKeyHash)))\n\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"\\tComment: %s\", string(myData.CommonMessageHeader.Comment)))\n\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"\\tShares: %d\", len(myData.MemberMessages[0].Shares)))\n\tif myData.MemberMessages[0].Fake == nil {\n\t\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"\\tFake available: false\"))\n\t} else {\n\t\tout = printVerbose(out, verbosity, 3, fmt.Sprintf(\"\\tFake available: true\"))\n\t}\n\tif NumShares > len(myData.MemberMessages[0].Shares) {\n\t\tNumShares = len(myData.MemberMessages[0].Shares)\n\t}\n\tsharemessage, err := myData.GenShareReply(senderPubK, NumShares, Fake)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\terr = WriteFile(ResponseFile, sharemessage)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tout = printVerbose(out, verbosity, 1, fmt.Sprintf(\"Response content:\"))\n\tout = printVerbose(out, verbosity, 2, fmt.Sprintf(\"\\tSecretShare ID: %s\", 
hex.EncodeToString(myData.CommonMessageHeader.SigPubKeyHash)))\n\tif Fake {\n\t\tout = printVerbose(out, verbosity, 1, fmt.Sprintf(\"\\tContains only one fake share!\"))\n\t} else {\n\t\tout = printVerbose(out, verbosity, 1, fmt.Sprintf(\"\\tShares: %d\", NumShares))\n\t}\n\n\treturn string(out), nil\n}", "func (c *Connector) Request(route string, v interface{}, callback Callback) error {\n\n\n\tdata, err := json.Marshal(v)\n\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmsg := &message.Message{\n\t\tType: message.Request,\n\t\tRoute: route,\n\t\tID: c.mid,\n\t\tData: data,\n\t}\n\n\tc.setResponseHandler(c.mid, callback)\n\tif err := c.sendMessage(msg); err != nil {\n\t\tc.setResponseHandler(c.mid, nil)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func updateBot(ctx *gin.Context) {\n\tbody := struct {\n\t\tName string `json:\"name\"`\n\t\tChildDirected bool `json:\"child_directed\"`\n\t\tLocale string `json:\"locale\"`\n\t\tMessages []string `json:\"messages\"`\n\t\tUtterances []string `json:\"utterances\"`\n\t\tIntentName string `json:\"intent_name\"`\n\t\tAbortMessages []string `json:\"abort_messages\"`\n\t\tClarificationPrompts []string `json:\"clarification_prompts\"`\n\t\tVersion string `json:\"version\"`\n\t}{}\n\tif err := ctx.Bind(&body); err != nil { //validation error\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error(), \"message\": \"Validation Error.\", \"data\": nil})\n\t} else {\n\t\tcred := credentials.NewStaticCredentials(os.Getenv(\"ACCESS_KEY_ID\"), os.Getenv(\"SECRET_ACCESS_KEY\"), \"\")\n\t\tconfig := aws.NewConfig().WithCredentials(cred).WithRegion(os.Getenv(\"AWS_REGION\"))\n\t\tsess := session.Must(session.NewSession(config))\n\t\tsvc := lexmodelbuildingservice.New(sess)\n\t\tinput := &lexmodelbuildingservice.GetBotInput{\n\t\t\tName: aws.String(body.Name),\n\t\t\tVersionOrAlias: aws.String(body.Version), //use \"$LATEST\" for latest version\n\t\t}\n\t\tbot, err := svc.GetBot(input)\n\t\tif err != nil {\n\t\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t\t} else {\n\t\t\tvar messages []*lexmodelbuildingservice.Message\n\t\t\tfor _, val := range body.Messages {\n\t\t\t\tmessages = append(messages, &lexmodelbuildingservice.Message{\n\t\t\t\t\tContent: aws.String(val),\n\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t})\n\t\t\t}\n\t\t\tvar utterances []*string\n\t\t\tfor _, val := range body.Utterances {\n\t\t\t\tutterances = append(utterances, aws.String(val))\n\t\t\t}\n\t\t\tintent := &lexmodelbuildingservice.PutIntentInput{\n\t\t\t\tName: aws.String(body.IntentName),\n\t\t\t\tConclusionStatement: &lexmodelbuildingservice.Statement{\n\t\t\t\t\tMessages: messages,\n\t\t\t\t},\n\t\t\t\tSampleUtterances: utterances,\n\t\t\t\tFulfillmentActivity: &lexmodelbuildingservice.FulfillmentActivity{Type: aws.String(\"ReturnIntent\")},\n\t\t\t}\n\t\t\tresult, err := svc.PutIntent(intent)\n\t\t\tif err != nil {\n\t\t\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t\t\t} else {\n\t\t\t\tbot.Intents = append(bot.Intents, &lexmodelbuildingservice.Intent{IntentName: result.Name, IntentVersion: result.Version})\n\t\t\t\tvar clarificationPrompts []*lexmodelbuildingservice.Message\n\t\t\t\tfor _, val := range body.ClarificationPrompts {\n\t\t\t\t\tclarificationPrompts = append(clarificationPrompts, &lexmodelbuildingservice.Message{\n\t\t\t\t\t\tContent: aws.String(val),\n\t\t\t\t\t\tContentType: 
aws.String(\"PlainText\"),\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\tvar abortMessages []*lexmodelbuildingservice.Message\n\t\t\t\tfor _, val := range body.AbortMessages {\n\t\t\t\t\tabortMessages = append(abortMessages, &lexmodelbuildingservice.Message{\n\t\t\t\t\t\tContent: aws.String(val),\n\t\t\t\t\t\tContentType: aws.String(\"PlainText\"),\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\t_, err = svc.PutBot(&lexmodelbuildingservice.PutBotInput{\n\t\t\t\t\tChecksum: bot.Checksum,\n\t\t\t\t\tIntents: bot.Intents,\n\t\t\t\t\tName: bot.Name,\n\t\t\t\t\tChildDirected: bot.ChildDirected,\n\t\t\t\t\tLocale: bot.Locale,\n\t\t\t\t\tClarificationPrompt: &lexmodelbuildingservice.Prompt{Messages: clarificationPrompts, MaxAttempts: aws.Int64(5)},\n\t\t\t\t\tAbortStatement: &lexmodelbuildingservice.Statement{Messages: abortMessages},\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t\t\t\t} else {\n\t\t\t\t\tctx.JSON(http.StatusOK, gin.H{\"error\": nil, \"message\": \"Bot Updated with new intent.\", \"data\": nil})\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func (_BREMICO *BREMICOCaller) Request(opts *bind.CallOpts) (struct {\n\tValue *big.Int\n\tConfirmAmount *big.Int\n}, error) {\n\tret := new(struct {\n\t\tValue *big.Int\n\t\tConfirmAmount *big.Int\n\t})\n\tout := ret\n\terr := _BREMICO.contract.Call(opts, out, \"request\")\n\treturn *ret, err\n}", "func (m *Basic) Action(card *model.Card, face *int, startTime time.Time, payload interface{}) (bool, error) {\n\tquery := convertQuery(payload)\n\tlog.Debugf(\"Submit recieved for face %d: %v\\n\", *face, query)\n\tbutton := studyview.Button(query.Submit)\n\tlog.Debugf(\"Button %s pressed\\n\", button)\n\tswitch *face {\n\tcase QuestionFace:\n\t\t// Any input is fine; the only options are the right button, or 'ENTER' in a text field.\n\tcase AnswerFace:\n\t\tkey := buttonsKey(card, *face)\n\t\tif _, valid := buttonMaps[key][button]; !valid {\n\t\t\treturn false, errors.Errorf(\"Unexpected button press %s\", button)\n\t\t}\n\tdefault:\n\t\treturn false, errors.Errorf(\"Unexpected face %d\", *face)\n\t}\n\tswitch *face {\n\tcase QuestionFace:\n\t\t*face++\n\t\ttypedAnswers := query.TypedAnswers\n\t\tif len(typedAnswers) > 0 {\n\t\t\tresults := make(map[string]answer)\n\t\t\tfor _, fieldName := range card.Fields() {\n\t\t\t\tif typedAnswer, ok := typedAnswers[fieldName]; ok {\n\t\t\t\t\tfv := card.FieldValue(fieldName)\n\t\t\t\t\tif fv == nil {\n\t\t\t\t\t\tpanic(\"No field value for field\")\n\t\t\t\t\t}\n\t\t\t\t\tcorrect, d := diff.Diff(fv.Text, typedAnswer)\n\t\t\t\t\tresults[fieldName] = answer{\n\t\t\t\t\t\tText: d,\n\t\t\t\t\t\tCorrect: correct,\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tcard.Context = map[string]interface{}{\n\t\t\t\tcontextKeyTypedAnswers: results,\n\t\t\t}\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, nil\n\tcase AnswerFace:\n\t\tlog.Debugf(\"Old schedule: Due %s, Interval: %s, Ease: %f, ReviewCount: %d\\n\", card.Due, card.Interval, card.EaseFactor, card.ReviewCount)\n\t\tif err := model.Schedule(card, time.Now().Sub(startTime), quality(button)); err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tlog.Debugf(\"New schedule: Due %s, Interval: %s, Ease: %f, ReviewCount: %d\\n\", card.Due, card.Interval, card.EaseFactor, card.ReviewCount)\n\t\tcard.Context = nil // Clear any saved answers\n\t\treturn true, nil\n\t}\n\tlog.Printf(\"Unexpected face/button combo: %d / %+v\\n\", *face, button)\n\treturn false, nil\n}", "func main() 
{\n\tguildID := discord.GuildID(mustSnowflakeEnv(\"GUILD_ID\"))\n\n\ttoken := os.Getenv(\"BOT_TOKEN\")\n\tif token == \"\" {\n\t\tlog.Fatalln(\"No $BOT_TOKEN given.\")\n\t}\n\n\ts := state.New(\"Bot \" + token)\n\n\tapp, err := s.CurrentApplication()\n\tif err != nil {\n\t\tlog.Fatalln(\"Failed to get application ID:\", err)\n\t}\n\n\ts.AddHandler(func(e *gateway.InteractionCreateEvent) {\n\t\tvar resp api.InteractionResponse\n\t\tswitch d := e.Data.(type) {\n\t\tcase *discord.CommandInteraction:\n\t\t\tcontent := option.NewNullableString(\"Pong: \" + d.Options[0].String() + \"!\")\n\t\t\tresp = api.InteractionResponse{\n\t\t\t\tType: api.MessageInteractionWithSource,\n\t\t\t\tData: &api.InteractionResponseData{\n\t\t\t\t\tContent: content,\n\t\t\t\t},\n\t\t\t}\n\t\tcase *discord.AutocompleteInteraction:\n\t\t\tallChoices := api.AutocompleteStringChoices{\n\t\t\t\t{Name: \"Choice A\", Value: \"Choice A\"},\n\t\t\t\t{Name: \"Choice B\", Value: \"Choice B\"},\n\t\t\t\t{Name: \"Choice C\", Value: \"Choice C\"},\n\t\t\t\t{Name: \"Abc Def\", Value: \"Abcdef\"},\n\t\t\t\t{Name: \"Ghi Jkl\", Value: \"Ghijkl\"},\n\t\t\t\t{Name: \"Mno Pqr\", Value: \"Mnopqr\"},\n\t\t\t\t{Name: \"Stu Vwx\", Value: \"Stuvwx\"},\n\t\t\t}\n\t\t\tquery := strings.ToLower(d.Options[0].String())\n\t\t\tvar choices api.AutocompleteStringChoices\n\t\t\tfor _, choice := range allChoices {\n\t\t\t\tif strings.HasPrefix(strings.ToLower(choice.Name), query) ||\n\t\t\t\t\tstrings.HasPrefix(strings.ToLower(choice.Value), query) {\n\t\t\t\t\tchoices = append(choices, choice)\n\t\t\t\t}\n\t\t\t}\n\t\t\tresp = api.InteractionResponse{\n\t\t\t\tType: api.AutocompleteResult,\n\t\t\t\tData: &api.InteractionResponseData{\n\t\t\t\t\tChoices: &choices,\n\t\t\t\t},\n\t\t\t}\n\t\tdefault:\n\t\t\treturn\n\t\t}\n\n\t\tif err := s.RespondInteraction(e.ID, e.Token, resp); err != nil {\n\t\t\tlog.Println(\"failed to send interaction callback:\", err)\n\t\t}\n\t})\n\n\ts.AddIntents(gateway.IntentGuilds)\n\ts.AddIntents(gateway.IntentGuildMessages)\n\n\tif err := s.Open(context.Background()); err != nil {\n\t\tlog.Fatalln(\"failed to open:\", err)\n\t}\n\tdefer s.Close()\n\n\tlog.Println(\"Gateway connected. 
Getting all guild commands.\")\n\n\tcommands, err := s.GuildCommands(app.ID, guildID)\n\tif err != nil {\n\t\tlog.Fatalln(\"failed to get guild commands:\", err)\n\t}\n\n\tfor _, command := range commands {\n\t\tlog.Println(\"Existing command\", command.Name, \"found.\")\n\t}\n\n\tnewCommands := []api.CreateCommandData{\n\t\t{\n\t\t\tName: \"ping\",\n\t\t\tDescription: \"Basic ping command.\",\n\t\t\tOptions: []discord.CommandOption{\n\t\t\t\t&discord.StringOption{\n\t\t\t\t\tOptionName: \"text\",\n\t\t\t\t\tDescription: \"Text to echo back\",\n\t\t\t\t\tAutocomplete: true,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\tif _, err := s.BulkOverwriteGuildCommands(app.ID, guildID, newCommands); err != nil {\n\t\tlog.Fatalln(\"failed to create guild command:\", err)\n\t}\n\n\t// Block forever.\n\tselect {}\n}", "func (r DescribeInputDeviceRequest) Send(ctx context.Context) (*DescribeInputDeviceResponse, error) {\n\tr.Request.SetContext(ctx)\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp := &DescribeInputDeviceResponse{\n\t\tDescribeInputDeviceOutput: r.Request.Data.(*DescribeInputDeviceOutput),\n\t\tresponse: &aws.Response{Request: r.Request},\n\t}\n\n\treturn resp, nil\n}", "func SendMessage(ctx context.Context, cli sqsiface.SQSAPI, url string, body string) (*sqs.SendMessageOutput, error) {\n\tinput := &sqs.SendMessageInput{\n\t\tQueueUrl: aws.String(url),\n\t\tMessageBody: aws.String(body),\n\t}\n\n\treturn cli.SendMessageWithContext(ctx, input)\n}", "func Answer(e string) (int, bool) {\n\te = strings.Replace(e, \"?\", \" ? 0\", -1) // question mark is final token, with dummy value.\n\te = strings.Replace(e, \" is\", \"Is\", -1) // Simplify \"What is\" text\n\te = strings.Replace(e, \" by\", \"By\", -1) // Simplify \"multiplied by\" and \"divided by\" text\n\tt := strings.Split(e, \" \") // tokenise input.\n\topVals := opValStreamer(t) // initialise event feeder with tokens\n\treturn sm.run(\"start\", 0, opVals)\n}", "func (Meetquiz) Answer(sessionID string, question int, answerLabel string) {\n\tna := newAnswer{sessionID, question, answerLabel, make(chan error, 1)}\n\tctl.answer <- na\n\terr := <-na.rc\n\tcheckUserError(err)\n}", "func (e *GameEngine) IncomingTalk(from ID, text string) {\n\te.talk <- struct {\n\t\tID\n\t\tstring\n\t}{from, text}\n}", "func (c *MyContext) AskQuestions(rw web.ResponseWriter, req *web.Request) {\n\tvar question Question\n\tif err := c.decode(&question, req.Body); err != nil {\n\t\tc.render(http.StatusBadRequest, nil, rw)\n\t\treturn\n\t}\n\tif len(question.Body) == 0 {\n\t\tc.render(http.StatusBadRequest, Message{Message: \"Empty body\"}, rw)\n\t\treturn\n\t}\n\n\tif err := db.Insert(&question); err != nil {\n\t\tc.render(http.StatusBadRequest, nil, rw)\n\t\treturn\n\t}\n\tc.render(http.StatusOK, question, rw)\n}", "func send(g *gocui.Gui, inputView *gocui.View) error {\n\n\t// Get rid of warnings\n\t_ = g\n\n\tinput := strings.Trim(inputView.Buffer(), \"\\n\")\n\n\tswitch {\n\tcase strings.HasPrefix(input, \"/\"):\n\n\t\t// Interpret \"input\" as command\n\t\tdisplayText(strings.Trim(fmt.Sprintf(\"%s%v\\n\", prompt, input), \"\\n\"))\n\n\t\texecuteCommand(strings.TrimLeft(input, \"/\"))\n\tcase strings.TrimSpace(input) == \"\":\n\t\tdisplayText(prompt)\n\tdefault:\n\t\tsendMessage(strings.Split(input, \" \"))\n\t\tdisplayText(strings.Trim(fmt.Sprintf(\"%s%v\\n\", prompt, input), \"\\n\"))\n\n\t}\n\n\t// Clear the \"input\" and reset the cursor\n\tinputView.Clear()\n\tif err := inputView.SetCursor(0, 0); err != nil 
{\n\t\tlog.Fatal(err)\n\t}\n\treturn nil\n}", "func Handler(ctx context.Context, input Input) (Response, error) {\n\tvar buf bytes.Buffer\n\tToken := os.Getenv(\"BOT_KEY\")\n\tdg, err := discordgo.New(\"Bot \" + Token)\n\tif err != nil {\n\t\tfmt.Println(\"Error creating bot reason: \", err)\n\t}\n\n\tfmt.Println(input.ChannelID)\n\n\tclient := dg.Open()\n\tif client != nil {\n\t\tfmt.Println(\"Error opening client session. Reason: \", client)\n\t}\n\n\trandom, err := dg.ChannelMessageSend(input.ChannelID, input.Text)\n\tif err != nil {\n\t\tfmt.Println(\"Message send failed, readin: \", err)\n\t}\n\tfmt.Println(random)\n\tbody, err := json.Marshal(map[string]interface{}{\n\t\t\"message\": input.Text,\n\t})\n\tif err != nil {\n\t\treturn Response{StatusCode: 404}, err\n\t}\n\tjson.HTMLEscape(&buf, body)\n\n\tresp := Response{\n\t\tStatusCode: 200,\n\t\tIsBase64Encoded: false,\n\t\tBody: buf.String(),\n\t\tHeaders: map[string]string{\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\"X-MyCompany-Func-Reply\": \"hello-handler\",\n\t\t},\n\t}\n\n\treturn resp, nil\n}", "func FlowUtilSay(paramMap interface{}) (returnMap interface{}, err error) {\n\t/**\n\t ** 开启流程会话接口\n\t * 设置HTTP REST POST请求\n\t * 1.使用http协议\n\t * 2.调用/robot/say.do接口开启对话\n\t * 4.设置必须请求参数:user_id,robot_id,input,token\n\t */\n\tlog.Print(paramMap.(map[string]interface{}))\n\treq := HttpRequest.NewRequest()\n\t//req.SetHeaders(map[string]string{\"Authorization\": FlowTokenInfo})\n\treq.SetHeaders(map[string]string{\"Content-Type\": \"application/json\"})\n\tresp, err := req.Post(FlowSayDoUrl, paramMap)\n\tif err != nil {\n\t\tfmt.Printf(\" post err:%s\", err)\n\t}\n\tvar dat map[string]interface{}\n\tbody, err := resp.Body()\n\t_ = json.Unmarshal(body, &dat)\n\tif _, ok := dat[\"successful\"]; ok {\n\t\treturnMap = dat[\"info\"].(map[string]interface{})\n\t\treturn\n\t}\n\treturn\n}", "func AskYes(question string, defaultYes bool) (isYes bool) {\n\tdefaultStrVal := \"yes\"\n\tif !defaultYes {\n\t\tdefaultStrVal = \"no\"\n\t}\n\tfmt.Print(question, \" [\", defaultStrVal, \"]: \")\n\treader := bufio.NewReader(os.Stdin)\n\treply, _ := reader.ReadString('\\n')\n\tDefaultIfEmptyStr(&reply, defaultStrVal)\n\tif IsEqStr(reply, \"yes\") {\n\t\treturn true\n\t}\n\treturn\n}", "func IsValidInput(webhookMessage MessageCard, webhookURL string) (bool, error) {\n\t// validate url\n\tif valid, err := IsValidWebhookURL(webhookURL); !valid {\n\t\treturn false, err\n\t}\n\n\t// validate message\n\tif valid, err := IsValidMessageCard(webhookMessage); !valid {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func Cowsay(w http.ResponseWriter, r *http.Request) {\n\t\t\tlog := log15.New()\n\t\t\tdefer r.Body.Close()\n\t\t\terr := r.ParseForm()\n\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cannot parse form\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t\t\"Form\", fmt.Sprintf(\"%+v\", r.Form),\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttext := r.PostFormValue(\"text\")\n\n\t\t\tsay, err := cowsay.Say(\n\t\t\t\tcowsay.Phrase(text),\n\t\t\t\tcowsay.Type(\"default\"),\n\t\t\t)\n\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cowsay error\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcodeMark := []byte(\"```\")\n\t\t\tout := append(codeMark, say...)\n\t\t\tout = append(out, codeMark...)\n\t\t\tback := CowsayResponse{\n\t\t\t\tResponse_type: \"in_channel\",\n\t\t\t\tText: 
string(out),\n\t\t\t}\n\n\t\t\tresp, err := json.Marshal(back)\n\t\t\tif err != nil {\n\t\t\t\tlog.Error(\n\t\t\t\t\t\"Cannot marshal response\",\n\t\t\t\t\t\"Error\", err,\n\t\t\t\t\t\"Response\", string(resp),\n\t\t\t\t)\n\t\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tw.Header().Set(\"content-type\", \"application/json\")\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\tw.Write(resp)\n\n}", "func (am *ACMEManager) askUserAgreement(agreementURL string) bool {\n\tam.promptUserAgreement(agreementURL)\n\tfmt.Print(\"Do you agree to the terms? (y/n): \")\n\n\treader := bufio.NewReader(stdin)\n\tanswer, err := reader.ReadString('\\n')\n\tif err != nil {\n\t\treturn false\n\t}\n\tanswer = strings.ToLower(strings.TrimSpace(answer))\n\n\treturn answer == \"y\" || answer == \"yes\"\n}", "func (_Quiz *QuizSession) Question() (string, error) {\n\treturn _Quiz.Contract.Question(&_Quiz.CallOpts)\n}", "func reply(c *sknet.Context, r interface{}) {\n\tc.Set(\"response\", r)\n}", "func (c Client) guessRequest(guess string) {\n\terr := c.encodeRequest(messages.PlayerReq{Action: game.Guess, Value: guess})\n\tif err != nil {\n\t\tfmt.Fprintf(c.Output, \"unexpected error: %v \\n\", err)\n\t}\n\tvar resp messages.GameStateResp\n\terr = c.decodeResponse(&resp)\n\tif err != nil {\n\t\tfmt.Fprintf(c.Output, \"Unexpected error: %v\", err)\n\t}\n\n\tif resp.Error != nil {\n\t\tfmt.Fprintf(c.Output, \"Error: %s \\n\", resp.Error.Message)\n\t} else {\n\t\tfmt.Fprintf(c.Output, \"Guess the hero: %s \\n\", resp.State.WordToGuess)\n\t\tfmt.Fprintln(c.Output, drawing.Display[len(resp.State.CharsTried)])\n\t\tfmt.Fprintf(c.Output, \"Characters tried: %s \\n\", strings.Join(resp.State.CharsTried, \" - \"))\n\n\t\tif resp.State.Status == game.GameOver {\n\t\t\tfmt.Fprintln(c.Output, \"*** GAME OVER ***\")\n\t\t}\n\n\t\tif resp.State.Status == game.Won {\n\t\t\tfmt.Fprintln(c.Output, \"*** YOU WIN ***\")\n\t\t}\n\t}\n}", "func AboutToSayIt(ctx context.Context, m messageService.MessageServiceClient, text string) (*messageService.Response, error) {\n\trequest := &messageService.Request{\n\t\tText: text,\n\t\tSubtext: \"New Message\",\n\t}\n\tr, err := m.SayIt(ctx, request)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn r, nil\n}", "func (ns *NSConn) Ask(ctx context.Context, event string, body []byte) (Message, error) {\n\tif ns == nil {\n\t\treturn Message{}, ErrWrite\n\t}\n\n\treturn ns.Conn.Ask(ctx, Message{Namespace: ns.namespace, Event: event, Body: body})\n}", "func Ask(label, startString string) (string, error) {\n\tp := Prompt{\n\t\tBasicPrompt: BasicPrompt{\n\t\t\tLabel: label,\n\t\t\tDefault: startString,\n\t\t},\n\t}\n\treturn p.Run()\n}", "func (this *Client) handleInput(text string) {\n\tif this.server {\n\t\t// We are the server, so send the message to all peers\n\t\tmsg := CreateMessage(MESSAGE_SHOW, text, this.nick)\n\t\tmsg.Send(this.connections)\n\t} else {\n\t\t// We are a client, so send the message to the server\n\t\tmsg := CreateMessage(MESSAGE_PUBLIC, text, this.nick)\n\t\t// TODO: Clean this up since we know there's only going to be one connection (to the server)\n\t\tmsg.Send(this.connections)\n\t}\n}", "func userInput() string {\n\tmsg, err := reader.ReadBytes('\\n')\n\tcheck(err)\n\tif len(msg) == 0 {\n\t\tmsg = append(msg, 32)\n\t}\n\treturn string(msg)\n}", "func AnswerMessage(ctx *gin.Context) {\n\tindex := rand.Int() % len(answers)\n\tchannel, ok := ctx.GetPostForm(\"channel_id\")\n\tif !ok {\n\t\tlog.Error(errors.New(\"no channel 
provided\"))\n\t\treturn\n\t}\n\n\tanswer := Answer{\n\t\tResponseType: \"in_channel\",\n\t\tChannel: channel,\n\t\tText: answers[index],\n\t}\n\n\tctx.Header(\"Content-Type\", \"application/json\")\n\tctx.Header(\"Authorization\", fmt.Sprintf(\"Bearer %s\", viper.GetString(\"token\")))\n\tctx.JSON(http.StatusOK, answer)\n}", "func (a *AGI) Answer() error {\n\treturn a.Command(\"ANSWER\").Err()\n}", "func (_Quiz *QuizCallerSession) Question() (string, error) {\n\treturn _Quiz.Contract.Question(&_Quiz.CallOpts)\n}", "func (q *Query) Prompt() {\n\tfmt.Printf(\"\\n%s [%s]: \", q.Question, q.DefaultValue)\n\tvar response string\n\tfmt.Scanln(&response)\n\tq.Answer = response\n\n}", "func (am *ACMEIssuer) askUserAgreement(agreementURL string) bool {\n\tam.promptUserAgreement(agreementURL)\n\tfmt.Print(\"Do you agree to the terms? (y/n): \")\n\n\treader := bufio.NewReader(stdin)\n\tanswer, err := reader.ReadString('\\n')\n\tif err != nil {\n\t\treturn false\n\t}\n\tanswer = strings.ToLower(strings.TrimSpace(answer))\n\n\treturn answer == \"y\" || answer == \"yes\"\n}", "func (act *CreateAction) Input() error {\n\tif err := act.verify(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_BS_PARAMS_INVALID, err.Error())\n\t}\n\treturn nil\n}", "func (e *EncryptedChat) AsInput() *InputEncryptedChat {\n\tvalue := new(InputEncryptedChat)\n\tvalue.ChatID = e.GetID()\n\tvalue.AccessHash = e.GetAccessHash()\n\n\treturn value\n}", "func getInput(request string) string {\n\treader := bufio.NewReader(os.Stdin)\n\tfmt.Println(\"\\n\" + request)\n\tusrInput, _ := reader.ReadString('\\n')\n\treturn strings.TrimSpace(usrInput)\n}", "func (a *acssImpl) Input(input gpa.Input) gpa.OutMessages {\n\tif a.me != a.dealer {\n\t\tpanic(errors.New(\"only dealer can initiate the sharing\"))\n\t}\n\tif input == nil {\n\t\tpanic(errors.New(\"we expect kyber.Scalar as input\"))\n\t}\n\treturn a.handleInput(input.(kyber.Scalar))\n}", "func Hey(input string) string {\n\tquestionRe := regexp.MustCompile(\"[?]+[\\t\\n\\f\\r ]*$\")\n\tdigitsRe := regexp.MustCompile(\"[[:digit:]]+\")\n\tsilenceRe := regexp.MustCompile(\"^[\\n\\r\\t ]+$\")\n\tnormalRe := regexp.MustCompile(\"[[:alpha:]]+\")\n\n\tif input == strings.ToUpper(input) &&\n\t\tlen(normalRe.FindAllStringSubmatch(input, -1)) > 0 {\n\t\treturn responseYelling\n\t} else if len(questionRe.FindAllStringSubmatch(input, -1)) > 0 {\n\t\treturn responseQuestion\n\t} else if len(silenceRe.FindAllStringSubmatch(input, -1)) > 0 &&\n\t\tlen(digitsRe.FindAllStringSubmatch(input, -1)) == 0 || input == \"\" {\n\t\treturn responseSilence\n\t}\n\n\treturn responseDefault\n}", "func askQuestionToUser(isImportant bool, question string) string {\n\tprint(\"\\n\")\n\tif isImportant {\n\t\tcolor.Style{color.Cyan, color.OpBold}.Print(question)\n\t} else {\n\t\tcolor.Style{color.Cyan}.Print(question)\n\t}\n\tinput := bufio.NewScanner(os.Stdin)\n\tinput.Scan()\n\treturn input.Text()\n}", "func (s *SmartContract) Invoke(APIstub shim.ChaincodeStubInterface) sc.Response {\n\t// Retrieve the requested Smart Contract function and arguments\n\tfunction, args := APIstub.GetFunctionAndParameters()\n\t// Route to the appropriate handler function to interact with the ledger appropriately\n\tswitch function {\n\tcase \"initToken\":\n\t\treturn s.initLedger(APIstub, args)\n\tcase \"queryTotalAmount\":\n\t\treturn s.queryTotalAmount(APIstub)\n\tcase \"queryTokenName\":\n\t\treturn s.queryTokenName(APIstub)\n\tcase \"queryTokenSymbol\":\n\t\treturn s.queryTokenSymbol(APIstub)\n\tcase 
\"queryReserve\":\n\t\treturn s.queryReserve(APIstub)\n\tcase \"createAccount\":\n\t\treturn s.createAccount(APIstub, args)\n\tcase \"balanceOf\":\n\t\treturn s.queryValue(APIstub, args)\n\tcase \"transfer\":\n\t\treturn s.transfer(APIstub, args)\n\tcase \"buyToken\":\n\t\tnewArgs := []string{\"admin\",args[1],args[0],args[2]}\n\t\treturn s.transfer(APIstub, newArgs)\n\tcase \"sellToken\":\n\t\tnewArgs := []string{args[0],args[1],\"admin\",args[2]}\n\t\treturn s.transfer(APIstub, newArgs)\n\tcase \"mintToken\":\n\t\treturn s.mintTokens(APIstub, args)\n\tcase \"burnToken\":\n\t\treturn s.burnTokens(APIstub, args)\n\tcase \"approve\":\n\t\treturn s.approve(APIstub, args)\n\tcase \"allowance\":\n\t\treturn s.queryAllowance(APIstub, args)\n\tcase \"increaseAllowance\":\n\t\treturn s.increaseAllowance(APIstub, args)\n\tcase \"decreaseAllowance\":\n\t\treturn s.decreaseAllowance(APIstub, args)\n\tcase \"transferFrom\":\n\t\treturn s.transferFrom(APIstub, args)\n\t}\n\treturn shim.Error(\"Invalid Smart Contract function name.\")\n}", "func (act *AuthorizeAction) Input() error {\n\tif err := act.verify(); err != nil {\n\t\treturn act.Err(pbcommon.ErrCode_E_AUTH_PARAMS_INVALID, err.Error())\n\t}\n\treturn nil\n}", "func (bot *Bot) Chat(userRequest, userFeedback chan Message) error {\n\t// Get update channel to receive messages from user\n\tupdates, err := bot.API.GetUpdatesChan(bot.UpdateConfig)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Clear all unprocessed updates after certain period of time\n\ttime.Sleep(time.Millisecond * 500)\n\tupdates.Clear()\n\n\t// Waiting for messages from user via update channel,\n\t// send user request via channel to handler\n\t// and waiting for its reply on the feedback channel to send it\n\t// back to user\n\tfor {\n\t\tselect {\n\t\tcase upd := <-updates:\n\t\t\trq := make(map[string]string)\n\t\t\tif upd.Message.IsCommand() {\n\t\t\t\trq[upd.Message.Command()] = upd.Message.CommandArguments()\n\t\t\t} else {\n\t\t\t\trq[\"\"] = upd.Message.Text\n\t\t\t}\n\n\t\t\turq := Message{\n\t\t\t\tChatID: upd.Message.Chat.ID,\n\t\t\t\tUserID: upd.Message.From.ID,\n\t\t\t\tUserName: upd.Message.From.UserName,\n\t\t\t\tRequest: rq,\n\t\t\t}\n\t\t\tuserRequest <- urq\n\t\tcase ufb := <-userFeedback:\n\t\t\treply := tgbot.NewMessage(ufb.ChatID, ufb.Reply)\n\t\t\tbot.API.Send(reply)\n\t\t}\n\t}\n}", "func (c *ChoiceImpl) AskUser() {\n\tchoice := \"\"\n\tfor !c.choiceIsValid(choice) {\n\t\tfmt.Println(\"----- Available options:\")\n\t\tc.displayActions()\n\t\tfmt.Println(\"----- Your choice:\")\n\t\tchoice = c.getUsersChoice()\n\t}\n\ta := c.getActionByName(choice)\n\ta.execute()\n}", "func (t *TestClientWrapper) Say(in *proto.SayReq, timeout time.Duration) (*proto.SayResp, error) {\n\tin.Content = blancePolicy.String()\n\tv, err := t.caller.InvokeWithArgs2(\"Say\", []interface{}{in, []grpc.CallOption{}}, timeout)\n\tif err != nil {\n\t\tlogtest.Error(logs.Error(err))\n\t\treturn nil, err\n\t}\n\n\tresp, ok := v.(*proto.SayResp)\n\tif !ok {\n\t\tfmt.Printf(\"resp : %v\\r\\n\", resp)\n\t\treturn nil, rpcclient.ErrReturnValueCanNotConvertToStruct\n\t}\n\tt.mu.Lock()\n\tt.calls[resp.Content] = t.calls[resp.Content] + 1\n\tt.mu.Unlock()\n\treturn resp, nil\n}", "func (j *JustAddPowerReciever) SetAudioVideoInput(ctx context.Context, output, input string) error {\n\tj.Log.Debug(\"Setting receiver to transmitter\")\n\n\tgo j.checkTransmitterChannel(input)\n\n\tj.Log.Debug(\"Routing from, to\", zap.String(\"from\", j.Address), zap.String(\"to\", input))\n\n\tipAddress, err := 
net.ResolveIPAddr(\"ip\", input)\n\tipAddress.IP = ipAddress.IP.To4()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error when resolving IP Address [%s]: %w\", input, err)\n\t}\n\n\tchannel := fmt.Sprintf(\"%v\", ipAddress.IP[3])\n\n\tj.Log.Debug(\"channel\", zap.String(\"channel\", channel))\n\n\tresult, errrr := justAddPowerRequest(fmt.Sprintf(\"http://%s/cgi-bin/api/command/channel\", j.Address), channel, \"POST\")\n\n\tif errrr != nil {\n\t\treturn fmt.Errorf(\"Error when making request: %w\", errrr)\n\t}\n\n\tvar jsonResult JustAddPowerChannelResult\n\terr = json.Unmarshal(result, &jsonResult)\n\n\tj.Log.Debug(\"Result\", zap.Any(\"jsonResult\", jsonResult))\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error when unpacking json\")\n\t}\n\treturn nil\n}", "func (_Quiz *QuizTransactor) SendAnswer(opts *bind.TransactOpts, _ans [32]byte) (*types.Transaction, error) {\n\treturn _Quiz.contract.Transact(opts, \"sendAnswer\", _ans)\n}", "func (t *AnswerChaincode) Query(stub shim.ChaincodeStubInterface) pb.Response {\n\treturn shim.Error(\"Unknown supported call - Query()\")\n}", "func (r *RecipeInfo) SayCurrentStep() error {\n\turl := os.Getenv(\"NLP_API\") + \"/send_message/\" + r.CurrentStep.Data.String\n\tlog.Infof(\"Sending to NLP: %s\", r.CurrentStep.Data.String)\n\n\treq, err := http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tres, err := http.DefaultClient.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer res.Body.Close()\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tlog.Infof(\"NLP Response: %s\", body)\n\treturn err\n}", "func parseRawInput(line string) (IncomingData, bool) {\n\tif len(line) == 0 {\n\t\treturn IncomingData{}, false\n\t}\n\n\tsegments := strings.Split(strings.TrimSpace(line), \" \")\n\tresponseCode, err := strconv.Atoi(segments[1])\n\tif err != nil {\n\t\treturn parseNonNumericReply(segments)\n\t}\n\n\treturn parseNumericReply(responseCode, segments)\n}", "func (r SignalWorkflowExecutionRequest) Send(ctx context.Context) (*SignalWorkflowExecutionResponse, error) {\n\tr.Request.SetContext(ctx)\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp := &SignalWorkflowExecutionResponse{\n\t\tSignalWorkflowExecutionOutput: r.Request.Data.(*SignalWorkflowExecutionOutput),\n\t\tresponse: &aws.Response{Request: r.Request},\n\t}\n\n\treturn resp, nil\n}", "func (_Quiz *QuizCaller) Question(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Quiz.contract.Call(opts, out, \"question\")\n\treturn *ret0, err\n}", "func Send(address string, req *pb.SaluteRequest, ctx context.Context) (*pb.SaluteResponse, error) {\n\t//implement here!\n\t//Don't forget to close the connection on exit!\n\n}", "func encodeGRPCSayHelloRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(endpoints.SayHelloRequest)\n\treturn &pb.SayHelloRequest{Saidword: req.Saidword, Want: req.Want}, nil\n\t\n\t\n}", "func (rpc *RPC) RequestMessage(args *RequestMessageArgs, reply *RequestMessageReply) error {\n\tans := make(map[string]int)\n\tfor k, v := range rpc.General.Answers {\n\t\tif len(k) == args.Turn && !strings.Contains(k, strconv.Itoa(args.Me)){\n\t\t\tif rpc.General.Traitor {\n\t\t\t\tans[k + strconv.Itoa(rpc.General.Me)] = rand.Intn(2)\n\t\t\t} else {\n\t\t\t\tans[k + strconv.Itoa(rpc.General.Me)] = v\n\t\t\t}\n\t\t}\n\t}\n\treply.Answers = ans\n\treturn nil\n}", "func (a *Actor) Send(m string) { a.input <- m }", "func WelcomePrompt(echoRequest 
*skillserver.EchoRequest) (response *skillserver.EchoResponse) {\n\tresponse = skillserver.NewEchoResponse()\n\n\tresponse.OutputSpeech(\"Welcome Guardian, would you like to equip max light, unload engrams, or transfer an item to a specific character, \" +\n\t\t\"find out how many of an item you have, or ask about Trials of Osiris?\").\n\t\tReprompt(\"Do you want to equip max light, unload engrams, transfer an item, find out how much of an item you have, or ask about Trials of Osiris?\").\n\t\tEndSession(false)\n\n\treturn\n}" ]
[ "0.6541768", "0.6091686", "0.5845148", "0.54016984", "0.5307556", "0.52704453", "0.5205231", "0.5100963", "0.5037373", "0.5035545", "0.4984164", "0.4983344", "0.4979458", "0.49784702", "0.4967835", "0.4950544", "0.48634472", "0.48553973", "0.48494896", "0.48332793", "0.48189175", "0.47890595", "0.47805548", "0.47599924", "0.47572914", "0.4754415", "0.47541752", "0.47534746", "0.47531503", "0.4749436", "0.47428897", "0.47397438", "0.47139198", "0.4703845", "0.47007278", "0.46992543", "0.4695352", "0.46925032", "0.46837676", "0.46692795", "0.46653047", "0.4635833", "0.46259138", "0.4604553", "0.46043473", "0.4603799", "0.46003047", "0.459675", "0.45810845", "0.45807928", "0.45755014", "0.4556808", "0.45563293", "0.45469496", "0.454455", "0.4543629", "0.45395184", "0.4537567", "0.45314008", "0.4509602", "0.4509558", "0.45069787", "0.45032874", "0.44991934", "0.44972503", "0.44964296", "0.44937482", "0.4491676", "0.44906926", "0.44896868", "0.44863424", "0.44833627", "0.44833344", "0.4478587", "0.44768843", "0.44704255", "0.4468837", "0.44687393", "0.4455179", "0.44433066", "0.44407037", "0.4440702", "0.4434098", "0.44314194", "0.44301608", "0.44288918", "0.44172752", "0.4416051", "0.44112194", "0.4408107", "0.4399391", "0.43970972", "0.43945086", "0.4393108", "0.43804228", "0.43802506", "0.43775", "0.4369956", "0.43677315", "0.43640882", "0.43623844" ]
0.0
-1
DeleteRepository provides the actions to do deleteRepositoryCMD
func DeleteRepository(cmd *cobra.Command, args []string) { if len(args) == 0 { fmt.Printf("delete repository need specific repo name\n") os.Exit(1) } req := &helmmanager.DeleteRepositoryReq{ ProjectID: &flagProject, Name: common.GetStringP(args[0]), } c := newClientWithConfiguration() if err := c.Repository().Delete(cmd.Context(), req); err != nil { fmt.Printf("delete repository failed, %s\n", err.Error()) os.Exit(1) } fmt.Printf("success to delete repository %s under project %s\n", req.GetName(), req.GetProjectID()) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (client ArtifactsClient) deleteRepository(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) {\n\n\thttpRequest, err := request.HTTPRequest(http.MethodDelete, \"/repositories/{repositoryId}\", binaryReqBody, extraHeaders)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar response DeleteRepositoryResponse\n\tvar httpResponse *http.Response\n\thttpResponse, err = client.Call(ctx, &httpRequest)\n\tdefer common.CloseBodyIfValid(httpResponse)\n\tresponse.RawResponse = httpResponse\n\tif err != nil {\n\t\tapiReferenceLink := \"https://docs.oracle.com/iaas/api/#/en/registry/20160918/Repository/DeleteRepository\"\n\t\terr = common.PostProcessServiceError(err, \"Artifacts\", \"DeleteRepository\", apiReferenceLink)\n\t\treturn response, err\n\t}\n\n\terr = common.UnmarshalResponse(httpResponse, &response)\n\treturn response, err\n}", "func (o *Repository) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif o == nil {\n\t\treturn 0, errors.New(\"models: no Repository provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), repositoryPrimaryKeyMapping)\n\tsql := \"DELETE FROM `repositories` WHERE `id`=?\"\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args...)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete from repositories\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by delete for repositories\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn rowsAff, nil\n}", "func RepoDelete(w http.ResponseWriter, r *http.Request, u *User, repo *Repo) error {\n\t// the user must confirm their password before deleting\n\tpassword := r.FormValue(\"password\")\n\tif err := u.ComparePassword(password); err != nil {\n\t\treturn RenderError(w, err, http.StatusBadRequest)\n\t}\n\n\t// delete the repo\n\tif err := database.DeleteRepo(repo.ID); err != nil {\n\t\treturn err\n\t}\n\n\thttp.Redirect(w, r, \"/dashboard\", http.StatusSeeOther)\n\treturn nil\n}", "func delete_repo(repoUrl string) error {\n\tvar arg0 = \"rm\"\n\tvar arg1 = \"-rf\"\n\tvar arg2 = CLONES_DIR + \"/\" + UrlToDir(repoUrl)\n\n\tcmd := exec.Command(arg0, arg1, arg2)\n\terr := cmd.Run()\n\n\tif err != nil {\n\t\tfmt.Print(\"an error occured during the deletion 'rm -rf' of: \" + arg2 + \". 
\")\n\t\tfmt.Printf(\"error is: %s\\n\", err)\n\t\treturn err\n\t}\n\n\t//fmt.Println(\"local git repo deleted successfully\")\n\treturn nil\n}", "func (client ArtifactsClient) DeleteRepository(ctx context.Context, request DeleteRepositoryRequest) (response DeleteRepositoryResponse, err error) {\n\tvar ociResponse common.OCIResponse\n\tpolicy := common.NoRetryPolicy()\n\tif client.RetryPolicy() != nil {\n\t\tpolicy = *client.RetryPolicy()\n\t}\n\tif request.RetryPolicy() != nil {\n\t\tpolicy = *request.RetryPolicy()\n\t}\n\tociResponse, err = common.Retry(ctx, request, client.deleteRepository, policy)\n\tif err != nil {\n\t\tif ociResponse != nil {\n\t\t\tif httpResponse := ociResponse.HTTPResponse(); httpResponse != nil {\n\t\t\t\topcRequestId := httpResponse.Header.Get(\"opc-request-id\")\n\t\t\t\tresponse = DeleteRepositoryResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId}\n\t\t\t} else {\n\t\t\t\tresponse = DeleteRepositoryResponse{}\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\tif convertedResponse, ok := ociResponse.(DeleteRepositoryResponse); ok {\n\t\tresponse = convertedResponse\n\t} else {\n\t\terr = fmt.Errorf(\"failed to convert OCIResponse into DeleteRepositoryResponse\")\n\t}\n\treturn\n}", "func (m MockRepositoryStore) DeleteRepository(ctx context.Context, virtualStorage, relativePath string) (string, []string, error) {\n\tif m.DeleteRepositoryFunc == nil {\n\t\treturn \"\", nil, nil\n\t}\n\n\treturn m.DeleteRepositoryFunc(ctx, virtualStorage, relativePath)\n}", "func DeleteAppRepository(kubeHandler kube.AuthHandler) func(w http.ResponseWriter, req *http.Request) {\n\treturn func(w http.ResponseWriter, req *http.Request) {\n\t\trequestNamespace, requestCluster := getNamespaceAndCluster(req)\n\t\trepoName := mux.Vars(req)[\"name\"]\n\t\ttoken := auth.ExtractToken(req.Header.Get(\"Authorization\"))\n\n\t\tclientset, err := kubeHandler.AsUser(token, requestCluster)\n\t\tif err != nil {\n\t\t\treturnK8sError(err, w)\n\t\t\treturn\n\t\t}\n\n\t\terr = clientset.DeleteAppRepository(repoName, requestNamespace)\n\t\tif err != nil {\n\t\t\treturnK8sError(err, w)\n\t\t}\n\t}\n}", "func (m *Manager) Delete(ctx context.Context, name string) error {\n\tquery := \"select delete_chart_repository($1::uuid, $2::text)\"\n\tuserID := ctx.Value(hub.UserIDKey).(string)\n\t_, err := m.db.Exec(ctx, query, userID, name)\n\treturn err\n}", "func (s *RepositoryService) Delete(rs app.RequestScope, name string) (*models.Repository, error) {\n\trepository, err := s.dao.Get(rs.DB(), name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = s.dao.Delete(rs.DB(), name)\n\treturn repository, err\n}", "func DeleteRepo(cmdCtx *cobra.Command, args []string) {\n\tif len(args) == 0 {\n\t\tfmt.Println(\"Please enter a repo name ex. 
nere delete dopeRepo\")\n\t\tos.Exit(1)\n\t}\n\tctx := context.Background()\n\tts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: args[1]})\n\tclient := oauth2.NewClient(ctx, ts)\n\n\tuserReq, err := http.NewRequest(\"GET\", fmt.Sprintf(\"%s/user\", baseUrl), nil)\n\tcheck(err)\n\tuserResp, err := client.Do(userReq)\n\tcheck(err)\n\tbody, err := ioutil.ReadAll(userResp.Body)\n\tcheck(err)\n\tuser := User{}\n\tjson.Unmarshal(body, &user)\n\treq, err := http.NewRequest(\"DELETE\", fmt.Sprintf(\"%s/repos/%s/%s\", baseUrl, user.Login, args[0]), nil)\n\tcheck(err)\n\t_, err = client.Do(req)\n\tcheck(err)\n\tfmt.Printf(\"Success: %s was deleted\\n\", args[0])\n}", "func (s *RepositoryClient) DeleteRepository(repoName string) error {\n\tresp, _, errs := s.NewRequest(gorequest.DELETE, \"/\"+repoName).\n\t\tEnd()\n\treturn CheckResponse(errs, resp, 200)\n}", "func HelmReposDelete(c *gin.Context) {\n\tlog := logger.WithFields(logrus.Fields{\"tag\": \"HelmReposDelete\"})\n\tlog.Info(\"Delete helm repository\")\n\n\tclusterName, ok := GetCommonClusterNameFromRequest(c)\n\tif ok != true {\n\t\treturn\n\t}\n\n\trepoName := c.Param(\"name\")\n\tlog.Debugln(\"repoName:\", repoName)\n\n\terr := helm.ReposDelete(clusterName, repoName)\n\tif err != nil {\n\t\tlog.Error(\"Error during get helm repo delete.\", err.Error())\n\t\tif err.Error() == helm.ErrRepoNotFound.Error() {\n\t\t\tc.JSON(http.StatusOK, htype.DeleteResponse{\n\t\t\t\tStatus: http.StatusOK,\n\t\t\t\tMessage: err.Error(),\n\t\t\t\tName: repoName})\n\t\t\treturn\n\n\t\t}\n\t\tc.JSON(http.StatusBadRequest, htype.ErrorResponse{\n\t\t\tCode: http.StatusBadRequest,\n\t\t\tMessage: \"error deleting helm repos\",\n\t\t\tError: err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, htype.DeleteResponse{\n\t\tStatus: http.StatusOK,\n\t\tMessage: \"resource deleted successfully.\",\n\t\tName: repoName})\n\treturn\n}", "func (client ArtifactsClient) deleteContainerRepository(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) {\n\n\thttpRequest, err := request.HTTPRequest(http.MethodDelete, \"/container/repositories/{repositoryId}\", binaryReqBody, extraHeaders)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar response DeleteContainerRepositoryResponse\n\tvar httpResponse *http.Response\n\thttpResponse, err = client.Call(ctx, &httpRequest)\n\tdefer common.CloseBodyIfValid(httpResponse)\n\tresponse.RawResponse = httpResponse\n\tif err != nil {\n\t\tapiReferenceLink := \"https://docs.oracle.com/iaas/api/#/en/registry/20160918/ContainerRepository/DeleteContainerRepository\"\n\t\terr = common.PostProcessServiceError(err, \"Artifacts\", \"DeleteContainerRepository\", apiReferenceLink)\n\t\treturn response, err\n\t}\n\n\terr = common.UnmarshalResponse(httpResponse, &response)\n\treturn response, err\n}", "func (r *Repository) GitLabDeleteRepositories(ctx context.Context, gitLabGroupPath string) error {\n\tf := logrus.Fields{\n\t\t\"functionName\": \"v2.repositories.repository.GitLabDeleteRepositories\",\n\t\tutils.XREQUESTID: ctx.Value(utils.XREQUESTID),\n\t\t\"gitLabGroupPath\": gitLabGroupPath,\n\t}\n\n\tlog.WithFields(f).Debugf(\"loading repositories with name prefix: %s\", gitLabGroupPath)\n\trepositories, err := r.GitLabGetRepositoriesByNamePrefix(ctx, gitLabGroupPath)\n\tif err != nil {\n\t\t// If nothing to delete...\n\t\tif _, ok := err.(*utils.GitLabRepositoryNotFound); ok {\n\t\t\treturn 
nil\n\t\t}\n\t\tlog.WithFields(f).WithError(err).Warnf(\"problem loading repositories with name prefix: %s\", gitLabGroupPath)\n\t\treturn err\n\t}\n\tlog.WithFields(f).Debugf(\"processing repository delete request for %d repositories\", len(repositories))\n\n\ttype GitLabDeleteRepositoryResponse struct {\n\t\tRepositoryID string\n\t\tRepositoryName string\n\t\tRepositoryFullPath string\n\t\tError error\n\t}\n\tdeleteRepoRespChan := make(chan *GitLabDeleteRepositoryResponse, len(repositories))\n\n\tfor _, repo := range repositories {\n\t\tgo func(repo *repoModels.RepositoryDBModel) {\n\t\t\t_, err = r.dynamoDBClient.DeleteItem(&dynamodb.DeleteItemInput{\n\t\t\t\tKey: map[string]*dynamodb.AttributeValue{\n\t\t\t\t\trepoModels.RepositoryIDColumn: {S: aws.String(repo.RepositoryID)},\n\t\t\t\t},\n\t\t\t\tTableName: aws.String(r.repositoryTableName),\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tlog.WithFields(f).WithError(err).Warnf(\"error deleting repository with ID:%s\", repo.RepositoryID)\n\t\t\t}\n\t\t\tdeleteRepoRespChan <- &GitLabDeleteRepositoryResponse{\n\t\t\t\tRepositoryID: repo.RepositoryID,\n\t\t\t\tRepositoryName: repo.RepositoryName,\n\t\t\t\tRepositoryFullPath: repo.RepositoryFullPath,\n\t\t\t\tError: err,\n\t\t\t}\n\t\t}(repo)\n\t}\n\n\t// Wait for the go routines to finish and load up the results\n\tlog.WithFields(f).Debug(\"waiting for delete repos to finish...\")\n\tvar lastErr error\n\tfor range repositories {\n\t\tselect {\n\t\tcase response := <-deleteRepoRespChan:\n\t\t\tif response.Error != nil {\n\t\t\t\tlog.WithFields(f).WithError(response.Error).Warn(response.Error.Error())\n\t\t\t\tlastErr = response.Error\n\t\t\t} else {\n\t\t\t\tlog.WithFields(f).Debugf(\"delete repo: %s with ID: %s with full path: %s\", response.RepositoryName, response.RepositoryID, response.RepositoryFullPath)\n\t\t\t}\n\t\tcase <-ctx.Done():\n\t\t\tlog.WithFields(f).WithError(ctx.Err()).Warnf(\"waiting for delete repositories timed out\")\n\t\t\tlastErr = fmt.Errorf(\"delete repositories failed with timeout, error: %v\", ctx.Err())\n\t\t}\n\t}\n\n\t// Return the last error, hopefully nil if no error occurred...\n\treturn lastErr\n}", "func kubeDeleteHelmRepository(t *testing.T, name, namespace string) error {\n\tt.Logf(\"+kubeDeleteHelmRepository(%s,%s)\", name, namespace)\n\tif ifc, err := kubeGetHelmRepositoryResourceInterface(namespace); err != nil {\n\t\treturn err\n\t} else if err = ifc.Delete(context.TODO(), name, metav1.DeleteOptions{}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (rs *RepositoryService) ExecDeleteRepoRest(repositoryPath *Path) error {\n\trepoName := path.Join(\"repos\", repositoryPath.Subject, repositoryPath.Repo)\n\turl := rs.BintrayDetails.GetApiUrl() + repoName\n\n\tlog.Info(\"Deleting Repo...\")\n\thttpClientsDetails := rs.BintrayDetails.CreateHttpClientDetails()\n\tclient, err := httpclient.ClientBuilder().Build()\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp, body, err := client.SendDelete(url, nil, httpClientsDetails)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn errorutils.CheckError(errors.New(\"Bintray response: \" + resp.Status + \"\\n\" + clientutils.IndentJson(body)))\n\t}\n\n\tlog.Debug(\"Bintray response:\", resp.Status)\n\tlog.Info(\"Deleted Repo\", repositoryPath.Repo+\".\")\n\treturn nil\n}", "func (c *client) DeleteRepo(id int64) error {\n\tlogrus.Tracef(\"deleting repo %d in the database\", id)\n\n\t// send query to the database\n\treturn 
c.Postgres.\n\t\tTable(constants.TableRepo).\n\t\tExec(dml.DeleteRepo, id).Error\n}", "func (c *Repo) DeleteRepo(w http.ResponseWriter, req *http.Request) (int, interface{}) {\n\tsctx := req.Context().Value(middlewares.SessionKey)\n\tsession, ok := sctx.(*types.Session)\n\tif !ok {\n\t\treturn httputils.InternalError()\n\t}\n\n\tquery := req.URL.Query()\n\n\trepo := types.Repo{\n\t\tName: query[\"name\"][0],\n\t}\n\n\trepo.Uname = session.User.Uname\n\n\terr := repos.DeleteRepo(&repo)\n\tif err != nil {\n\t\treturn http.StatusConflict, httputils.ErrorResponse([]string{\"Repo does not exist\"})\n\t}\n\n\treturn http.StatusOK, nil\n}", "func (_m *MockECRAPI) DeleteRepository(_param0 *ecr.DeleteRepositoryInput) (*ecr.DeleteRepositoryOutput, error) {\n\tret := _m.ctrl.Call(_m, \"DeleteRepository\", _param0)\n\tret0, _ := ret[0].(*ecr.DeleteRepositoryOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (o RepositorySlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif len(o) == 0 {\n\t\treturn 0, nil\n\t}\n\n\tif len(repositoryBeforeDeleteHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t}\n\n\tvar args []interface{}\n\tfor _, obj := range o {\n\t\tpkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), repositoryPrimaryKeyMapping)\n\t\targs = append(args, pkeyArgs...)\n\t}\n\n\tsql := \"DELETE FROM `repositories` WHERE \" +\n\t\tstrmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 0, repositoryPrimaryKeyColumns, len(o))\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, sql)\n\t\tfmt.Fprintln(writer, args)\n\t}\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete all from repository slice\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by deleteall for repositories\")\n\t}\n\n\tif len(repositoryAfterDeleteHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn rowsAff, nil\n}", "func Delete(params DeleteParams) error {\n\tif err := params.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\t_, _, err := params.V1API.PlatformConfigurationSnapshots.DeleteSnapshotRepository(\n\t\tplatform_configuration_snapshots.NewDeleteSnapshotRepositoryParams().\n\t\t\tWithRepositoryName(params.Name),\n\t\tparams.AuthWriter,\n\t)\n\n\treturn api.UnwrapError(err)\n}", "func (client ArtifactsClient) DeleteContainerRepository(ctx context.Context, request DeleteContainerRepositoryRequest) (response DeleteContainerRepositoryResponse, err error) {\n\tvar ociResponse common.OCIResponse\n\tpolicy := common.NoRetryPolicy()\n\tif client.RetryPolicy() != nil {\n\t\tpolicy = *client.RetryPolicy()\n\t}\n\tif request.RetryPolicy() != nil {\n\t\tpolicy = *request.RetryPolicy()\n\t}\n\tociResponse, err = common.Retry(ctx, request, client.deleteContainerRepository, policy)\n\tif err != nil {\n\t\tif ociResponse != nil {\n\t\t\tif httpResponse := ociResponse.HTTPResponse(); httpResponse != nil {\n\t\t\t\topcRequestId := httpResponse.Header.Get(\"opc-request-id\")\n\t\t\t\tresponse = DeleteContainerRepositoryResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId}\n\t\t\t} else {\n\t\t\t\tresponse = 
DeleteContainerRepositoryResponse{}\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\tif convertedResponse, ok := ociResponse.(DeleteContainerRepositoryResponse); ok {\n\t\tresponse = convertedResponse\n\t} else {\n\t\terr = fmt.Errorf(\"failed to convert OCIResponse into DeleteContainerRepositoryResponse\")\n\t}\n\treturn\n}", "func (_mr *MockECRAPIMockRecorder) DeleteRepository(arg0 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCall(_mr.mock, \"DeleteRepository\", arg0)\n}", "func deleteRepoRedirect(e Engine, ownerID int64, repoName string) error {\n\trepoName = strings.ToLower(repoName)\n\t_, err := e.Delete(&RepoRedirect{OwnerID: ownerID, LowerName: repoName})\n\treturn err\n}", "func TestDeleteRepoFileWithoutBranchNames(t *testing.T) {\n\tonGiteaRun(t, testDeleteRepoFileWithoutBranchNames)\n}", "func deleteRepos(ctx context.Context, entClient *ent.Client, namespace string, repos ...string) error {\n\trepoPredicates := []predicate.Module{}\n\tfor _, repo := range repos {\n\t\trepoPredicates = append(repoPredicates, module.RepoNameEQ(repo))\n\t}\n\tmodulePredicate := module.And(\n\t\tmodule.NamespaceEQ(namespace),\n\t\tmodule.Or(repoPredicates...),\n\t)\n\n\t_, err := entClient.ModuleVersion.Delete().\n\t\tWhere(moduleversion.HasModuleWith(modulePredicate)).\n\t\tExec(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Delete the module\n\t_, err = entClient.Module.Delete().\n\t\tWhere(modulePredicate).\n\t\tExec(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func Delete() {\n\n\t// dwn-delete:\n\n\t// @echo Deleting dep ...\n\t// @echo\n\t// @echo DEP_DOWNLOAD_DIR: \t$(DEP_DOWNLOAD_DIR)\n\t// @echo DEP_INSTALL_PREFIX: \t$(DEP_INSTALL_PREFIX)\n\t// @echo\n\t// @echo DWN_FILENAME: \t\t$(DWN_FILENAME)\n\t// @echo DWN_BIN_NAME: \t\t$(DWN_BIN_NAME)\n\t// @echo\n\t// @echo DWN_FILENAME_CALC: \t$(DWN_FILENAME_CALC)\n\t// @echo DWN_FILENAME_BASE: \t$(DWN_FILENAME_BASE)\n\t// @echo DWN_FILENAME_EXT:\t \t$(DWN_FILENAME_EXT)\n\t// @echo\n\n\t// # deletes the tar and binary\n\t// if [[ $(GOOS) = darwin || $(GOOS) = linux ]]; then \\\n\t// \trm -rf $(DEP_DOWNLOAD_DIR)/$(DWN_FILENAME); \\\n\t// \trm -rf $(DEP_DOWNLOAD_DIR)/$(DWN_BIN_NAME); \\\n\t// fi\n\n\t// if [[ $(GOOS) = windows ]]; then \\\n\t// \trm -rf $(DEP_DOWNLOAD_DIR)/$(DWN_FILENAME); \\\n\t// \trm -rf $(DEP_DOWNLOAD_DIR)/$(DWN_BIN_NAME); \\\n\t// fi\n\n\t// # delete the binary from the install location\n\t// if [[ $(GOOS) = darwin || $(GOOS) = linux ]]; then \\\n\t// \tsudo rm -rf $(DEP_INSTALL_PREFIX)/$(DWN_BIN_NAME); \\\n\t// fi\n\n\t// if [[ $(GOOS) = windows ]]; then \\\n\t// \trm -rf $(DEP_INSTALL_PREFIX)/$(DWN_BIN_NAME); \\\n\t// fi\n\n}", "func (s *store) DeleteUploadsWithoutRepository(ctx context.Context, now time.Time) (_ map[int]int, err error) {\n\tctx, trace, endObservation := s.operations.deleteUploadsWithoutRepository.With(ctx, &err, observation.Args{})\n\tdefer endObservation(1, observation.Args{})\n\n\ttx, err := s.db.Transact(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer func() { err = tx.Done(err) }()\n\n\tunset, _ := tx.SetLocal(ctx, \"codeintel.lsif_uploads_audit.reason\", \"upload associated with repository not known to this instance\")\n\tdefer unset(ctx)\n\n\tquery := sqlf.Sprintf(deleteUploadsWithoutRepositoryQuery, now.UTC(), DeletedRepositoryGracePeriod/time.Second)\n\trepositories, err := scanCounts(tx.Query(ctx, query))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcount := 0\n\tfor _, numDeleted := range repositories {\n\t\tcount += 
numDeleted\n\t}\n\ttrace.Log(\n\t\tlog.Int(\"count\", count),\n\t\tlog.Int(\"numRepositories\", len(repositories)),\n\t)\n\n\treturn repositories, nil\n}", "func (r repository) Delete(ctx context.Context, id string) error {\n\tlink, err := r.Get(ctx, id)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn r.db.With(ctx).Model(&link).Delete()\n}", "func (suite *NoteRepoTestSuite) TestNoteRepoDelete() {\n\tvar noteID uint = 5\n\tsuite.Run(\"delete with valid data\", func() {\n\t\tsuite.mock.ExpectExec(\"UPDATE `notes` SET `deleted_at`=\").\n\t\t\tWillReturnResult(sqlmock.NewResult(0, 1))\n\t\terr := suite.noteRepo.Delete(noteID)\n\t\tif err != nil {\n\t\t\tsuite.Fail(\"error should be nil\")\n\t\t}\n\t})\n\tsuite.Run(\"delete with invalid data\", func() {\n\t\tsuite.mock.ExpectExec(\"UPDATE `notes` SET `deleted_at`=\").\n\t\t\tWillReturnResult(sqlmock.NewResult(0, 0))\n\t\terr := suite.noteRepo.Delete(noteID)\n\t\tif err != nil {\n\t\t\tsuite.Fail(\"error should be nil\")\n\t\t}\n\t})\n}", "func DeleteResourcesCommand(c *cli.Context) error {\n\n\tif c.NArg() != 2 {\n\t\treturn fmt.Errorf(\"wrong number of arguments: expected 2, got %d\", c.NArg())\n\t}\n\n\tkind := strings.ToLower(c.Args().First())\n\tguid := c.Args().Get(1)\n\n\tstatus, err := client.DeleteResource(client.GUID, kind, guid)\n\tif status > http.StatusAccepted && err == nil {\n\t\tfmt.Println(fmt.Sprintf(\"could not delete resource '%s/%s-%s'\", client.GUID, kind, guid))\n\t\treturn nil\n\t}\n\tif err != nil {\n\t\tprintError(c, err)\n\t\treturn err\n\t}\n\n\tfmt.Println(fmt.Sprintf(\"successfully delete resource '%s/%s-%s'\", client.GUID, kind, guid))\n\treturn nil\n}", "func (repo Repository) Delete(fullPath string) error {\n\tfilePath := path.Join(repo.StorageDir, fullPath)\n\n\treturn os.Remove(filePath)\n}", "func Delete(t *testing.T, knFunc *TestShellCmdRunner, project *FunctionTestProject) {\n\t// Invoke delete command\n\tresult := knFunc.Exec(\"delete\", project.FunctionName)\n\tif result.Error != nil && project.IsDeployed {\n\t\tt.Fail()\n\t}\n\tproject.IsDeployed = false\n\n\t// Invoke list to verify project was deleted\n\tList(t, knFunc, *project)\n}", "func (ar *ArticleRepository) Delete(article entity.Article) (err error) {\n\terr = ar.Conn.Delete(article).Error\n\treturn\n}", "func (s *ImagesByRepositoryRegistryStorage) Delete(id string) (<-chan interface{}, error) {\n\tmapping, err := mappingFromID(id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn apiserver.MakeAsync(func() (interface{}, error) {\n\t\treturn &api.Status{Status: api.StatusSuccess}, s.registry.RemoveImageFromRepository(mapping.RepositoryName, mapping.Image.ID)\n\t}), nil\n}", "func NewDeleteBinaryRepositoryOK() *DeleteBinaryRepositoryOK {\n\treturn &DeleteBinaryRepositoryOK{}\n}", "func (c *DriversController) DeleteDriver() {\n\tresult := models.DeleteDriver(c.Ctx.Input.Param(ID_PARAMETER))\n\tc.Ctx.Output.Body([]byte(result))\n}", "func restDeleteCommandById(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tvar id string = vars[ID]\n\n\t// Check if the command exists\n\t_, err := dbClient.GetCommandById(id)\n\tif err != nil {\n\t\tLoggingClient.Error(err.Error())\n\t\thttp.Error(w, err.Error(), http.StatusNotFound)\n\t\treturn\n\t}\n\n\t// Check if the command is still in use by a device profile\n\tisStillInUse, err := isCommandStillInUse(id)\n\tif err != nil {\n\t\tLoggingClient.Error(err.Error())\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif isStillInUse {\n\t\terr = 
errors.New(\"Can't delete command. Its still in use by Device Profiles\")\n\t\tLoggingClient.Error(err.Error())\n\t\thttp.Error(w, err.Error(), http.StatusConflict)\n\t\treturn\n\t}\n\n\tif err := dbClient.DeleteCommandById(id); err != nil {\n\t\tLoggingClient.Error(err.Error())\n\t\tif err == db.ErrCommandStillInUse {\n\t\t\thttp.Error(w, err.Error(), http.StatusConflict)\n\t\t} else {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t}\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\tw.Write([]byte(\"true\"))\n}", "func (u *UserRepository) Delete(ID uint) error {\n\targs := u.Called(ID)\n\treturn args.Error(0)\n}", "func CmdDelete() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"delete <namespace-store-name>\",\n\t\tShort: \"Delete namespace store\",\n\t\tRun: RunDelete,\n\t}\n\treturn cmd\n}", "func (rc *ResourceCommand) Delete(ctx context.Context, client auth.ClientI) (err error) {\n\tsingletonResources := []string{\n\t\ttypes.KindClusterAuthPreference,\n\t\ttypes.KindClusterMaintenanceConfig,\n\t\ttypes.KindClusterNetworkingConfig,\n\t\ttypes.KindSessionRecordingConfig,\n\t\ttypes.KindInstaller,\n\t\ttypes.KindUIConfig,\n\t}\n\tif !slices.Contains(singletonResources, rc.ref.Kind) && (rc.ref.Kind == \"\" || rc.ref.Name == \"\") {\n\t\treturn trace.BadParameter(\"provide a full resource name to delete, for example:\\n$ tctl rm cluster/east\\n\")\n\t}\n\n\tswitch rc.ref.Kind {\n\tcase types.KindNode:\n\t\tif err = client.DeleteNode(ctx, apidefaults.Namespace, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"node %v has been deleted\\n\", rc.ref.Name)\n\tcase types.KindUser:\n\t\tif err = client.DeleteUser(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"user %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindRole:\n\t\tif err = client.DeleteRole(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"role %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindToken:\n\t\tif err = client.DeleteToken(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"token %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindSAMLConnector:\n\t\tif err = client.DeleteSAMLConnector(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"SAML connector %v has been deleted\\n\", rc.ref.Name)\n\tcase types.KindOIDCConnector:\n\t\tif err = client.DeleteOIDCConnector(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"OIDC connector %v has been deleted\\n\", rc.ref.Name)\n\tcase types.KindGithubConnector:\n\t\tif err = client.DeleteGithubConnector(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"github connector %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindReverseTunnel:\n\t\tif err := client.DeleteReverseTunnel(rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"reverse tunnel %v has been deleted\\n\", rc.ref.Name)\n\tcase types.KindTrustedCluster:\n\t\tif err = client.DeleteTrustedCluster(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"trusted cluster %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindRemoteCluster:\n\t\tif err = client.DeleteRemoteCluster(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"remote cluster %q has been deleted\\n\", 
rc.ref.Name)\n\tcase types.KindSemaphore:\n\t\tif rc.ref.SubKind == \"\" || rc.ref.Name == \"\" {\n\t\t\treturn trace.BadParameter(\n\t\t\t\t\"full semaphore path must be specified (e.g. '%s/%s/[email protected]')\",\n\t\t\t\ttypes.KindSemaphore, types.SemaphoreKindConnection,\n\t\t\t)\n\t\t}\n\t\terr := client.DeleteSemaphore(ctx, types.SemaphoreFilter{\n\t\t\tSemaphoreKind: rc.ref.SubKind,\n\t\t\tSemaphoreName: rc.ref.Name,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"semaphore '%s/%s' has been deleted\\n\", rc.ref.SubKind, rc.ref.Name)\n\tcase types.KindClusterAuthPreference:\n\t\tif err = resetAuthPreference(ctx, client); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"cluster auth preference has been reset to defaults\\n\")\n\tcase types.KindClusterMaintenanceConfig:\n\t\tif err := client.DeleteClusterMaintenanceConfig(ctx); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"cluster maintenance configuration has been deleted\\n\")\n\tcase types.KindClusterNetworkingConfig:\n\t\tif err = resetClusterNetworkingConfig(ctx, client); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"cluster networking configuration has been reset to defaults\\n\")\n\tcase types.KindSessionRecordingConfig:\n\t\tif err = resetSessionRecordingConfig(ctx, client); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"session recording configuration has been reset to defaults\\n\")\n\tcase types.KindLock:\n\t\tname := rc.ref.Name\n\t\tif rc.ref.SubKind != \"\" {\n\t\t\tname = rc.ref.SubKind + \"/\" + name\n\t\t}\n\t\tif err = client.DeleteLock(ctx, name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"lock %q has been deleted\\n\", name)\n\tcase types.KindDatabaseServer:\n\t\tservers, err := client.GetDatabaseServers(ctx, apidefaults.Namespace)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tresDesc := \"database server\"\n\t\tservers = filterByNameOrPrefix(servers, rc.ref.Name)\n\t\tname, err := getOneResourceNameToDelete(servers, rc.ref, resDesc)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfor _, s := range servers {\n\t\t\terr := client.DeleteDatabaseServer(ctx, apidefaults.Namespace, s.GetHostID(), name)\n\t\t\tif err != nil {\n\t\t\t\treturn trace.Wrap(err)\n\t\t\t}\n\t\t}\n\t\tfmt.Printf(\"%s %q has been deleted\\n\", resDesc, name)\n\tcase types.KindNetworkRestrictions:\n\t\tif err = resetNetworkRestrictions(ctx, client); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"network restrictions have been reset to defaults (allow all)\\n\")\n\tcase types.KindApp:\n\t\tif err = client.DeleteApp(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"application %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindDatabase:\n\t\tdatabases, err := client.GetDatabases(ctx)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tresDesc := \"database\"\n\t\tdatabases = filterByNameOrPrefix(databases, rc.ref.Name)\n\t\tname, err := getOneResourceNameToDelete(databases, rc.ref, resDesc)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tif err := client.DeleteDatabase(ctx, name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"%s %q has been deleted\\n\", resDesc, name)\n\tcase types.KindKubernetesCluster:\n\t\tclusters, err := client.GetKubernetesClusters(ctx)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tresDesc := \"kubernetes 
cluster\"\n\t\tclusters = filterByNameOrPrefix(clusters, rc.ref.Name)\n\t\tname, err := getOneResourceNameToDelete(clusters, rc.ref, resDesc)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tif err := client.DeleteKubernetesCluster(ctx, name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"%s %q has been deleted\\n\", resDesc, name)\n\tcase types.KindWindowsDesktopService:\n\t\tif err = client.DeleteWindowsDesktopService(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"windows desktop service %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindWindowsDesktop:\n\t\tdesktops, err := client.GetWindowsDesktops(ctx,\n\t\t\ttypes.WindowsDesktopFilter{Name: rc.ref.Name})\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tif len(desktops) == 0 {\n\t\t\treturn trace.NotFound(\"no desktops with name %q were found\", rc.ref.Name)\n\t\t}\n\t\tdeleted := 0\n\t\tvar errs []error\n\t\tfor _, desktop := range desktops {\n\t\t\tif desktop.GetName() == rc.ref.Name {\n\t\t\t\tif err = client.DeleteWindowsDesktop(ctx, desktop.GetHostID(), rc.ref.Name); err != nil {\n\t\t\t\t\terrs = append(errs, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tdeleted++\n\t\t\t}\n\t\t}\n\t\tif deleted == 0 {\n\t\t\terrs = append(errs,\n\t\t\t\ttrace.Errorf(\"failed to delete any desktops with the name %q, %d were found\",\n\t\t\t\t\trc.ref.Name, len(desktops)))\n\t\t}\n\t\tfmts := \"%d windows desktops with name %q have been deleted\"\n\t\tif err := trace.NewAggregate(errs...); err != nil {\n\t\t\tfmt.Printf(fmts+\" with errors while deleting\\n\", deleted, rc.ref.Name)\n\t\t\treturn err\n\t\t}\n\t\tfmt.Printf(fmts+\"\\n\", deleted, rc.ref.Name)\n\tcase types.KindCertAuthority:\n\t\tif rc.ref.SubKind == \"\" || rc.ref.Name == \"\" {\n\t\t\treturn trace.BadParameter(\n\t\t\t\t\"full %s path must be specified (e.g. 
'%s/%s/clustername')\",\n\t\t\t\ttypes.KindCertAuthority, types.KindCertAuthority, types.HostCA,\n\t\t\t)\n\t\t}\n\t\terr := client.DeleteCertAuthority(ctx, types.CertAuthID{\n\t\t\tType: types.CertAuthType(rc.ref.SubKind),\n\t\t\tDomainName: rc.ref.Name,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"%s '%s/%s' has been deleted\\n\", types.KindCertAuthority, rc.ref.SubKind, rc.ref.Name)\n\tcase types.KindKubeServer:\n\t\tservers, err := client.GetKubernetesServers(ctx)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tresDesc := \"kubernetes server\"\n\t\tservers = filterByNameOrPrefix(servers, rc.ref.Name)\n\t\tname, err := getOneResourceNameToDelete(servers, rc.ref, resDesc)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfor _, s := range servers {\n\t\t\terr := client.DeleteKubernetesServer(ctx, s.GetHostID(), name)\n\t\t\tif err != nil {\n\t\t\t\treturn trace.Wrap(err)\n\t\t\t}\n\t\t}\n\t\tfmt.Printf(\"%s %q has been deleted\\n\", resDesc, name)\n\tcase types.KindUIConfig:\n\t\terr := client.DeleteUIConfig(ctx)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"%s has been deleted\\n\", types.KindUIConfig)\n\tcase types.KindInstaller:\n\t\terr := client.DeleteInstaller(ctx, rc.ref.Name)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tif rc.ref.Name == installers.InstallerScriptName {\n\t\t\tfmt.Printf(\"%s has been reset to a default value\\n\", rc.ref.Name)\n\t\t} else {\n\t\t\tfmt.Printf(\"%s has been deleted\\n\", rc.ref.Name)\n\t\t}\n\tcase types.KindLoginRule:\n\t\tloginRuleClient := client.LoginRuleClient()\n\t\t_, err := loginRuleClient.DeleteLoginRule(ctx, &loginrulepb.DeleteLoginRuleRequest{\n\t\t\tName: rc.ref.Name,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn trail.FromGRPC(err)\n\t\t}\n\t\tfmt.Printf(\"login rule %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindSAMLIdPServiceProvider:\n\t\tif err := client.DeleteSAMLIdPServiceProvider(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"SAML IdP service provider %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindDevice:\n\t\tremote := client.DevicesClient()\n\t\tdevice, err := findDeviceByIDOrTag(ctx, remote, rc.ref.Name)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\n\t\tif _, err := remote.DeleteDevice(ctx, &devicepb.DeleteDeviceRequest{\n\t\t\tDeviceId: device[0].Id,\n\t\t}); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"Device %q removed\\n\", rc.ref.Name)\n\n\tcase types.KindIntegration:\n\t\tif err := client.DeleteIntegration(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"Integration %q removed\\n\", rc.ref.Name)\n\n\tcase types.KindAppServer:\n\t\tappServers, err := client.GetApplicationServers(ctx, rc.namespace)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tdeleted := false\n\t\tfor _, server := range appServers {\n\t\t\tif server.GetName() == rc.ref.Name {\n\t\t\t\tif err := client.DeleteApplicationServer(ctx, server.GetNamespace(), server.GetHostID(), server.GetName()); err != nil {\n\t\t\t\t\treturn trace.Wrap(err)\n\t\t\t\t}\n\t\t\t\tdeleted = true\n\t\t\t}\n\t\t}\n\t\tif !deleted {\n\t\t\treturn trace.NotFound(\"application server %q not found\", rc.ref.Name)\n\t\t}\n\t\tfmt.Printf(\"application server %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindOktaImportRule:\n\t\tif err := client.OktaClient().DeleteOktaImportRule(ctx, rc.ref.Name); err != nil 
{\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"Okta import rule %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindUserGroup:\n\t\tif err := client.DeleteUserGroup(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"User group %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindProxy:\n\t\tif err := client.DeleteProxy(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"Proxy %q has been deleted\\n\", rc.ref.Name)\n\tcase types.KindAccessList:\n\t\tif err := client.AccessListClient().DeleteAccessList(ctx, rc.ref.Name); err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\t\tfmt.Printf(\"Access list %q has been deleted\\n\", rc.ref.Name)\n\tdefault:\n\t\treturn trace.BadParameter(\"deleting resources of type %q is not supported\", rc.ref.Kind)\n\t}\n\treturn nil\n}", "func RunDelete(cmd *cobra.Command, args []string) {\n\n\tlog := util.Logger()\n\n\tif len(args) != 1 || args[0] == \"\" {\n\t\tlog.Fatalf(`❌ Missing expected arguments: <namespace-store-name> %s`, cmd.UsageString())\n\t}\n\n\to := util.KubeObject(bundle.File_deploy_crds_noobaa_io_v1alpha1_namespacestore_cr_yaml)\n\tnamespaceStore := o.(*nbv1.NamespaceStore)\n\tnamespaceStore.Name = args[0]\n\tnamespaceStore.Namespace = options.Namespace\n\tnamespaceStore.Spec = nbv1.NamespaceStoreSpec{}\n\n\tnbClient := system.GetNBClient()\n\n\tnamespaceResourceinfo, err := nbClient.ReadNamespaceResourceAPI(nb.ReadNamespaceResourceParams{Name: namespaceStore.Name})\n\tif err != nil {\n\t\trpcErr, isRPCErr := err.(*nb.RPCError)\n\t\tif !isRPCErr || rpcErr.RPCCode != \"NO_SUCH_NAMESPACE_RESOURCE\" {\n\t\t\tlog.Fatalf(`❌ Failed to read NamespaceStore info: %s`, err)\n\t\t}\n\t} else if namespaceResourceinfo.Undeletable != \"\" && namespaceResourceinfo.Undeletable != \"IS_NAMESPACESTORE\" {\n\t\tswitch namespaceResourceinfo.Undeletable {\n\t\tcase \"CONNECTED_BUCKET_DELETING\":\n\t\t\tfallthrough\n\t\tcase \"IN_USE\":\n\t\t\tlog.Fatalf(`❌ Could not delete NamespaceStore %q in namespace %q as it is being used by one or more buckets`,\n\t\t\t\tnamespaceStore.Name, namespaceStore.Namespace)\n\t\tdefault:\n\t\t\tlog.Fatalf(`❌ Could not delete NamespaceStore %q in namespace %q, undeletable due to %q`,\n\t\t\t\tnamespaceStore.Name, namespaceStore.Namespace, namespaceResourceinfo.Undeletable)\n\t\t}\n\t}\n\tif !util.KubeDelete(namespaceStore) {\n\t\tlog.Fatalf(`❌ Could not delete NamespaceStore %q in namespace %q`,\n\t\t\tnamespaceStore.Name, namespaceStore.Namespace)\n\t}\n}", "func (m *MockDBStore) DeleteUploadsWithoutRepository(v0 context.Context, v1 time.Time) (map[int]int, error) {\n\tr0, r1 := m.DeleteUploadsWithoutRepositoryFunc.nextHook()(v0, v1)\n\tm.DeleteUploadsWithoutRepositoryFunc.appendCall(DBStoreDeleteUploadsWithoutRepositoryFuncCall{v0, v1, r0, r1})\n\treturn r0, r1\n}", "func (m MockRepositoryStore) DeleteInvalidRepository(ctx context.Context, repositoryID int64, storage string) error {\n\tif m.DeleteInvalidRepositoryFunc == nil {\n\t\treturn nil\n\t}\n\n\treturn m.DeleteInvalidRepositoryFunc(ctx, repositoryID, storage)\n}", "func (iu *ImageUtil) DeleteRepo(repoName string) error {\n\tif len(strings.TrimSpace(repoName)) == 0 {\n\t\treturn errors.New(\"Empty repo name for deleting\")\n\t}\n\n\turl := fmt.Sprintf(\"%s%s%s\", iu.rootURI, \"/api/repositories/\", repoName)\n\tif err := iu.testingClient.Delete(url); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (c DefaultClient) RemoveRepository(repositoryID string) (*HTTPStatus, error) 
{\n\treq, err := http.NewRequest(\"DELETE\", c.config.BaseURL+\"/api/repositories/\"+repositoryID, nil)\n\tif err != nil {\n\t\treturn &HTTPStatus{}, err\n\t}\n\tc.setAuthHeaders(req)\n\n\tresponse, err := c.config.Doer.Do(req)\n\tif err != nil {\n\t\treturn &HTTPStatus{}, err\n\t}\n\tdefer response.Body.Close()\n\n\tdata, err := ioutil.ReadAll(response.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif response.StatusCode/100 == 5 {\n\t\treturn &HTTPStatus{StatusCode: response.StatusCode, Entity: data}, http500{data}\n\t}\n\n\tif response.StatusCode != 200 {\n\t\treturn &HTTPStatus{response.StatusCode, data}, nil\n\t}\n\n\treturn nil, nil\n}", "func (r *Repos) Delete(name string) error {\n\tres := []*Table{}\n\n\tcounter := 0\n\tfor _, t := range r.Tables {\n\t\tif t.Name == name {\n\t\t\tcounter++\n\t\t\tcontinue\n\t\t}\n\t\tres = append(res, t)\n\t}\n\tif counter == 0 {\n\t\treturn fmt.Errorf(\"No repository named %s\", name)\n\t}\n\n\tr.Tables = res\n\treturn r.deleteRepo(name)\n}", "func (r *etcdRepository) Delete(ctx context.Context, key string) error {\n\t_, err := r.client.Delete(ctx, key)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (r Repository) DeleteAll(ctx context.Context, uniqueID string, action authentity.Action) error {\n\totpKey := generateOtpKey(uniqueID, action)\n\t_, err := r.redis.Delete(ctx, otpKey)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (c *Client) DeleteRepo(id string) error {\n\turi := c.formURI(\"/api/v1/remove/repo/\" + id)\n\treturn c.getBasicResponse(uri, &Response{})\n}", "func (_m *MockECRAPI) DeleteRepositoryPolicy(_param0 *ecr.DeleteRepositoryPolicyInput) (*ecr.DeleteRepositoryPolicyOutput, error) {\n\tret := _m.ctrl.Call(_m, \"DeleteRepositoryPolicy\", _param0)\n\tret0, _ := ret[0].(*ecr.DeleteRepositoryPolicyOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (repo *HelmRepoRepository) DeleteHelmRepo(\n\thr *models.HelmRepo,\n) error {\n\t// clear TokenCache association\n\tassoc := repo.db.Model(hr).Association(\"TokenCache\")\n\n\tif assoc.Error != nil {\n\t\treturn assoc.Error\n\t}\n\n\tif err := assoc.Clear(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := repo.db.Where(\"id = ?\", hr.ID).Delete(&models.HelmRepo{}).Error; err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (repo *BaseRepository) DeleteCommand(channel string, commandToDelete *models.Command) error {\n\trepo.mutex.Lock()\n\tdefer repo.mutex.Unlock()\n\n\tchanInfo, exists, err := repo.getChannelAndCommandExists(channel, commandToDelete)\n\tif err != nil {\n\t\treturn err // Channel not found\n\t}\n\tif !exists {\n\t\treturn ErrCommandNotFound\n\t}\n\tdelete(chanInfo.Commands, commandToDelete.Name)\n\treturn repo.updateChannel(&chanInfo)\n}", "func Delete(streams genericclioptions.IOStreams, f factory.Factory) *cobra.Command {\n\t// o := NewDeleteOptions(streams).(*DeleteOptions)\n\n\tcmd := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tDisableFlagsInUseLine: true,\n\t\tShort: \"Delete a component wih falcoctl\",\n\t\tLong: `Delete a component wih falcoctl`,\n\t}\n\n\tcmd.AddCommand(DeleteFalco(streams, f))\n\n\treturn cmd\n}", "func (r *repository) Delete(id uint) error {\n\tif err := r.db.Where(\"id = ?\", id).Delete(&models.Upload{}).Error; err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func Delete(g *types.Cmd) {\n\tg.AddOptions(\"--delete\")\n}", "func (r *ItemsRepository) delete(i *Item) error {\n\tif query := r.databaseHandler.DB().Delete(&i); query.Error != nil {\n\t\treturn 
query.Error\n\t}\n\treturn nil\n}", "func (db *ProductRepo) Delete(id int) (message string) {\n\tdelete := \"this is delelte methode\"\n\treturn delete\n}", "func (s *GitlabSCM) DeleteRepository(ctx context.Context, opt *RepositoryOptions) (err error) {\n\t_, err = s.client.Projects.DeleteProject(strconv.FormatUint(opt.ID, 10), gitlab.WithContext(ctx))\n\treturn\n}", "func HandleDelete(\n\trepos core.RepositoryStore,\n\tbuilds core.BuildStore,\n\tstages core.StageStore,\n\tsteps core.StepStore,\n\tlogs core.LogStore,\n) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tvar (\n\t\t\tnamespace = chi.URLParam(r, \"owner\")\n\t\t\tname = chi.URLParam(r, \"name\")\n\t\t)\n\t\tnumber, err := strconv.ParseInt(chi.URLParam(r, \"number\"), 10, 64)\n\t\tif err != nil {\n\t\t\trender.BadRequest(w, err)\n\t\t\treturn\n\t\t}\n\t\tstageNumber, err := strconv.Atoi(chi.URLParam(r, \"stage\"))\n\t\tif err != nil {\n\t\t\trender.BadRequest(w, err)\n\t\t\treturn\n\t\t}\n\t\tstepNumber, err := strconv.Atoi(chi.URLParam(r, \"step\"))\n\t\tif err != nil {\n\t\t\trender.BadRequest(w, err)\n\t\t\treturn\n\t\t}\n\t\trepo, err := repos.FindName(r.Context(), namespace, name)\n\t\tif err != nil {\n\t\t\trender.NotFound(w, err)\n\t\t\treturn\n\t\t}\n\t\tbuild, err := builds.FindNumber(r.Context(), repo.ID, number)\n\t\tif err != nil {\n\t\t\trender.NotFound(w, err)\n\t\t\treturn\n\t\t}\n\t\tstage, err := stages.FindNumber(r.Context(), build.ID, stageNumber)\n\t\tif err != nil {\n\t\t\trender.NotFound(w, err)\n\t\t\treturn\n\t\t}\n\t\tstep, err := steps.FindNumber(r.Context(), stage.ID, stepNumber)\n\t\tif err != nil {\n\t\t\trender.NotFound(w, err)\n\t\t\treturn\n\t\t}\n\t\terr = logs.Delete(r.Context(), step.ID)\n\t\tif err != nil {\n\t\t\trender.InternalError(w, err)\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(204)\n\t}\n}", "func (a *Client) RmaRmaRepositoryV1DeleteDelete(params *RmaRmaRepositoryV1DeleteDeleteParams) (*RmaRmaRepositoryV1DeleteDeleteOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewRmaRmaRepositoryV1DeleteDeleteParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"rmaRmaRepositoryV1DeleteDelete\",\n\t\tMethod: \"DELETE\",\n\t\tPathPattern: \"/V1/returns/{id}\",\n\t\tProducesMediaTypes: []string{\"\"},\n\t\tConsumesMediaTypes: []string{\"\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &RmaRmaRepositoryV1DeleteDeleteReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*RmaRmaRepositoryV1DeleteDeleteOK), nil\n\n}", "func (d *Dao) DeleteRepoTag(c context.Context, repoName, tagName string) (err error) {\n\tvar req *http.Request\n\n\thost := d.c.BiliHub.Host + _repositoryURI + \"/%s/tags/%s\"\n\turl := fmt.Sprintf(host, repoName, tagName)\n\n\tif req, err = d.newRequest(http.MethodDelete, url, nil); err != nil {\n\t\treturn\n\t}\n\n\treq.SetBasicAuth(d.c.BiliHub.Username, d.c.BiliHub.Password)\n\n\tif err = d.httpClient.Do(c, req, nil); err != nil {\n\t\tlog.Error(\"d.DeleteRepoTag url(%s) err(%v)\", url, err)\n\t\terr = ecode.MerlinHubRequestErr\n\t}\n\n\treturn\n}", "func (_m *MockECRAPI) DeleteRepositoryRequest(_param0 *ecr.DeleteRepositoryInput) (*request.Request, *ecr.DeleteRepositoryOutput) {\n\tret := _m.ctrl.Call(_m, \"DeleteRepositoryRequest\", _param0)\n\tret0, _ := ret[0].(*request.Request)\n\tret1, _ := ret[1].(*ecr.DeleteRepositoryOutput)\n\treturn ret0, 
ret1\n}", "func (c *Client) Delete(labels map[string]string, wait bool) error {\n\n\t// convert labels to selector\n\tselector := util.ConvertLabelsToSelector(labels)\n\tklog.V(3).Infof(\"Selectors used for deletion: %s\", selector)\n\n\tvar errorList []string\n\tvar deletionPolicy = metav1.DeletePropagationBackground\n\n\t// for --wait flag, it deletes component dependents first and then delete component\n\tif wait {\n\t\tdeletionPolicy = metav1.DeletePropagationForeground\n\t}\n\t// Delete Deployments\n\tklog.V(3).Info(\"Deleting Deployments\")\n\terr := c.appsClient.Deployments(c.Namespace).DeleteCollection(context.TODO(), metav1.DeleteOptions{PropagationPolicy: &deletionPolicy}, metav1.ListOptions{LabelSelector: selector})\n\tif err != nil {\n\t\terrorList = append(errorList, \"unable to delete deployments\")\n\t}\n\n\t// for --wait it waits for component to be deleted\n\t// TODO: Need to modify for `odo app delete`, currently wait flag is added only in `odo component delete`\n\t// so only one component gets passed in selector\n\tif wait {\n\t\terr = c.WaitForComponentDeletion(selector)\n\t\tif err != nil {\n\t\t\terrorList = append(errorList, err.Error())\n\t\t}\n\t}\n\n\t// Error string\n\terrString := strings.Join(errorList, \",\")\n\tif len(errString) != 0 {\n\t\treturn errors.New(errString)\n\t}\n\treturn nil\n\n}", "func DeleteRepositoryArchives(ctx context.Context) error {\n\tif err := repo_model.DeleteAllRepoArchives(); err != nil {\n\t\treturn err\n\t}\n\treturn storage.Clean(storage.RepoArchives)\n}", "func (mock *MockRepository) Delete(post *entity.Post) error {\n\treturn nil\n}", "func (repository *Repository) Delete(accountID string) error {\n\tcondition := entity.Account{ID: accountID}\n\treturn repository.connection.Delete(condition).Error\n}", "func (o *Repository) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) {\n\tif boil.HooksAreSkipped(ctx) {\n\t\treturn nil\n\t}\n\n\tfor _, hook := range repositoryAfterDeleteHooks {\n\t\tif err := hook(ctx, exec, o); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func TestGitCommandDeleteBranch(t *testing.T) {\n\ttype scenario struct {\n\t\ttestName string\n\t\tbranch string\n\t\tforce bool\n\t\tcommand func(string, ...string) *exec.Cmd\n\t\ttest func(error)\n\t}\n\n\tscenarios := []scenario{\n\t\t{\n\t\t\t\"Delete a branch\",\n\t\t\t\"test\",\n\t\t\tfalse,\n\t\t\tfunc(cmd string, args ...string) *exec.Cmd {\n\t\t\t\tassert.EqualValues(t, \"git\", cmd)\n\t\t\t\tassert.EqualValues(t, []string{\"branch\", \"-d\", \"test\"}, args)\n\n\t\t\t\treturn secureexec.Command(\"echo\")\n\t\t\t},\n\t\t\tfunc(err error) {\n\t\t\t\tassert.NoError(t, err)\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\t\"Force delete a branch\",\n\t\t\t\"test\",\n\t\t\ttrue,\n\t\t\tfunc(cmd string, args ...string) *exec.Cmd {\n\t\t\t\tassert.EqualValues(t, \"git\", cmd)\n\t\t\t\tassert.EqualValues(t, []string{\"branch\", \"-D\", \"test\"}, args)\n\n\t\t\t\treturn secureexec.Command(\"echo\")\n\t\t\t},\n\t\t\tfunc(err error) {\n\t\t\t\tassert.NoError(t, err)\n\t\t\t},\n\t\t},\n\t}\n\n\tfor _, s := range scenarios {\n\t\tt.Run(s.testName, func(t *testing.T) {\n\t\t\tgitCmd := NewDummyGitCommand()\n\t\t\tgitCmd.OSCommand.Command = s.command\n\t\t\ts.test(gitCmd.DeleteBranch(s.branch, s.force))\n\t\t})\n\t}\n}", "func (q repositoryQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif q.Query == nil {\n\t\treturn 0, errors.New(\"models: no repositoryQuery provided for delete 
all\")\n\t}\n\n\tqueries.SetDelete(q.Query)\n\n\tresult, err := q.Query.ExecContext(ctx, exec)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: unable to delete all from repositories\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"models: failed to get rows affected by deleteall for repositories\")\n\t}\n\n\treturn rowsAff, nil\n}", "func testCmdDeleteFilesystem(t *testing.T) {\n\tt.Log(\"TODO\")\n}", "func RunImagesDelete(ns string, config doit.Config, out io.Writer, args []string) error {\n\tclient := config.GetGodoClient()\n\n\tif len(args) != 1 {\n\t\treturn doit.NewMissingArgsErr(ns)\n\t}\n\n\tid, err := strconv.Atoi(args[0])\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, err = client.Images.Delete(id)\n\treturn err\n}", "func DeletePayment(repo repository.Repository) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tpaymentID := mux.Vars(r)[\"paymentID\"]\n\n\t\terr := repo.Delete(paymentID)\n\t\tif err != nil && err == repository.ErrNotFound {\n\t\t\tSendErrorResponse(w, r, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(http.StatusNoContent)\n\t\treturn\n\t}\n}", "func newDeleteCmd(clientset *client.ConfigSet) *cobra.Command {\n\tvar file string\n\tdeleteCmd := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"Delete knative resource\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\ts.Namespace = client.Namespace\n\t\t\tif err := s.DeleteYAML(file, args, concurrency, clientset); err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t},\n\t}\n\n\tdeleteCmd.Flags().StringVarP(&file, \"file\", \"f\", \"serverless.yaml\", \"Delete functions defined in yaml\")\n\tdeleteCmd.Flags().IntVarP(&concurrency, \"concurrency\", \"c\", 3, \"Number of concurrent deletion threads\")\n\tdeleteCmd.AddCommand(cmdDeleteConfiguration(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteRevision(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteService(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteRoute(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteChannel(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteTask(clientset))\n\tdeleteCmd.AddCommand(cmdDeleteTaskRun(clientset))\n\tdeleteCmd.AddCommand(cmdDeletePipelineResource(clientset))\n\n\treturn deleteCmd\n}", "func newDeleteCmd(out io.Writer, errOut io.Writer) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"deletes various entities managed by the Ziti Edge Controller\",\n\t\tLong: \"deletes various entities managed by the Ziti Edge Controller\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := cmd.Help()\n\t\t\tcmdhelper.CheckErr(err)\n\t\t},\n\t}\n\n\tnewOptions := func() *edgeOptions {\n\t\treturn &edgeOptions{\n\t\t\tCommonOptions: common.CommonOptions{\n\t\t\t\tOut: out,\n\t\t\t\tErr: errOut,\n\t\t\t},\n\t\t}\n\t}\n\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"api-session\", newOptions()))\n\tcmd.AddCommand(newDeleteAuthenticatorCmd(\"authenticator\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"ca\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"config\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"config-type\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"edge-router\", newOptions(), \"er\", \"ers\"))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"edge-router-policy\", newOptions(), \"erp\", \"erps\"))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"identity\", 
newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"service\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"service-edge-router-policy\", newOptions(), \"serp\", \"serps\"))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"service-policy\", newOptions(), \"sp\", \"sps\"))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"session\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"terminator\", newOptions()))\n\tcmd.AddCommand(newDeleteCmdForEntityType(\"posture-check\", newOptions()))\n\n\treturn cmd\n}", "func (a *App) Delete(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\n\tv := mux.Vars(r)\n\tidentifier := v[\"identifier\"]\n\n\tjsonpath := path.Join(a.ConfigDir, fmt.Sprintf(\"%s.json\", identifier))\n\tjsonpathexists := true\n\n\tconfigpath := path.Join(a.ConfigDir, fmt.Sprintf(\"%s.conf\", identifier))\n\tconfigpathexists := true\n\n\tif _, err = os.Stat(jsonpath); os.IsNotExist(err) {\n\t\tjsonpathexists = false\n\t}\n\tif err != nil && !os.IsNotExist(err) {\n\t\terr = errors.Wrapf(err, \"error checking file %s\", jsonpath)\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif _, err = os.Stat(configpath); os.IsNotExist(err) {\n\t\tconfigpathexists = false\n\t}\n\tif err != nil && !os.IsNotExist(err) {\n\t\terr = errors.Wrapf(err, \"error checking file %s\", configpath)\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif jsonpathexists {\n\t\tif err = os.Remove(jsonpath); err != nil {\n\t\t\terr = errors.Wrapf(err, \"error removing file %s\", jsonpath)\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif configpathexists {\n\t\tif err = os.Remove(configpath); err != nil {\n\t\t\terr = errors.Wrapf(err, \"error removing file %s\", configpath)\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif jsonpathexists || configpathexists {\n\t\tif err = a.SignalContainers(); err != nil {\n\t\t\terr = errors.Wrap(err, \"error HUPing container(s)\")\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n}", "func DeleteRepos(baseDir string, urlPrefix *url.URL, names map[string]struct{}, filter *Filter) error {\n\tpaths, err := ListRepos(baseDir, urlPrefix)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar toDelete []string\n\tfor _, p := range paths {\n\t\t_, exists := names[p]\n\t\trepoName := strings.Replace(p, filepath.Join(urlPrefix.Host, urlPrefix.Path), \"\", 1)\n\t\trepoName = strings.TrimPrefix(repoName, \"/\")\n\t\tif filter.Include(repoName) && !exists {\n\t\t\ttoDelete = append(toDelete, p)\n\t\t}\n\t}\n\n\tif len(toDelete) > 0 {\n\t\tlog.Printf(\"deleting repos %v\", toDelete)\n\t}\n\n\tvar errs []string\n\tfor _, d := range toDelete {\n\t\tif err := os.RemoveAll(filepath.Join(baseDir, d)); err != nil {\n\t\t\terrs = append(errs, err.Error())\n\t\t}\n\t}\n\tif len(errs) > 0 {\n\t\treturn fmt.Errorf(\"errors: %v\", errs)\n\t}\n\treturn nil\n}", "func (repo *Repository) Delete(ctx context.Context, claims auth.Claims, req DeleteRequest) error {\n\tspan, ctx := tracer.StartSpanFromContext(ctx, \"internal.expenditure.Delete\")\n\tdefer span.Finish()\n\n\t// Validate the request.\n\tv := webcontext.Validator()\n\terr := v.Struct(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Ensure the claims can modify the project specified in the request.\n\tif claims.Audience == \"\" {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\t// Admin users can 
update Categories they have access to.\n\tif !claims.HasRole(auth.RoleAdmin) {\n\t\treturn errors.WithStack(ErrForbidden)\n\t}\n\n\t_, err = models.RepsExpenses(models.RepsExpenseWhere.ID.EQ(req.ID)).DeleteAll(ctx, repo.DbConn)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\n\treturn nil\n}", "func (r GopassRepo) DeleteState() error {\n\tif err := r.prepare(); err != nil {\n\t\treturn err\n\t}\n\tcmd := exec.Command(\"gopass\", \"rm\", \"-f\", r.config.Repo.State)\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\tcmd.Stdin = os.Stdin\n\tif err := cmd.Run(); err != nil {\n\t\treturn fmt.Errorf(\"gopass failed with %s\", err)\n\t}\n\treturn nil\n}", "func NewDeleteCommand() *cobra.Command {\n\tvar (\n\t\tflags listFlags\n\t\tall bool\n\t\tallNamespaces bool\n\t\tdryRun bool\n\t)\n\tvar command = &cobra.Command{\n\t\tUse: \"delete [--dry-run] [WORKFLOW...|[--all] [--older] [--completed] [--resubmitted] [--prefix PREFIX] [--selector SELECTOR]]\",\n\t\tShort: \"delete workflows\",\n\t\tExample: `# Delete a workflow:\n\n argo delete my-wf\n\n# Delete the latest workflow:\n\n argo delete @latest\n`,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tctx, apiClient := client.NewAPIClient()\n\t\t\tserviceClient := apiClient.NewWorkflowServiceClient()\n\t\t\tvar workflows wfv1.Workflows\n\t\t\tif !allNamespaces {\n\t\t\t\tflags.namespace = client.Namespace()\n\t\t\t}\n\t\t\tfor _, name := range args {\n\t\t\t\tworkflows = append(workflows, wfv1.Workflow{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{Name: name, Namespace: flags.namespace},\n\t\t\t\t})\n\t\t\t}\n\t\t\tif all || flags.completed || flags.resubmitted || flags.prefix != \"\" || flags.labels != \"\" || flags.finishedAfter != \"\" {\n\t\t\t\tlisted, err := listWorkflows(ctx, serviceClient, flags)\n\t\t\t\terrors.CheckError(err)\n\t\t\t\tworkflows = append(workflows, listed...)\n\t\t\t}\n\t\t\tfor _, wf := range workflows {\n\t\t\t\tif !dryRun {\n\t\t\t\t\t_, err := serviceClient.DeleteWorkflow(ctx, &workflowpkg.WorkflowDeleteRequest{Name: wf.Name, Namespace: wf.Namespace})\n\t\t\t\t\tif err != nil && status.Code(err) == codes.NotFound {\n\t\t\t\t\t\tfmt.Printf(\"Workflow '%s' not found\\n\", wf.Name)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t\terrors.CheckError(err)\n\t\t\t\t\tfmt.Printf(\"Workflow '%s' deleted\\n\", wf.Name)\n\t\t\t\t} else {\n\t\t\t\t\tfmt.Printf(\"Workflow '%s' deleted (dry-run)\\n\", wf.Name)\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t}\n\n\tcommand.Flags().BoolVar(&allNamespaces, \"all-namespaces\", false, \"Delete workflows from all namespaces\")\n\tcommand.Flags().BoolVar(&all, \"all\", false, \"Delete all workflows\")\n\tcommand.Flags().BoolVar(&flags.completed, \"completed\", false, \"Delete completed workflows\")\n\tcommand.Flags().BoolVar(&flags.resubmitted, \"resubmitted\", false, \"Delete resubmitted workflows\")\n\tcommand.Flags().StringVar(&flags.prefix, \"prefix\", \"\", \"Delete workflows by prefix\")\n\tcommand.Flags().StringVar(&flags.finishedAfter, \"older\", \"\", \"Delete completed workflows finished before the specified duration (e.g. 
10m, 3h, 1d)\")\n\tcommand.Flags().StringVarP(&flags.labels, \"selector\", \"l\", \"\", \"Selector (label query) to filter on, not including uninitialized ones\")\n\tcommand.Flags().BoolVar(&dryRun, \"dry-run\", false, \"Do not delete the workflow, only print what would happen\")\n\treturn command\n}", "func (l *Linux) Delete(cfg api.Config, command string, hostname string, user *user.User, vault vault.TerraVault) error {\n\tif vault != nil {\n\t\terr := vault.Delete()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmsg := fmt.Sprintf(\"- the credential object '%s' has been removed\", hostname)\n\t\thelpers.Logging(cfg, msg, \"INFO\")\n\n\t\tif command == \"delete\" {\n\t\t\tmsg := fmt.Sprintf(\"The credential object '%s' has been removed\", hostname)\n\t\t\tfmt.Fprintf(color.Output, \"%s: %s\\n\", color.GreenString(\"SUCCESS\"), msg)\n\t\t}\n\n\t\treturn err\n\t}\n\n\terr := keyring.Delete(hostname, string(user.Username))\n\tif err == nil {\n\t\tmsg := fmt.Sprintf(\"- the credential object '%s' has been removed\", hostname)\n\t\thelpers.Logging(cfg, msg, \"INFO\")\n\n\t\tif command == \"delete\" {\n\t\t\tmsg := fmt.Sprintf(\"The credential object '%s' has been removed\", hostname)\n\t\t\tfmt.Fprintf(color.Output, \"%s: %s\\n\", color.GreenString(\"SUCCESS\"), msg)\n\t\t}\n\n\t\treturn err\n\t}\n\n\thelpers.Logging(cfg, fmt.Sprintf(\"- %s\", err), \"ERROR\")\n\tif command == \"delete\" {\n\t\tfmt.Fprintf(color.Output, \"%s: You do not have permission to modify this credential\\n\", color.RedString(\"ERROR\"))\n\t}\n\n\treturn nil\n}", "func DeleteBusinessCompanyServiceOperationHourRepository(ctx context.Context, operationHourID int64) (*pb.DeleteBusinessCompanyServiceOperationHourResponse, error) {\n\tconn, err := pgx.Connect(ctx, config.PostgresConnection)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer conn.Close(ctx)\n\n\n\n\tsqlQuery := `DELETE FROM business_company_service_operation_hours WHERE id=$1 RETURNING *;`\n\n\trow := conn.QueryRow(ctx, sqlQuery, operationHourID)\n\n\tvar operationHour pb.BusinessCompanyServiceOperationHour\n\n\terr = row.Scan(\n\t\t&operationHour.ServiceOperationHourID,\n\t\t&operationHour.BusinessCompanyID,\n\t\t&operationHour.BusinessServiceID,\n\t\t&operationHour.DayOfWeek,\n\t\t&operationHour.OpenTime,\n\t\t&operationHour.CloseTime,\n\t)\n\n\n\n\treturn &pb.DeleteBusinessCompanyServiceOperationHourResponse{\n\t\tBusinessCompanyServiceOperationHour: &operationHour,\n\t}, nil\n}", "func TestDeleteCmdWithProject(t *testing.T) {\n\tfuncYaml := `name: bar\nnamespace: \"\"\nruntime: go\nimage: \"\"\nimageDigest: \"\"\nbuilder: quay.io/boson/faas-go-builder\nbuilderMap:\n default: quay.io/boson/faas-go-builder\nenvs: []\nannotations: {}\n`\n\ttmpDir, err := ioutil.TempDir(\"\", \"bar\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer os.RemoveAll(tmpDir)\n\n\tf, err := os.Create(filepath.Join(tmpDir, \"func.yaml\"))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer f.Close()\n\n\t_, err = f.WriteString(funcYaml)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tf.Close()\n\n\toldWD, err := os.Getwd()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer func() {\n\t\terr = os.Chdir(oldWD)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}()\n\terr = os.Chdir(tmpDir)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttr := &testRemover{}\n\tcmd := NewDeleteCmd(func(ns string, verbose bool) (fn.Remover, error) {\n\t\treturn tr, nil\n\t})\n\n\tcmd.SetArgs([]string{\"-p\", \".\"})\n\terr = cmd.Execute()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif 
tr.invokedWith == nil {\n\t\tt.Fatal(\"fn.Remover has not been invoked\")\n\t}\n\n\tif *tr.invokedWith != \"bar\" {\n\t\tt.Fatalf(\"expected fn.Remover to be called with 'bar', but was called with '%s'\", *tr.invokedWith)\n\t}\n}", "func (d *driver) Delete(ctx context.Context, path string) error {\n\tdefer debugTime()()\n\td.rootlock.Lock()\n\tdefer d.rootlock.Unlock()\n\tlog.Error(\"roothash: \", d.roothash)\n\tnewParentHash, err := d.shell.Patch(d.roothash, \"rm-link\", path[1:])\n\tif err != nil {\n\t\tlog.Error(\"delete err: \", err)\n\t\tif err.Error() == \"merkledag: not found\" {\n\t\t\tfmt.Println(\"PATHNOTFOUND HAPPY HAPPY JOY JOY\")\n\t\t\treturn storagedriver.PathNotFoundError{Path: path}\n\t\t} else {\n\t\t\tfmt.Println(\"GOT A BAD ERROR: \", err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\td.roothash = newParentHash\n\td.publishHash(newParentHash)\n\treturn nil\n}", "func TearDown(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\n\tcontainer := di.Get()\n\n\tif err := container.TaskRepository.DeleteAll(ctx); err != nil {\n\t\thttp.Error(w, \"taskRepository.DeleteAll error: \"+err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif err := container.UserRepository.DeleteAll(ctx); err != nil {\n\t\thttp.Error(w, \"userRepository.DeleteAll error: \"+err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tfmt.Fprint(w, \"done\")\n}", "func (repo *PlatformRepository) Delete(payload *Platform) (*Platform, error) {\n\terr := repo.db.Delete(Platform{}, \"id = ?\", payload.Id).Error\n\n\treturn payload, err\n}", "func DeleteOperator(namespace string) error {\n\tif namespace == \"\" {\n\t\tnamespace = DEFAULT_NAMESPACE\n\t}\n\tif err := sh.RunV(\"helm\", \"delete\", \"-n\", namespace, \"kedahttp\"); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func TestGitCreateGetYamlDelete(testing *testing.T) {\n\ttestCreateGetYamlDelete(gitRepo, gitRemoteRepository, testing)\n}", "func TestDeleteCmdWithoutProject(t *testing.T) {\n\ttr := &testRemover{}\n\tcmd := NewDeleteCmd(func(ns string, verbose bool) (fn.Remover, error) {\n\t\treturn tr, nil\n\t})\n\n\tcmd.SetArgs([]string{\"foo\"})\n\terr := cmd.Execute()\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif tr.invokedWith == nil {\n\t\tt.Fatal(\"fn.Remover has not been invoked\")\n\t}\n\n\tif *tr.invokedWith != \"foo\" {\n\t\tt.Fatalf(\"expected fn.Remover to be called with 'foo', but was called with '%s'\", *tr.invokedWith)\n\t}\n}", "func (r *sampleRepository) Delete(id uuid.UUID) error {\n\tsample, err := r.Get(id)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn r.DB.Delete(&sample).Error\n}", "func (c *client) DeleteRepoLabel(org, repo, label string) error {\n\tdurationLogger := c.log(\"DeleteRepoLabel\", org, repo, label)\n\tdefer durationLogger()\n\n\t_, err := c.request(&request{\n\t\tmethod: http.MethodDelete,\n\t\taccept: \"application/vnd.github.symmetra-preview+json\", // allow the description field -- https://developer.github.com/changes/2018-02-22-label-description-search-preview/\n\t\tpath: fmt.Sprintf(\"/repos/%s/%s/labels/%s\", org, repo, label),\n\t\torg: org,\n\t\trequestBody: Label{Name: label},\n\t\texitCodes: []int{204},\n\t}, nil)\n\treturn err\n}", "func (m *DataRepositoryMongo) Delete(collectionName string, filter interface{}, justOne bool) <-chan d.RepositoryResult {\n\tresult := make(chan d.RepositoryResult)\n\tgo func() {\n\n\t\tvar (\n\t\t\terr error\n\t\t\tcollection *mongo.Collection\n\t\t\tdeleteResult *mongo.DeleteResult\n\t\t\tctx context.Context\n\t\t)\n\t\tcollection, 
err = m.Client.GetCollection(collectionName)\n\t\tif err != nil {\n\t\t\tlog.Error(\"Get collection %s err (%s)! \\n\", collectionName, err.Error())\n\t\t\tresult <- d.RepositoryResult{Error: err}\n\t\t}\n\n\t\tctx, err = m.Client.GetContext()\n\t\tif err != nil {\n\t\t\tlog.Error(\"Get context err (%s)! \\n\", err.Error())\n\t\t\tresult <- d.RepositoryResult{Error: err}\n\t\t}\n\n\t\t// Execute delete\n\t\tif justOne {\n\t\t\tdeleteResult, err = collection.DeleteOne(ctx, filter)\n\t\t} else {\n\t\t\tdeleteResult, err = collection.DeleteMany(ctx, filter)\n\t\t}\n\t\tif err != nil {\n\t\t\tlog.Error(\"Delete error (%s)! \\n\", err.Error())\n\t\t\tresult <- d.RepositoryResult{Error: err}\n\t\t}\n\n\t\tresult <- d.RepositoryResult{Result: deleteResult.DeletedCount}\n\t\tclose(result)\n\t}()\n\n\treturn result\n}", "func DeleteBranch(ctx *context.APIContext) {\n\t// swagger:operation DELETE /repos/{owner}/{repo}/branches/{branch} repository repoDeleteBranch\n\t// ---\n\t// summary: Delete a specific branch from a repository\n\t// produces:\n\t// - application/json\n\t// parameters:\n\t// - name: owner\n\t// in: path\n\t// description: owner of the repo\n\t// type: string\n\t// required: true\n\t// - name: repo\n\t// in: path\n\t// description: name of the repo\n\t// type: string\n\t// required: true\n\t// - name: branch\n\t// in: path\n\t// description: branch to delete\n\t// type: string\n\t// required: true\n\t// responses:\n\t// \"204\":\n\t// \"$ref\": \"#/responses/empty\"\n\t// \"403\":\n\t// \"$ref\": \"#/responses/error\"\n\t// \"404\":\n\t// \"$ref\": \"#/responses/notFound\"\n\n\tif ctx.Repo.Repository.IsEmpty {\n\t\tctx.Error(http.StatusNotFound, \"\", \"Git Repository is empty.\")\n\t\treturn\n\t}\n\n\tif ctx.Repo.Repository.IsArchived {\n\t\tctx.Error(http.StatusForbidden, \"\", \"Git Repository is archived.\")\n\t\treturn\n\t}\n\n\tif ctx.Repo.Repository.IsMirror {\n\t\tctx.Error(http.StatusForbidden, \"\", \"Git Repository is a mirror.\")\n\t\treturn\n\t}\n\n\tbranchName := ctx.Params(\"*\")\n\n\tif ctx.Repo.Repository.IsEmpty {\n\t\tctx.Error(http.StatusForbidden, \"\", \"Git Repository is empty.\")\n\t\treturn\n\t}\n\n\t// check whether branches of this repository has been synced\n\ttotalNumOfBranches, err := git_model.CountBranches(ctx, git_model.FindBranchOptions{\n\t\tRepoID: ctx.Repo.Repository.ID,\n\t\tIsDeletedBranch: util.OptionalBoolFalse,\n\t})\n\tif err != nil {\n\t\tctx.Error(http.StatusInternalServerError, \"CountBranches\", err)\n\t\treturn\n\t}\n\tif totalNumOfBranches == 0 { // sync branches immediately because non-empty repository should have at least 1 branch\n\t\t_, err = repo_module.SyncRepoBranches(ctx, ctx.Repo.Repository.ID, 0)\n\t\tif err != nil {\n\t\t\tctx.ServerError(\"SyncRepoBranches\", err)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif ctx.Repo.Repository.IsArchived {\n\t\tctx.Error(http.StatusForbidden, \"IsArchived\", fmt.Errorf(\"can not delete branch of an archived repository\"))\n\t\treturn\n\t}\n\tif ctx.Repo.Repository.IsMirror {\n\t\tctx.Error(http.StatusForbidden, \"IsMirrored\", fmt.Errorf(\"can not delete branch of an mirror repository\"))\n\t\treturn\n\t}\n\n\tif err := repo_service.DeleteBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil {\n\t\tswitch {\n\t\tcase git.IsErrBranchNotExist(err):\n\t\t\tctx.NotFound(err)\n\t\tcase errors.Is(err, repo_service.ErrBranchIsDefault):\n\t\t\tctx.Error(http.StatusForbidden, \"DefaultBranch\", fmt.Errorf(\"can not delete default branch\"))\n\t\tcase errors.Is(err, 
git_model.ErrBranchIsProtected):\n\t\t\tctx.Error(http.StatusForbidden, \"IsProtectedBranch\", fmt.Errorf(\"branch protected\"))\n\t\tdefault:\n\t\t\tctx.Error(http.StatusInternalServerError, \"DeleteBranch\", err)\n\t\t}\n\t\treturn\n\t}\n\n\tctx.Status(http.StatusNoContent)\n}", "func delete(cl *lib.Client, list *[]extAQLFileInfo) error {\n\n\t// range over list and make delete calls to Artifactory\n\tfor _, d := range *list {\n\n\t\t//construct request\n\t\tdl := []string{\n\t\t\td.Repo,\n\t\t\td.Path,\n\t\t\td.Name,\n\t\t}\n\t\tdlj := strings.Join(dl, \"/\")\n\n\t\tvar request lib.Request\n\t\trequest.Verb = \"DELETE\"\n\t\trequest.Path = \"/\" + (dlj)\n\n\t\t// make request\n\t\t_, err := cl.HTTPRequest(request)\n\t\tif err != nil {\n\t\t\texitErrorf(\"could not delete %q: \", dlj, err)\n\t\t}\n\t\tfmt.Println(\"deleted: \", dlj)\n\t}\n\treturn nil\n}", "func (s *BasejossListener) ExitDeleteCmdCMD(ctx *DeleteCmdCMDContext) {}", "func Delete() error {\n\n}", "func DeleteSubCommands() []*cobra.Command {\n\n\tvar deleteAuthProvider = &cobra.Command{\n\t\tUse: \"auth-provider\",\n\t\tAliases: []string{\"auth-providers\"},\n\t\tRunE: actionDeleteAuthProvider,\n\t\tValidArgsFunction: authProvidersAutoCompleteFunc,\n\t\tExample: \"space-cli delete auth-provider providerID --project myproject\",\n\t}\n\n\treturn []*cobra.Command{deleteAuthProvider}\n}", "func (c *Command) Delete(ctx *gin.Context) {\n\ttoken := html.EscapeString(ctx.Param(\"token\"))\n\tname := html.EscapeString(ctx.Param(\"name\"))\n\tfilter := map[string]interface{}{\"token\": token, \"name\": name}\n\tresp, err := c.Conn.GetByFilter(c.Table, filter, 1)\n\n\tif err != nil {\n\t\tutil.NiceError(ctx, err, http.StatusBadRequest)\n\t\treturn\n\t}\n\tif resp == nil {\n\t\t// Resource doesn't exist, return a 404\n\t\tctx.AbortWithStatus(http.StatusNotFound)\n\t\treturn\n\t}\n\n\trs, valid := resp[0].(map[string]interface{})\n\tif !valid {\n\t\tlog.Errorf(\"[%s] - Unable to typecast response to correct type\", c.Table)\n\t\tctx.AbortWithStatus(http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t_, err = c.Conn.Disable(c.Table, rs[\"id\"].(string))\n\tif err != nil {\n\t\tutil.NiceError(ctx, err, http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Success\n\tctx.Header(\"x-resource-id-removed\", rs[\"id\"].(string))\n\tctx.Status(http.StatusOK)\n}", "func deleteCommand() *cobra.Command {\n\tvar controller net.IP\n\tvar iqn string\n\tvar lun int\n\n\tvar deleteCmd = &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"Deletes an iSCSI target\",\n\t\tLong: `Deletes an iSCSI target by stopping and deleting the pacemaker resource\nprimitives and removing the linstor resources.\n\nFor example:\nlinstor-iscsi delete --iqn=iqn.2019-08.com.linbit:example --lun=1`,\n\t\tArgs: cobra.NoArgs,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tif !cmd.Flags().Changed(\"controller\") {\n\t\t\t\tfoundIP, err := crmcontrol.FindLinstorController()\n\t\t\t\tif err == nil { // it might be ok to not find it...\n\t\t\t\t\tcontroller = foundIP\n\t\t\t\t}\n\t\t\t}\n\t\t\tlinstorCfg := linstorcontrol.Linstor{\n\t\t\t\tLoglevel: log.GetLevel().String(),\n\t\t\t\tControllerIP: controller,\n\t\t\t}\n\t\t\ttargetCfg := targetutil.TargetConfig{\n\t\t\t\tIQN: iqn,\n\t\t\t\tLUNs: []*targetutil.LUN{&targetutil.LUN{ID: uint8(lun)}},\n\t\t\t}\n\t\t\ttarget := cliNewTargetMust(cmd, targetCfg)\n\t\t\tiscsiCfg := &iscsi.ISCSI{Linstor: linstorCfg, Target: target}\n\t\t\tif err := iscsiCfg.DeleteResource(); err != nil 
{\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t},\n\t}\n\n\tdeleteCmd.Flags().StringVarP(&iqn, \"iqn\", \"i\", \"\", \"Set the iSCSI Qualified Name (e.g., iqn.2019-08.com.linbit:unique) (required)\")\n\tdeleteCmd.Flags().IntVarP(&lun, \"lun\", \"l\", 1, \"Set the LUN Number (required)\")\n\tdeleteCmd.Flags().IPVarP(&controller, \"controller\", \"c\", net.IPv4(127, 0, 0, 1), \"Set the IP of the linstor controller node\")\n\n\tdeleteCmd.MarkFlagRequired(\"iqn\")\n\tdeleteCmd.MarkFlagRequired(\"lun\")\n\n\treturn deleteCmd\n}", "func (c DBStoreDeleteUploadsWithoutRepositoryFuncCall) Args() []interface{} {\n\treturn []interface{}{c.Arg0, c.Arg1}\n}" ]
[ "0.6942299", "0.6630733", "0.6572339", "0.6517514", "0.6491354", "0.64621", "0.63369524", "0.6326031", "0.62508124", "0.624161", "0.6167326", "0.61609155", "0.6152381", "0.61082166", "0.6054995", "0.6052169", "0.592741", "0.5922558", "0.5896064", "0.5872743", "0.5736741", "0.56613404", "0.56404907", "0.56289953", "0.5612883", "0.5607482", "0.55733645", "0.5565433", "0.5555146", "0.5521368", "0.54974174", "0.54945266", "0.54834974", "0.5463921", "0.54588544", "0.54507345", "0.54418474", "0.5440875", "0.5424474", "0.54017013", "0.53987586", "0.5397131", "0.5396171", "0.5386954", "0.53668433", "0.5356068", "0.53488433", "0.53366065", "0.5322003", "0.53177685", "0.5306219", "0.53028584", "0.5294959", "0.5293505", "0.5290582", "0.52759564", "0.5265012", "0.52630013", "0.5261719", "0.5260556", "0.5256125", "0.5241062", "0.52350736", "0.5233494", "0.52246004", "0.5219898", "0.5215026", "0.52104867", "0.52007806", "0.5199525", "0.5197733", "0.5196935", "0.519374", "0.519197", "0.5183623", "0.5181448", "0.5180376", "0.5180065", "0.51791537", "0.5162152", "0.51602113", "0.5154237", "0.5147067", "0.51282495", "0.512107", "0.5114419", "0.51005", "0.51001745", "0.5099113", "0.5091787", "0.50747496", "0.50732666", "0.50712746", "0.50693053", "0.50687057", "0.50558347", "0.5054981", "0.50519955", "0.50477636", "0.5040864" ]
0.69687325
0
Add CORS headers to an HTTP response object to allow for cross-origin requests made to this server. Don't remember if using CORS is relevant for geolocation.
func addCorsHeader(res http.ResponseWriter) {
	headers := res.Header()
	headers.Add("Access-Control-Allow-Origin", "*")
	headers.Add("Vary", "Origin")
	headers.Add("Vary", "Access-Control-Request-Method")
	headers.Add("Vary", "Access-Control-Request-Headers")
	headers.Add("Access-Control-Allow-Headers", "Content-Type, Origin, Accept, token")
	headers.Add("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
}
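A minimal sketch of how a helper like addCorsHeader would typically be wired into a net/http handler; the /location route, the JSON payload, and the port are illustrative assumptions, and addCorsHeader is assumed to live in the same package as this snippet.

package main

import (
	"encoding/json"
	"net/http"
)

// locationHandler applies the CORS headers defined by addCorsHeader (above, assumed
// same package), answers OPTIONS preflight requests with the headers alone, and
// otherwise serves a placeholder JSON payload.
func locationHandler(res http.ResponseWriter, req *http.Request) {
	addCorsHeader(res) // set Access-Control-* headers on every response

	// A preflight (OPTIONS) request only needs the headers, not a body.
	if req.Method == http.MethodOptions {
		res.WriteHeader(http.StatusNoContent)
		return
	}

	res.Header().Set("Content-Type", "application/json")
	json.NewEncoder(res).Encode(map[string]string{"status": "ok"})
}

func main() {
	http.HandleFunc("/location", locationHandler)
	http.ListenAndServe(":8080", nil)
}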
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func addCORSHeaders(w http.ResponseWriter, r *http.Request) {\n\t// origin := r.Header.Get(\"Origin\")\n\t// for _, o := range cfg.AllowedOrigins {\n\t// \tif origin == o {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", r.Header.Get(\"Origin\"))\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, PUT, POST, DELETE, OPTIONS\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type,Authorization\")\n\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t// return\n\t// \t}\n\t// }\n}", "func setupResponse(writer *http.ResponseWriter, request *http.Request) {\n\t(*writer).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t(*writer).Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t(*writer).Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n}", "func ConfigureCors(res *http.ResponseWriter) {\n\n\t(*res).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t(*res).Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t(*res).Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, username-from-header\")\n}", "func AddCORSHeader(w http.ResponseWriter, allowOrigin string) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", allowOrigin)\n}", "func corsHandler(resWriter http.ResponseWriter, req *http.Request) {\n\tresWriter.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tresWriter.Header().Set(\"Access-Control-Allow-Headers\", \"content-type,authorization\")\n\tresWriter.Header().Set(\"Access-Control-Allow-Methods\", \"OPTIONS,POST\")\n}", "func setAccessControlResponseHeaders (w http.ResponseWriter, req *http.Request) {\n\tif origin := req.Header.Get(\"Origin\"); origin != \"\" {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t}\n}", "func enableCors(w *http.ResponseWriter) {\n\t(*w).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t(*w).Header().Set(\"Access-Control-Allow-Methods\", \"GET\")\n\t(*w).Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n}", "func (plugin *Cors) writeHeaders(w http.ResponseWriter, r *http.Request, preflight bool) {\n\tw.Header().Add(\"Vary\", \"Origin\")\n\tif preflight {\n\t\tw.Header().Add(\"Vary\", \"Access-Control-Request-Method\")\n\t\tw.Header().Add(\"Vary\", \"Access-Control-Request-Headers\")\n\t}\n\n\t// Check origin.\n\torigin := r.Header.Get(\"Origin\")\n\tif !plugin.isOriginAllowed(origin) {\n\t\treturn\n\t}\n\n\t// Check method\n\tmethod := r.Method\n\tif preflight {\n\t\tmethod = r.Header.Get(\"Access-Control-Request-Method\")\n\t}\n\tif !plugin.isMethodAllowed(method) {\n\t\treturn\n\t}\n\n\t// Check requested headers if preflight\n\theaders := r.Header.Get(\"Access-Control-Request-Headers\")\n\tif preflight && !plugin.areHeadersAllowed(strings.Split(headers, \",\")) {\n\t\treturn\n\t}\n\n\t// Write relevant headers\n\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\tif plugin.allowCredentials {\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t}\n\tif len(plugin.exposedHeaders) > 0 {\n\t\tw.Header().Set(\"Access-Control-Exposed-Headers\", strings.Join(plugin.exposedHeaders, 
\",\"))\n\t}\n\tif preflight {\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", method)\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", headers)\n\t\tif plugin.maxAge > 0 {\n\t\t\tw.Header().Set(\"Access-Control-Max-Age\", strconv.FormatInt(plugin.maxAge, 10))\n\t\t}\n\t}\n}", "func setAccessControlHeaders(w http.ResponseWriter) {\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Authorization, Content-Type, Origin\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Date\")\n}", "func respondCORS(w http.ResponseWriter, code int) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type,Origin,Access,token,Authorization,appid\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET,POST,DELETE,OPTIONS\")\n\n\tw.WriteHeader(code)\n}", "func preflight(c *gin.Context) {\n\tc.Header(\"Access-Control-Allow-Origin\", \"*\")\n\tc.Header(\"Access-Control-Allow-Headers\", \"access-control-allow-origin, access-control-allow-headers, content-type\")\n\tc.JSON(http.StatusOK, struct{}{})\n}", "func addHeaders(h http.Handler) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Allow-Control-Allow-Origin\", \"*\")\n\t\th.ServeHTTP(w, r)\n\t}\n}", "func (server *HTTPServer) addCORSHeaders(nextHandler http.Handler) http.Handler {\n\treturn http.HandlerFunc(\n\t\tfunc(w http.ResponseWriter, r *http.Request) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tnextHandler.ServeHTTP(w, r)\n\t\t})\n}", "func (server *HTTPServer) addCORSHeaders(nextHandler http.Handler) http.Handler {\n\treturn http.HandlerFunc(\n\t\tfunc(w http.ResponseWriter, r *http.Request) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tnextHandler.ServeHTTP(w, r)\n\t\t})\n}", "func EnableCors(res *http.ResponseWriter) {\n\t(*res).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n}", "func enableCors(w *http.ResponseWriter) {\n\t(*w).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n}", "func enableCors(w *http.ResponseWriter) {\n\t(*w).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n}", "func RespondCORS(resp http.ResponseWriter) {\n\t// CORS headers\n\tresp.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tresp.Header().Set(\"Access-Control-Allow-Headers\", \"X-Requested-With, Content-Type\")\n}", "func CORS(w http.ResponseWriter, r *http.Request, next http.HandlerFunc) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tnext(w, r)\n}", "func EnableCors(w *http.ResponseWriter, r *http.Request) {\n\t(*w).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n (*w).Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, 
DELETE\")\n (*w).Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n}", "func (s *server) cors(w http.ResponseWriter) {\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"accept, content-type\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\t// w.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"http://grafana.playland-01-cluster.k8s.cs.swiftnav.com\")\n}", "func (handler Handler) CORSWrapper(hf func(http.ResponseWriter, *http.Request)) func(http.ResponseWriter, *http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Content-Type\", handler.ContentType)\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Authorization, Cache-Control\")\n\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, DELETE, OPTIONS\")\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\tlog.Printf(\"incoming CORS request\")\n\t\t} else {\n\t\t\tlog.Printf(\"incoming request\")\n\t\t\tw.Header().Set(\"Content-Type\", handler.ContentType)\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization\")\n\t\t\thf(w, r)\n\t\t}\n\t}\n\n}", "func SendPreflightResponse(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(time.Now().Local().Format(\"2006.01.02 15:04:05\"), \"response: sending preflight response for CORS\")\n\tw.Header().Add(\"Access-Control-Allow-Origin\", host)\n\tw.Header().Add(\"Access-Control-Allow-Headers\", \"Content-Type, Origin, Accept, Authorization\")\n\tw.Header().Add(\"Access-Control-Allow-Credentials\", \"true\")\n\tw.Header().Add(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, DELETE, OPTIONS\")\n\n\tw.WriteHeader(http.StatusOK)\n}", "func cors(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\torigin := r.Header.Get(\"Origin\")\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\tif origin == \"\" {\n\t\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\t\tlog.Error(\"Origin needed for preflight\")\n\t\t\t}\n\t\t\theaders := w.Header()\n\t\t\theaders.Add(\"Access-Control-Allow-Origin\", origin)\n\t\t\theaders.Add(\"Vary\", \"Origin\")\n\t\t\theaders.Add(\"Vary\", \"Access-Control-Request-Method\")\n\t\t\theaders.Add(\"Vary\", \"Access-Control-Request-Headers\")\n\t\t\theaders.Set(\"Access-Control-Allow-Methods\", allowMethods)\n\t\t\theaders.Set(\"Access-Control-Allow-Headers\", strings.Join(allowedHeaders, \", \"))\n\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t} else {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Cache-Control\", \"no-store, no-cache, must-revalidate, post-check=0, pre-check=0\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", strings.Join(allowedHeaders, \", \"))\n\t\t\tnext.ServeHTTP(w, r)\n\t\t}\n\t})\n}", "func enableCors(w *http.ResponseWriter) {\n\t// cross origin resource sharing\n\t(*w).Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t//(*w).Header().Set(\"Access-Control-Allow-Origin\", \"http://localhost:8070/products\")\n\t//w.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET\")\n}", "func handleUserOrigin(h goa.Handler) goa.Handler {\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := 
req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"*\") {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\trw.Header().Set(\"Access-Control-Expose-Headers\", \"Age, Cache-Control, Content-Length, Content-Type, Date, Expires, Host, Keep-Alive, Last-Modified, Location, Server, Status, Strict-Transport-Security\")\n\t\t\trw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, PATCH, DELETE\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Accept-CH, Accept-Charset, Accept-Datetime, Accept-Encoding, Accept-Ext, Accept-Features, Accept-Language, Accept-Params, Accept-Ranges, Access-Control-Allow-Credentials, Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Age, Allow, Alternates, Authentication-Info, Authorization, C-Ext, C-Man, C-Opt, C-PEP, C-PEP-Info, CONNECT, Cache-Control, Compliance, Connection, Content-Base, Content-Disposition, Content-Encoding, Content-ID, Content-Language, Content-Length, Content-Location, Content-MD5, Content-Range, Content-Script-Type, Content-Security-Policy, Content-Style-Type, Content-Transfer-Encoding, Content-Type, Content-Version, Cookie, Cost, DAV, DELETE, DNT, DPR, Date, Default-Style, Delta-Base, Depth, Derived-From, Destination, Differential-ID, Digest, ETag, Expect, Expires, Ext, From, GET, GetProfile, HEAD, HTTP-date, Host, IM, If, If-Match, If-Modified-Since, If-None-Match, If-Range, If-Unmodified-Since, Keep-Alive, Label, Last-Event-ID, Last-Modified, Link, Location, Lock-Token, MIME-Version, Man, Max-Forwards, Media-Range, Message-ID, Meter, Negotiate, Non-Compliance, OPTION, OPTIONS, OWS, Opt, Optional, Ordering-Type, Origin, Overwrite, P3P, PEP, PICS-Label, POST, PUT, Pep-Info, Permanent, Position, Pragma, ProfileObject, Protocol, Protocol-Query, Protocol-Request, Proxy-Authenticate, Proxy-Authentication-Info, Proxy-Authorization, Proxy-Features, Proxy-Instruction, Public, RWS, Range, Referer, Refresh, Resolution-Hint, Resolver-Location, Retry-After, Safe, Sec-Websocket-Extensions, Sec-Websocket-Key, Sec-Websocket-Origin, Sec-Websocket-Protocol, Sec-Websocket-Version, Security-Scheme, Server, Set-Cookie, Set-Cookie2, SetProfile, SoapAction, Status, Status-URI, Strict-Transport-Security, SubOK, Subst, Surrogate-Capability, Surrogate-Control, TCN, TE, TRACE, Timeout, Title, Trailer, Transfer-Encoding, UA-Color, UA-Media, UA-Pixels, UA-Resolution, UA-Windowpixels, URI, Upgrade, User-Agent, Variant-Vary, Vary, Version, Via, Viewport-Width, WWW-Authenticate, Want-Digest, Warning, Width, X-Content-Duration, X-Content-Security-Policy, X-Content-Type-Options, X-CustomHeader, X-DNSPrefetch-Control, X-Forwarded-For, X-Forwarded-Port, X-Forwarded-Proto, X-Frame-Options, X-Modified, X-OTHER, X-PING, X-PINGOTHER, X-Powered-By, X-Requested-With\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func corsHttpOrigin(reqUrl string, redirects *[]string) {\n\torigin, err := url.Parse(reqUrl)\n\tif err != 
nil {\n\t\tlog.Fatal(err)\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\treqHeaders := map[string]string{\n\t\t\"Origin\": \"http://\" + origin.Host,\n\t\t\"User-Agent\": *userAgent,\n\t\t\"Access-Control-Request-Headers\": *access_control_request_headers,\n\t\t\"Access-Control-Request-Method\": *access_control_request_method,\n\t}\n\n\tfmt.Print(\"* Testing HTTP Origin (Testing...)\")\n\n\theaders, err := checkCORS(reqUrl, \"OPTIONS\", reqHeaders, redirects)\n\tif err != nil {\n\t\tfmt.Println(\"\\033[11D\" + ESC_red + err.Error() + ESC_clear + \")\" + ESC_clearToEndOfLine)\n\t\treturn\n\t}\n\n\tif val, found := headers[\"Access-Control-Allow-Origin\"]; found {\n\t\tif val[0] == \"http://\"+origin.Host || val[0] == \"*\" {\n\t\t\tfmt.Println(\"\\033[11D\" + ESC_blinkRed + \"Fail\" + ESC_blinkRedClear + \")\" + ESC_clearToEndOfLine)\n\t\t\tfmt.Println(\" Trigger: Access-Control-Allow-Origin: \", val[0])\n\t\t} else {\n\t\t\tfmt.Println(\"\\033[11D\" + ESC_green + \"Pass\" + ESC_clear + \")\" + ESC_clearToEndOfLine)\n\t\t}\n\t} else { // NO ACAO header\n\t\tfmt.Println(\"\\033[11D\" + ESC_green + \"Pass\" + ESC_clear + \")\" + ESC_clearToEndOfLine)\n\t}\n\tprintCorsResponseHeaders(headers)\n}", "func handleCorsMiddleWare(h http.HandlerFunc) http.HandlerFunc {\n\treturn func(res http.ResponseWriter, req *http.Request) {\n\t\tif origin := req.Header.Get(\"Origin\"); origin != \"\" {\n\t\t\tres.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tres.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tres.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\t\tres.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t\t}\n\n\t\tif strings.ToLower(req.Method) == \"options\" {\n\t\t\tfmt.Println(\"CORS Preflight successful\")\n\t\t\tres.WriteHeader(http.StatusOK)\n\t\t\treturn\n\t\t}\n\t\t// Check if request has basic auth . 
If not, deny request further access\n\t\tuserid, password, ok := req.BasicAuth()\n\t\tif !ok || userid == \"\" || password == \"\" {\n\t\t\thttp.Error(res, \"401 - unauthorized\", http.StatusUnauthorized)\n\t\t\treturn\n\t\t}\n\n\t\th.ServeHTTP(res, req)\n\t}\n}", "func corsPreflight(gcors *CorsAccessControl, lcors *CorsAccessControl, allowedMethods string, w http.ResponseWriter, r *http.Request) error {\n\n\tcors := gcors.Merge(lcors)\n\n\tif origin := r.Header.Get(\"Origin\"); cors != nil && origin != \"\" {\n\t\t// validate origin is in list of acceptable allow-origins\n\t\tallowedOrigin := false\n\t\tallowedOriginExact := false\n\t\tfor _, v := range cors.GetAllowOrigin() {\n\t\t\tif v == origin {\n\t\t\t\tw.Header().Add(\"Access-Control-Allow-Origin\", origin)\n\t\t\t\tallowedOriginExact = true\n\t\t\t\tallowedOrigin = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !allowedOrigin {\n\t\t\tfor _, v := range cors.GetAllowOrigin() {\n\t\t\t\tif v == \"*\" {\n\t\t\t\t\tw.Header().Add(\"Access-Control-Allow-Origin\", v)\n\t\t\t\t\tallowedOrigin = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif !allowedOrigin {\n\t\t\t// other option headers needed\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\tw.Write([]byte(\"\"))\n\t\t\treturn errors.New(\"quick cors end\")\n\n\t\t}\n\n\t\t// if the request includes access-control-request-method\n\t\tif method := r.Header.Get(\"Access-Control-Request-Method\"); method != \"\" {\n\t\t\t// if there are no cors settings for this resource, use the allowedMethods,\n\t\t\t// if there are settings for cors, use those\n\t\t\tresponseMethods := []string{}\n\t\t\tif methods := cors.GetAllowMethods(); len(methods) != 0 {\n\t\t\t\tfor _, x := range methods {\n\t\t\t\t\tif x == method {\n\t\t\t\t\t\tresponseMethods = append(responseMethods, x)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor _, x := range strings.Split(allowedMethods, \", \") {\n\t\t\t\t\tif x == method {\n\t\t\t\t\t\tresponseMethods = append(responseMethods, x)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif len(responseMethods) > 0 {\n\t\t\t\tw.Header().Add(\"Access-Control-Allow-Methods\", strings.Join(responseMethods, \", \"))\n\t\t\t} else {\n\t\t\t\t// other option headers needed\n\t\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\t\tw.Write([]byte(\"\"))\n\t\t\t\treturn errors.New(\"quick cors end\")\n\t\t\t}\n\t\t}\n\n\t\t// if allow credentials is allowed on this resource respond with true\n\t\tif allowCredentials := cors.GetAllowCredentials(); allowedOriginExact && allowCredentials {\n\t\t\tw.Header().Add(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t}\n\n\t\tif exposeHeaders := cors.GetExposeHeaders(); len(exposeHeaders) != 0 {\n\t\t\t// if we have expose headers, send them\n\t\t\tw.Header().Add(\"Access-Control-Expose-Headers\", strings.Join(exposeHeaders, \", \"))\n\t\t}\n\t\tif maxAge := cors.GetMaxAge(); maxAge.Seconds() != 0 {\n\t\t\t// optional, if we have a max age, send it\n\t\t\tsec := fmt.Sprint(int64(maxAge.Seconds()))\n\t\t\tw.Header().Add(\"Access-Control-Max-Age\", sec)\n\t\t}\n\n\t\tif header := r.Header.Get(\"Access-Control-Request-Headers\"); header != \"\" {\n\t\t\theader = strings.Replace(header, \" \", \"\", -1)\n\t\t\trequestHeaders := strings.Split(header, \",\")\n\n\t\t\tallowHeaders := cors.GetAllowHeaders()\n\n\t\t\tgoodHeaders := []string{}\n\n\t\t\tfor _, x := range requestHeaders {\n\t\t\t\tfor _, y := range allowHeaders {\n\t\t\t\t\tif strings.ToLower(x) == strings.ToLower(y) {\n\t\t\t\t\t\tgoodHeaders = append(goodHeaders, 
x)\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif len(goodHeaders) > 0 {\n\t\t\t\tw.Header().Add(\"Access-Control-Allow-Headers\", strings.Join(goodHeaders, \", \"))\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func respondWithSavedCORSHeaders() httptransport.ServerResponseFunc {\n\treturn func(ctx context.Context, w http.ResponseWriter) context.Context {\n\t\tv, ok := ctx.Value(contextKey).(string)\n\t\tif ok && v != \"\" {\n\t\t\tmoovhttp.SetAccessControlAllowHeaders(w, v) // set CORS headers\n\t\t}\n\t\treturn ctx\n\t}\n}", "func CORSMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization\")\n\t\t}\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "func CorsHandler(w http.ResponseWriter) {\r\n\t// Handle CORS, RMB wildcard needs to change in production\r\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\r\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"access-control-allow-origin\")\r\n}", "func AllowCORS(h http.Handler) http.Handler {\n return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n if origin := r.Header.Get(\"Origin\"); origin != \"\" {\n w.Header().Set(\"Access-Control-Allow-Origin\", origin)\n if r.Method == \"OPTIONS\" && r.Header.Get(\"Access-Control-Request-Method\") != \"\" {\n preflightHandler(w, r)\n return\n }\n }\n h.ServeHTTP(w, r)\n })\n}", "func (srv *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\tif origin := r.Header.Get(\"Origin\"); origin != \"\" {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Accept-Language, Content-Type, YourOwnHeader\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t}\n\t// Stop here if its Preflighted OPTIONS request\n\tif r.Method == \"OPTIONS\" {\n\t\treturn\n\t}\n\tsrv.mux.ServeHTTP(w, r)\n}", "func Cors(h httprouter.Handle) httprouter.Handle {\n\treturn func(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Authorization\")\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT\")\n\n\t\th(w, r, ps)\n\t}\n}", "func cors(rw http.ResponseWriter, req *http.Request, app *App) bool {\n\torigin := req.Header.Get(\"Origin\")\n\tif origin == \"\" {\n\t\treturn false\n\t}\n\n\tfor _, allowedOrigin := range app.Config.General.Origin {\n\t\tif allowedOrigin == origin {\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif req.Method == \"OPTIONS\" {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false\n}", "func (s *Header) preRequestModifyCorsResponseHeaders(rw http.ResponseWriter, req *http.Request) {\n\toriginHeader := req.Header.Get(\"Origin\")\n\tallowOrigin := s.getAllowOrigin(originHeader)\n\n\tif allowOrigin != \"\" {\n\t\trw.Header().Set(\"Access-Control-Allow-Origin\", allowOrigin)\n\t}\n\n\tif s.headers.AccessControlAllowCredentials {\n\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", 
\"true\")\n\t}\n\n\tif len(s.headers.AccessControlExposeHeaders) > 0 {\n\t\texposeHeaders := strings.Join(s.headers.AccessControlExposeHeaders, \",\")\n\t\trw.Header().Set(\"Access-Control-Expose-Headers\", exposeHeaders)\n\t}\n}", "func Cors(h http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With\")\n\t\th(w, r)\n\t}\n}", "func PreflightHandler(w http.ResponseWriter, r *http.Request) {\n headers := []string{\"Content-Type\", \"Accept\", \"Authorization\"}\n w.Header().Set(\"Access-Control-Allow-Headers\", strings.Join(headers, \",\"))\n methods := []string{\"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\"}\n w.Header().Set(\"Access-Control-Allow-Methods\", strings.Join(methods, \",\"))\n glog.Infof(\"preflight request for %s\", r.URL.Path)\n}", "func Preflight(w http.ResponseWriter, r *http.Request) bool {\n\tif origin := r.Header.Get(\"Origin\"); origin != \"\" {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, OPTIONS\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t}\n\n\treturn (r.Method == \"OPTIONS\")\n}", "func SetAccessControlAllowHeaders(w http.ResponseWriter, origin string) {\n\t// Access-Control-Allow-Origin can't be '*' with requests that send credentials.\n\t// Instead, we need to explicitly set the domain (from request's Origin header)\n\t//\n\t// Allow requests from anyone's localhost and only from secure pages.\n\tif strings.HasPrefix(origin, \"http://localhost:\") || strings.HasPrefix(origin, \"https://\") {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET,POST,PATCH,DELETE,OPTIONS\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Cookie,X-User-Id,X-Request-Id,Content-Type\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t}\n}", "func handleStationOrigin(h http.Handler) http.Handler {\n\tspec0 := regexp.MustCompile(\"(.+[.])?fieldkit.org:\\\\d+\")\n\tspec1 := regexp.MustCompile(\"(.+[.])?local.fkdev.org:\\\\d+\")\n\tspec2 := regexp.MustCompile(\"(.+[.])?localhost:\\\\d+\")\n\torigHndlr := h.(http.HandlerFunc)\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\torigin := r.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec0) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec1) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", 
origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec2) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://*.fieldkit.org\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://*.fieldkit.org:8080\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://*.fkdev.org\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://fieldkit.org\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", 
origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://fieldkit.org:8080\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"https://fkdev.org\") {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization, Content-Type\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := r.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS, POST, DELETE, PATCH, PUT\")\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\torigHndlr(w, r)\n\t\t\treturn\n\t\t}\n\t\torigHndlr(w, r)\n\t\treturn\n\t})\n}", "func CORS(h http.Handler) http.Handler {\n\tallowHeaders := strings.Join([]string{\n\t\t\"Access-Control-Allow-Origin\",\n\t\t\"Access-Control-Allow-Headers\",\n\t\t\"Access-Control-Allow-Methods\",\n\t\t\"Content-Type\",\n\t\t\"Set-Cookie\",\n\t\t\"Cookie\",\n\t\t\"X-Login-With\",\n\t\t\"X-Requested-With\",\n\t}, \",\")\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Add(\"Access-Control-Allow-Origin\", r.Header.Get(\"Origin\"))\n\t\tw.Header().Add(\"Access-Control-Allow-Headers\", allowHeaders)\n\t\tw.Header().Add(\"Access-Control-Expose-Headers\", allowHeaders)\n\t\tw.Header().Add(\"Access-Control-Allow-Credentials\", \"true\")\n\t\tw.Header().Add(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, DELETE, OPTIONS\")\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\treturn\n\t\t}\n\t\th.ServeHTTP(w, r)\n\t})\n}", "func Cors(next http.HandlerFunc) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, Content-Type\")\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE, PATCH\")\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "func (cors *Cors) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", 
\"*\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, PUT, POST, PATCH, DELETE\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Authorization\")\n\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization\")\n\tw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\tif r.Method == http.MethodOptions {\n\t\tw.WriteHeader(http.StatusOK)\n\t}\n\tcors.handler.ServeHTTP(w, r)\n}", "func handleCouncillorsOrigin(h goa.Handler) goa.Handler {\n\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"*\") {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, DELETE, PUT\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"x-auth\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func corsMiddleware(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, Content-Type\")\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "func GinCorsHeaders(c *gin.Context) {\n\tc.Header(\"Access-Control-Allow-Origin\", \"*\")\n\tc.Header(\"Access-Control-Allow-Headers\", \"x-token\")\n}", "func CORS(fn http.Handler) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tif origin := r.Header.Get(\"Origin\"); origin != \"\" {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Status, Content-Type, Content-Length\")\n\t\t}\n\t\t// Stop here if its Preflighted OPTIONS request\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\treturn\n\t\t}\n\n\t\tfn.ServeHTTP(w, r)\n\t}\n}", "func (s *MyServer) ServeHTTP(rw http.ResponseWriter, req *http.Request) {\n\tif origin := req.Header.Get(\"Origin\"); origin != \"\" {\n\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\trw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t}\n\t// Stop here if its Preflighted OPTIONS request\n\tif req.Method == \"OPTIONS\" {\n\t\treturn\n\t}\n\t// Lets Gorilla work\n\ts.r.ServeHTTP(rw, req)\n}", "func setPollingHeaders(w http.ResponseWriter, r *http.Request) {\n\torigin := r.Header.Get(\"Origin\")\n\tif len(origin) > 0 {\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t} else {\n\t\torigin = \"*\"\n\t}\n\tw.Header().Set(\"Access-Control-Allow-Origin\", 
origin)\n\tw.Header().Set(\"Connection\", \"keep-alive\")\n\tw.Header().Set(\"Content-Type\", \"text/plain; charset=UTF-8\")\n}", "func (l Headers) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"authorization,content-type,hawkular-tenant\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, DELETE, PUT\")\n\n\tif l.Verbose {\n\t\tlog.Printf(\"set http headers\")\n\t}\n\n\tl.next.ServeHTTP(w, r)\n}", "func Cors() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Writer.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Next()\n\t}\n}", "func CORS(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", corsOrigin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"X-Requested-With, Content-Type\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\treturn\n\t\t}\n\t\th.ServeHTTP(w, r)\n\t})\n}", "func (s *Server) setCORSHeaders(w http.ResponseWriter, path string) {\n\tvar i int\n\tvar methods []string\n\t/* Find corresponding routes */\n\tfor i = 0; i < len(s.routes); i++ {\n\t\tif utils.ParseURL(s.routes[i].pattern, path, nil) {\n\t\t\tmethods = append(methods, s.routes[i].method)\n\t\t}\n\t}\n\tif len(methods) > 0 {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Add(\"Access-Control-Allow-Methods\", strings.Join(methods, \", \"))\n\t\tw.Header().Add(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\t}\n}", "func SetAccessControlHeaders(c *gin.Context) {\n\tc.Header(\"Access-Control-Allow-Headers\", \"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\n\to := c.GetHeader(\"Origin\")\n\tif o != config.Get().PanelLocation {\n\t\tfor _, origin := range config.Get().AllowedOrigins {\n\t\t\tif origin != \"*\" && o != origin {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tc.Header(\"Access-Control-Allow-Origin\", origin)\n\t\t\tc.Next()\n\t\t\treturn\n\t\t}\n\t}\n\n\tc.Header(\"Access-Control-Allow-Origin\", config.Get().PanelLocation)\n\tc.Next()\n}", "func (router Router) setUpHeaders(w http.ResponseWriter, r *http.Request) bool {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", router.Host)\n\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Header().Set(\"Access-Control-Max-Age\", \"120\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Access-Control-Allow-Headers, Authorization, X-Requested-With\")\n\tif r.Method == http.MethodOptions {\n\t\tw.WriteHeader(200)\n\t\treturn false\n\t}\n\treturn true\n}", "func handleCorsPreflight(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\n\tlog.Infof(\"Received [OPTIONS] request to CorsPreFlight: %+v\", r)\n\n\tc := cors.New(REST_CORS_PREFIX, log)\n\tc.HandlePreflight(w, r)\n}", "func OptionsAnswer(w http.ResponseWriter) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"*\")\n}", "func handleSwaggerOrigin(h goa.Handler) goa.Handler {\n\tspec1 := 
regexp.MustCompile(\"(localhost|cryptopages.club)\")\n\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"*\") {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec1) {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Vary\", \"Origin\")\n\t\t\trw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization\")\n\t\t\trw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, PATCH, DELETE\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func handleJsOrigin(h goa.Handler) goa.Handler {\n\tspec1 := regexp.MustCompile(\"(localhost|cryptopages.club)\")\n\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"*\") {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec1) {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Vary\", \"Origin\")\n\t\t\trw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization\")\n\t\t\trw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, PATCH, DELETE\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func Cors(inner http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Accept, 
Content-Type, Content-Length, Accept-Encoding, Authorization\")\n\n\t\tif r.Method != \"OPTIONS\" {\n\t\t\tinner.ServeHTTP(w, r)\n\t\t}\n\t})\n}", "func PreflightForCORS(w http.ResponseWriter, r *http.Request, allowHeaders, allowMethods *[]string, allowOrigin string) bool {\n\tif r.Method != http.MethodOptions {\n\t\treturn false\n\t}\n\n\t// When nil specified for allowHeaders, PreflightForCORS sets them as same as client request.\n\theaders := r.Header.Get(\"Access-Control-Request-Headers\")\n\tif allowHeaders != nil {\n\t\theaders = strings.Join(*allowHeaders, \", \")\n\t}\n\n\t// Also when nil specified for allowMethods, PreflightForCORS sets them as same as client request.\n\tmethods := r.Header.Get(\"Access-Control-Request-Method\")\n\tif allowMethods != nil {\n\t\tmethods = strings.Join(*allowMethods, \", \")\n\t}\n\n\tw.Header().Set(\"Access-Control-Allow-Origin\", allowOrigin)\n\tw.Header().Set(\"Access-Control-Allow-Headers\", headers)\n\tw.Header().Set(\"Access-Control-Allow-Methods\", methods)\n\tResponseOK(w)\n\treturn true\n}", "func AddCORSHandler(r *mux.Router) {\n\tr.Methods(\"OPTIONS\").HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\torigin := r.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\tSetAccessControlAllowHeaders(w, r.Header.Get(\"Origin\"))\n\t\tw.WriteHeader(http.StatusOK)\n\t})\n}", "func Options(c *gin.Context) {\n\tOrigin := c.MustGet(\"CorsOrigin\").(string)\n\n\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", Origin)\n\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"GET,DELETE,POST,PUT\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\tc.Next()\n}", "func EnableCrossDomain(w http.ResponseWriter, r *http.Request) {\n\torigin := r.Header.Get(\"Origin\")\n\n\theader := w.Header()\n\theader.Set(\"Access-Control-Allow-Methods\", \"OPTIONS,POST,GET\")\n\n\tif origin == \"\" { // not cross origin\n\t\theader.Set(\"Access-Control-Allow-Origin\", \"*\")\n\t} else {\n\t\theader.Set(\"Access-Control-Allow-Origin\", origin)\n\t}\n}", "func withCORS(fn http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tfn(w, r)\n\t}\n}", "func (s *Server) ServeHTTP(rw http.ResponseWriter, req *http.Request) {\n\tif origin := req.Header.Get(\"Origin\"); origin != \"\" {\n\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE\")\n\t\trw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization\")\n\t}\n\t// Stop here if its Preflighted OPTIONS request\n\tif req.Method == \"OPTIONS\" {\n\t\treturn\n\t}\n\n\t// Lets Gorilla work\n\ts.r.ServeHTTP(rw, req)\n}", "func CORS_Middleware() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Writer.Header().Set(\"Access-Control-Max-Age\", \"86400\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE, UPDATE\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"X-Requested-With, Content-Type, Origin, Authorization, Accept, Client-Security-Token, Accept-Encoding, 
x-access-token\")\n\t\tc.Writer.Header().Set(\"Access-Control-Expose-Headers\", \"Content-Length\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t}\n}", "func optionsHandler(ctx apiContext, w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", originAddress)\n\tw.Header().Set(\"Access-Control-Allow-Headers\", supportedHeaders)\n\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", methodsToString(ctx.supportedMethods))\n\tw.WriteHeader(http.StatusOK)\n}", "func handlePublicOrigin(h goa.Handler) goa.Handler {\n\tspec1 := regexp.MustCompile(\"(localhost|cryptopages.club)\")\n\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOrigin(origin, \"*\") {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"false\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, OPTIONS\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec1) {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Vary\", \"Origin\")\n\t\t\trw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization\")\n\t\t\trw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, PATCH, DELETE\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func cors(h httprouter.Handle) httprouter.Handle {\n\treturn func(w http.ResponseWriter, r *http.Request, p httprouter.Params) {\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"accept, content-type\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\th(w, r, p)\n\t}\n}", "func accessControl(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, OPTIONS, PUT, DELETE, UPDATE, PATCH\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, Content-Type, Authorization\")\n\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\treturn\n\t\t}\n\n\t\th.ServeHTTP(w, r)\n\t})\n}", "func Cors() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, Accept, Content-Type, X-ProxiedEntitiesChain\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"POST, OPTIONS, GET, PUT\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\n\t\tif c.Request.Method == \"OPTIONS\" 
{\n\t\t\tc.AbortWithStatus(204)\n\t\t\treturn\n\t\t}\n\n\t\tc.Next()\n\t}\n}", "func preflightHandler(w http.ResponseWriter, r *http.Request, additionalAllowedHeaders ...string) {\n\theaders := []string{\"Content-Type\", \"Accept\", \"Authorization\"}\n\tw.Header().Set(\"Access-Control-Allow-Headers\", strings.Join(append(headers, additionalAllowedHeaders...), \",\"))\n\n\tmethods := []string{\"GET\", \"HEAD\", \"POST\", \"PUT\", \"DELETE\"}\n\n\tw.Header().Set(\"Access-Control-Allow-Methods\", strings.Join(methods, \",\"))\n\tglog.Infof(\"preflight request for %s\", r.URL.Path)\n}", "func Cors(f http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"accept, content-type\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tf(w, r)\n\t}\n}", "func CORS() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", c.Request.Header.Get(\"Origin\"))\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept, Authorization\")\n\t\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"GET, HEAD, POST, PUT, DELETE, OPTIONS, PATCH\")\n\n\t\tif c.Request.Method == \"OPTIONS\" {\n\t\t\tc.AbortWithStatus(204)\n\t\t\treturn\n\t\t}\n\t\tc.Next()\n\t}\n}", "func handleRequestAndRedirect(res http.ResponseWriter, req *http.Request) {\n\tresponseWithCORSHeaders := addCORSHeaders(req, res)\n\tif req.Method == \"OPTIONS\" {\n\t\tresponseWithCORSHeaders.WriteHeader(204)\n\t} else {\n\t\treq.Header.Add(\"X-Forwarded-Host\", req.Host)\n\t\treturnedURL := getProxyURL(req)\n\t\tusableURL, _ := url.Parse(returnedURL)\n\t\treq.URL.Scheme = usableURL.Scheme\n\t\treq.URL.Path = \"\"\n\t\treq.Header.Add(\"X-Origin-Host\", req.Host)\n\t\tproxy := httputil.NewSingleHostReverseProxy(usableURL)\n\t\tproxy.ServeHTTP(responseWithCORSHeaders, req)\n\t}\n\n}", "func (c *CORSWrapper) Headers() Headers {\n\thdrs := c.View.Headers()\n\tif hdrs == nil {\n\t\thdrs = make(map[string]string)\n\t}\n\n\t// Include the CORS Headers\n\thdrs[\"Access-Control-Allow-Origin\"] = c.Origin\n\thdrs[\"Access-Control-Allow-Headers\"] = \"Content-Type\"\n\treturn hdrs\n}", "func (s *Header) processCorsHeaders(rw http.ResponseWriter, req *http.Request) bool {\n\tif !s.hasCorsHeaders {\n\t\treturn false\n\t}\n\n\treqAcMethod := req.Header.Get(\"Access-Control-Request-Method\")\n\toriginHeader := req.Header.Get(\"Origin\")\n\n\tif reqAcMethod != \"\" && originHeader != \"\" && req.Method == http.MethodOptions {\n\t\t// If the request is an OPTIONS request with an Access-Control-Request-Method header,\n\t\t// and Origin headers, then it is a CORS preflight request,\n\t\t// and we need to build a custom response: https://www.w3.org/TR/cors/#preflight-request\n\t\tif s.headers.AccessControlAllowCredentials {\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t}\n\n\t\tallowHeaders := strings.Join(s.headers.AccessControlAllowHeaders, \",\")\n\t\tif allowHeaders != \"\" {\n\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", allowHeaders)\n\t\t}\n\n\t\tallowMethods := strings.Join(s.headers.AccessControlAllowMethods, \",\")\n\t\tif allowMethods != \"\" {\n\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", allowMethods)\n\t\t}\n\n\t\tallowOrigin := 
s.getAllowOrigin(originHeader)\n\n\t\tif allowOrigin != \"\" {\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", allowOrigin)\n\t\t}\n\n\t\trw.Header().Set(\"Access-Control-Max-Age\", strconv.Itoa(int(s.headers.AccessControlMaxAge)))\n\t\treturn true\n\t}\n\n\ts.preRequestModifyCorsResponseHeaders(rw, req)\n\treturn false\n}", "func CORSEnabledFunction(w http.ResponseWriter, r *http.Request) {\n\t// Set CORS headers for the preflight request\n\tif r.Method == http.MethodOptions {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\t\tw.Header().Set(\"Access-Control-Max-Age\", \"3600\")\n\t\tw.WriteHeader(http.StatusNoContent)\n\t\treturn\n\t}\n\t// Set CORS headers for the main request.\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tfmt.Fprint(w, \"Hello, World!\")\n}", "func (cors *Cors) Filter(c siesta.Context, w http.ResponseWriter, r *http.Request, quit func()) {\n\n\t// Allways add \"Vary:Origin\" header\n\tw.Header().Add(\"Vary\", OriginHeader)\n\n\torigin := r.Header.Get(OriginHeader)\n\n\t// is not cors request, same origin request\n\tif origin == \"\" {\n\t\tc.Set(\"cors\", false)\n\t\treturn\n\t}\n\n\tif !cors.isMethodAllowed(r.Method) {\n\t\tcors.logWrap(\"Filter: Request method %s not allowed\", r.Method)\n\t\tw.WriteHeader(http.StatusMethodNotAllowed)\n\t\tquit()\n\t\treturn\n\t}\n\n\tif !cors.isOriginAllowed(origin) {\n\t\tcors.logWrap(\"Filter: Origin %s not allowed\", origin)\n\t\tw.WriteHeader(http.StatusForbidden)\n\t\tquit()\n\t\treturn\n\t}\n\n\t// Ok, origin and method are allowed\n\tc.Set(\"cors\", true)\n\tw.Header().Set(AccessControlAllowOrigin, origin)\n\n\t// handle a preflight request\n\tif r.Method == http.MethodOptions {\n\t\tc.Set(\"preflight\", true)\n\t\tcors.preFlightRequest(c, w, r, quit)\n\t\treturn\n\t}\n\n\tif cors.exposeHeader {\n\t\tw.Header().Set(AccessControlExposeHeaders, cors.exposedHeaders)\n\t}\n\n\tif cors.allowCredentials {\n\t\tw.Header().Set(AccessControlAllowCredentials, \"true\")\n\t}\n}", "func isPreflight(w http.ResponseWriter, r *http.Request) bool {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Origin, X-Session-ID\")\n\tif \"OPTIONS\" == r.Method {\n\t\treturn true\n\t}\n\treturn false\n}", "func (s *Server) WithCors(next http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t// set CORS-Header to response writer as defined by the api\n\t\tw.Header().Set(accessControlAllowOrigin, strings.Join(s.allowedOrigins, \",\"))\n\t\tw.Header().Set(accessControlAllowMethods, strings.Join(s.allowedMethods, \",\"))\n\t\tw.Header().Set(accessControlAllowHeader, strings.Join(s.allowedHeaders, \",\"))\n\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\treturn\n\t\t}\n\t\t// serve next handler\n\t\tnext(w, r)\n\t}\n}", "func Cors(f HandlerFunc) HandlerFunc {\n\treturn func(ctx context.Context, r events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) {\n\t\tlogger := api.StandardLambdaLogger(ctx, pkg.EnvLogLevel)\n\t\torigin := \"\"\n\t\tif val, ok := r.Headers[\"Origin\"]; ok {\n\t\t\torigin = val\n\t\t} else if val2, ok2 := r.Headers[\"origin\"]; ok2 {\n\t\t\torigin = val2\n\t\t}\n\t\tlogger.Debugw(\"Found origin value...\", log.Fields{\n\t\t\t\"origin\": origin,\n\t\t})\n\t\tresponse, err := f(ctx, r)\n\t\tif err != nil {\n\t\t\t// 
don't set cors headers on a response that errored out\n\t\t\treturn response, err\n\t\t}\n\n\t\tif origin == \"\" {\n\t\t\tlogger.Debug(\"Empty 'Origin' header passed in, no cors available\")\n\t\t\treturn response, err\n\t\t}\n\t\tnormalizedOrigin := normalizeOrigin(origin)\n\t\tallowedOrigins := getAllowedOrigins(pkg.EnvCorsAllowedOrigins)\n\t\tallowedOrigin := \"\"\n\t\tfor _, curOrigin := range allowedOrigins {\n\t\t\tif curOrigin == normalizedOrigin {\n\t\t\t\tallowedOrigin = origin\n\t\t\t}\n\t\t}\n\t\tif allowedOrigin == \"\" {\n\t\t\t// don't set cors headers if the origin was not an allowed one\n\t\t\tlogger.Warnw(\"Unidentified origin attempting to call the api\", log.Fields{\n\t\t\t\t\"origin\": origin,\n\t\t\t})\n\t\t\treturn response, err\n\t\t}\n\t\tif _, ok := response.Headers[AllowCredentials]; !ok {\n\t\t\tresponse.Headers[AllowCredentials] = \"true\"\n\t\t}\n\t\tfinalAllowedHeaders := getAllowedHeadersStr(allowedHeaders)\n\t\tif _, ok := response.Headers[AllowHeaders]; !ok {\n\t\t\tresponse.Headers[AllowHeaders] = finalAllowedHeaders\n\t\t\tlogger.Debugw(\"Recording allowed headers...\", log.Fields{\n\t\t\t\t\"allowedHeaders\": finalAllowedHeaders,\n\t\t\t})\n\t\t}\n\t\tif _, ok := response.Headers[AllowMethods]; !ok {\n\t\t\tresponse.Headers[AllowMethods] = strings.Join(allowedMethods, \",\")\n\t\t}\n\t\tif _, ok := response.Headers[AllowOrigin]; !ok {\n\t\t\tresponse.Headers[AllowOrigin] = allowedOrigin\n\t\t}\n\t\tif _, ok := response.Headers[ExposeHeaders]; !ok {\n\t\t\tresponse.Headers[ExposeHeaders] = finalAllowedHeaders\n\t\t}\n\n\t\treturn response, err\n\t}\n}", "func (s defaultServer) enableCors(h http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"http://localhost:8085\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, X-Csrf-Token\")\n\t\t\th.ServeHTTP(w, r)\n\t\t})\n}", "func CORS() gin.HandlerFunc {\n\treturn func(context *gin.Context) {\n\t\tcontext.Writer.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\tcontext.Writer.Header().Set(\"Access-Control-Max-Age\", \"86400\")\n\t\tcontext.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"POST, GET, OPTIONS, PUT, DELETE, UPDATE\")\n\t\tcontext.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With\")\n\t\tcontext.Writer.Header().Set(\"Access-Control-Expose-Headers\", \"Content-Length\")\n\t\tcontext.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\n\t\tif context.Request.Method == \"OPTIONS\" {\n\t\t\tcontext.AbortWithStatus(200)\n\t\t} else {\n\t\t\tcontext.Next()\n\t\t}\n\t}\n}", "func SetCors(s string) { corsAllow = s }", "func (c *Config) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\tu := c.User\n\trequestOrigin := r.Header.Get(\"Origin\")\n\n\t// Add CORS headers before any operation so even on a 401 unauthorized status, CORS will work.\n\tif c.Cors.Enabled && requestOrigin != \"\" {\n\t\theaders := w.Header()\n\n\t\tallowedHeaders := strings.Join(c.Cors.AllowedHeaders, \", \")\n\t\tallowedMethods := strings.Join(c.Cors.AllowedMethods, \", \")\n\t\texposedHeaders := strings.Join(c.Cors.ExposedHeaders, \", \")\n\n\t\tallowAllHosts := len(c.Cors.AllowedHosts) == 1 && c.Cors.AllowedHosts[0] == \"*\"\n\t\tallowedHost := 
isAllowedHost(c.Cors.AllowedHosts, requestOrigin)\n\n\t\tif allowAllHosts {\n\t\t\theaders.Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\t} else if allowedHost {\n\t\t\theaders.Set(\"Access-Control-Allow-Origin\", requestOrigin)\n\t\t}\n\n\t\tif allowAllHosts || allowedHost {\n\t\t\theaders.Set(\"Access-Control-Allow-Headers\", allowedHeaders)\n\t\t\theaders.Set(\"Access-Control-Allow-Methods\", allowedMethods)\n\n\t\t\tif c.Cors.Credentials {\n\t\t\t\theaders.Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\t}\n\n\t\t\tif len(c.Cors.ExposedHeaders) > 0 {\n\t\t\t\theaders.Set(\"Access-Control-Expose-Headers\", exposedHeaders)\n\t\t\t}\n\t\t}\n\t}\n\n\tif r.Method == \"OPTIONS\" && c.Cors.Enabled && requestOrigin != \"\" {\n\t\treturn\n\t}\n\n\t// Authentication\n\tif c.Auth {\n\t\tw.Header().Set(\"WWW-Authenticate\", `Basic realm=\"Restricted\"`)\n\n\t\t// Gets the correct user for this request.\n\t\tusername, password, ok := r.BasicAuth()\n\t\tzap.L().Info(\"login attempt\", zap.String(\"username\", username), zap.String(\"remote_address\", r.RemoteAddr))\n\t\tif !ok {\n\t\t\thttp.Error(w, \"Not authorized\", 401)\n\t\t\treturn\n\t\t}\n\n\t\tuser, ok := c.Users[username]\n\t\tif !ok {\n\t\t\thttp.Error(w, \"Not authorized\", 401)\n\t\t\treturn\n\t\t}\n\n\t\tif !checkPassword(user.Password, password) {\n\t\t\tzap.L().Info(\"invalid password\", zap.String(\"username\", username), zap.String(\"remote_address\", r.RemoteAddr))\n\t\t\thttp.Error(w, \"Not authorized\", 401)\n\t\t\treturn\n\t\t}\n\n\t\tu = user\n\t\tzap.L().Info(\"user authorized\", zap.String(\"username\", username))\n\t} else {\n\t\t// Even if Auth is disabled, we might want to get\n\t\t// the user from the Basic Auth header. Useful for Caddy\n\t\t// plugin implementation.\n\t\tusername, _, ok := r.BasicAuth()\n\t\tif ok {\n\t\t\tif user, ok := c.Users[username]; ok {\n\t\t\t\tu = user\n\t\t\t}\n\t\t}\n\t}\n\n\t// Checks for user permissions relatively to this PATH.\n\tnoModification := r.Method == \"GET\" || r.Method == \"HEAD\" ||\n\t\tr.Method == \"OPTIONS\" || r.Method == \"PROPFIND\"\n\n\tallowed := u.Allowed(r.URL.Path, noModification)\n\n\tzap.L().Debug(\"allowed & method & path\", zap.Bool(\"allowed\", allowed), zap.String(\"method\", r.Method), zap.String(\"path\", r.URL.Path))\n\n\tif !allowed {\n\t\tw.WriteHeader(http.StatusForbidden)\n\t\treturn\n\t}\n\n\tif r.Method == \"HEAD\" {\n\t\tw = newResponseWriterNoBody(w)\n\t}\n\n\t// Excerpt from RFC4918, section 9.4:\n\t//\n\t// \t\tGET, when applied to a collection, may return the contents of an\n\t//\t\t\"index.html\" resource, a human-readable view of the contents of\n\t//\t\tthe collection, or something else altogether.\n\t//\n\t// Get, when applied to collection, will return the same as PROPFIND method.\n\tif r.Method == \"GET\" && strings.HasPrefix(r.URL.Path, u.Handler.Prefix) {\n\t\tinfo, err := u.Handler.FileSystem.Stat(context.TODO(), strings.TrimPrefix(r.URL.Path, u.Handler.Prefix))\n\t\tif err == nil && info.IsDir() {\n\t\t\tr.Method = \"PROPFIND\"\n\n\t\t\tif r.Header.Get(\"Depth\") == \"\" {\n\t\t\t\tr.Header.Add(\"Depth\", \"1\")\n\t\t\t}\n\t\t}\n\t}\n\n\t// Runs the WebDAV.\n\t//u.Handler.LockSystem = webdav.NewMemLS()\n\tu.Handler.ServeHTTP(w, r)\n}", "func setHeaderMiddleWare(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", 
\"true\")\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, OPTIONS, PUT, DELETE, PATCH\")\n\t\tw.Header().Set(\"Access-Control-Expose-Headers\", \"Access-Token, Refresh-Token\")\n\t\tw.Header().Set(\"Access-Control-Allow-Headers\",\n\t\t\t\"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Access-Token, Refresh-Token, Authorization, X-PINGOTHER\")\n\n\t\tif os.Getenv(\"ENVIRONMENT\") == \"DEV\" {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"http://localhost:3000\")\n\t\t} else {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", \"https://cashcalc.web.app\")\n\t\t}\n\n\t\tif r.Method == \"OPTIONS\" {\n\t\t\treturn\n\t\t}\n\n\t\tnext.ServeHTTP(w, r)\n\t})\n}", "func handleUserOrigin(h goa.Handler) goa.Handler {\n\tspec0 := regexp.MustCompile(\"(localhost|cryptopages.club)\")\n\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\torigin := req.Header.Get(\"Origin\")\n\t\tif origin == \"\" {\n\t\t\t// Not a CORS request\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\t\tif cors.MatchOriginRegexp(origin, spec0) {\n\t\t\tctx = goa.WithLogContext(ctx, \"origin\", origin)\n\t\t\trw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\trw.Header().Set(\"Vary\", \"Origin\")\n\t\t\trw.Header().Set(\"Access-Control-Expose-Headers\", \"Authorization\")\n\t\t\trw.Header().Set(\"Access-Control-Max-Age\", \"600\")\n\t\t\trw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\tif acrm := req.Header.Get(\"Access-Control-Request-Method\"); acrm != \"\" {\n\t\t\t\t// We are handling a preflight request\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, PATCH, DELETE\")\n\t\t\t\trw.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization, Content-Type\")\n\t\t\t}\n\t\t\treturn h(ctx, rw, req)\n\t\t}\n\n\t\treturn h(ctx, rw, req)\n\t}\n}", "func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Add(\"Access-Control-Max-Age\", \"2592000\")\n\tw.Header().Add(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, DELETE\")\n\tw.Header().Add(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept\")\n\tw.Header().Add(\"X-Influxdb-Version\", h.Version)\n\n\t// If this is a CORS OPTIONS request then send back okie-dokie.\n\tif r.Method == \"OPTIONS\" {\n\t\tw.WriteHeader(http.StatusOK)\n\t\treturn\n\t}\n\n\t// Otherwise handle it via pat.\n\th.mux.ServeHTTP(w, r)\n}", "func Cors() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tc.Writer.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Writer.Header().Add(\"Access-Control-Allow-Methods\", \"POST,PUT,DELETE,OPTIONS\")\n\t\tc.Writer.Header().Add(\"Access-Control-Allow-Headers\", \"Content-Type,Authorization\")\n\t\tif strings.ToLower(c.Request.Method) == \"options\" {\n\t\t\t// lib.Log.Notice(\"OPTIONS Request\")\n\t\t\t// ctx.ResponseWriter.WriteHeader(200)\n\t\t\tc.Writer.WriteHeader(200)\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\t\tc.Next()\n\t}\n}", "func enableFrontend(w http.ResponseWriter, url string) {\n\tw.Header().Set(\"Access-Control-Allow-Origin\", url)\n\tw.Header().Set(\"Access-Control-Request-Method\", \"POST,GET,OPTIONS\")\n}", "func CORS(handler http.Handler, alloweedOriginPatterns []*regexp.Regexp, allowedMethods, allowedHeaders []string, allowCredentials string) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\torigin := 
req.Header.Get(\"Origin\")\n\t\tif origin != \"\" {\n\t\t\tallowed := false\n\t\t\tfor _, pattern := range alloweedOriginPatterns {\n\t\t\t\tif allowed = pattern.MatchString(origin); allowed {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tif allowed {\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\t\t// Set defaults for methods and headers if nothing was passed\n\t\t\t\tif allowedMethods == nil {\n\t\t\t\t\tallowedMethods = []string{\"POST\", \"GET\", \"OPTIONS\", \"PUT\", \"DELETE\"}\n\t\t\t\t}\n\t\t\t\tif allowedHeaders == nil {\n\t\t\t\t\tallowedHeaders = []string{\"Content-Type\", \"Content-Length\", \"Accept-Encoding\", \"X-CSRF-Token\", \"Authorization\", \"X-Requested-With\", \"If-Modified-Since\"}\n\t\t\t\t}\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", strings.Join(allowedMethods, \", \"))\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", strings.Join(allowedHeaders, \", \"))\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", allowCredentials)\n\n\t\t\t\t// Stop here if its a preflight OPTIONS request\n\t\t\t\tif req.Method == \"OPTIONS\" {\n\t\t\t\t\tw.WriteHeader(http.StatusNoContent)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t// Dispatch to the next handler\n\t\thandler.ServeHTTP(w, req)\n\t})\n}", "func (h *Handler) buildResponse(w http.ResponseWriter, r *http.Request, origin string, method string, headers string) {\n\tw.Header().Set(allowOriginHeader, origin)\n\tw.Header().Set(allowMethodsHeader, method)\n\tw.Header().Set(allowHeadersHeader, headers)\n}", "func LoadCORS(r *mux.Router) http.Handler {\n var corsOpts *cors.Cors\n var origins []string\n var headers []string\n var methods []string\n\n origins = []string{\"*\"}\n headers = []string{\"*\"}\n methods = []string{http.MethodGet,\n http.MethodPost,\n http.MethodPut,\n http.MethodPatch,\n http.MethodDelete,\n http.MethodOptions,\n }\n\n corsOpts = cors.New(cors.Options{\n AllowedOrigins: origins,\n AllowedHeaders: headers,\n AllowedMethods: methods,\n })\n\n return corsOpts.Handler(r)\n}", "func (m *GoMiddleware) CORS(ctx iris.Context) {\n\tctx.Header(\"Access-Control-Allow-Origin\", \"*\")\n\tctx.Next()\n}", "func (m *GoMiddleware) CORS(next echo.HandlerFunc) echo.HandlerFunc {\n\treturn func(c echo.Context) error {\n\t\tc.Response().Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\t\tc.Response().Header().Set(\"Access-Control-Allow-Methods\", \"DELETE, POST, GET, OPTIONS, PUT\")\n\t\tc.Response().Header().Set(\"Access-Control-Allow-Headers\", \"Origin, X-Requested-With, Content-Type, Accept, Authorization, X-Custom-Header, Upgrade-Insecure-Requests\")\n\t\tc.Response().Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t// Access-Control-Allow-Credentials\n\t\tif c.Request().Method == \"OPTIONS\" {\n\t\t\treturn c.JSON(http.StatusOK, \"ok\")\n\t\t}\n\n\t\treturn next(c)\n\t}\n}", "func writeResponse(r *http.Request, w http.ResponseWriter, code int, resp interface{}) {\n\n\t// Deal with CORS\n\tif origin := r.Header.Get(\"Origin\"); origin != \"\" {\n\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"DELETE, GET, HEAD, OPTIONS, POST, PUT\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t// Allow any headers\n\t\tif wantedHeaders := r.Header.Get(\"Access-Control-Request-Headers\"); wantedHeaders != \"\" {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", wantedHeaders)\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/plain; 
charset=utf-8\")\n\n\tb, err := json.Marshal(resp)\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tfmt.Fprintln(w, `{\"error\":\"failed to marshal json\"}`)\n\t\treturn\n\t}\n\n\tw.WriteHeader(code)\n\tfmt.Fprintln(w, string(b))\n}" ]
[ "0.7822085", "0.72537106", "0.7166708", "0.71026576", "0.7071419", "0.70695454", "0.70508224", "0.7045512", "0.7025273", "0.6990235", "0.6947004", "0.6942893", "0.69167435", "0.69167435", "0.69113564", "0.68853074", "0.68853074", "0.68397", "0.6834698", "0.6756225", "0.6754492", "0.67385745", "0.67206204", "0.671694", "0.6712776", "0.6676008", "0.6613655", "0.6607733", "0.65930504", "0.65898573", "0.65891653", "0.65570134", "0.65561324", "0.65557367", "0.6542722", "0.65314066", "0.65259135", "0.6523107", "0.65081495", "0.6476746", "0.6440124", "0.64370805", "0.64200103", "0.64038587", "0.6402819", "0.63720185", "0.6370957", "0.63474196", "0.6347184", "0.6326539", "0.6325333", "0.63247955", "0.63202375", "0.63125396", "0.63080406", "0.6307843", "0.63019544", "0.6295733", "0.62942225", "0.6271549", "0.6269154", "0.6231914", "0.62189144", "0.62154806", "0.62022996", "0.6201099", "0.61957806", "0.6195444", "0.6187575", "0.6185811", "0.61679405", "0.6167082", "0.6164829", "0.61623347", "0.6152061", "0.61473745", "0.6144631", "0.61400855", "0.612062", "0.6115673", "0.6107675", "0.6088696", "0.6051428", "0.6049543", "0.60440266", "0.6043798", "0.60434806", "0.60376847", "0.600453", "0.6004107", "0.60040736", "0.59893894", "0.59763056", "0.59559184", "0.5948268", "0.5948229", "0.593968", "0.5938984", "0.5937956", "0.5931993" ]
0.7798482
1
Position multiplies the direction of the ray by the passed distance and adds the result onto the origin. Used for finding the position along a ray.
func Position(r geom.Ray, distance float64) geom.Tuple4 { add := geom.MultiplyByScalar(r.Direction, distance) pos := geom.Add(r.Origin, add) return pos }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r Ray3) Moved(dist float64) Vector3 {\n\treturn r.Origin.Plus(r.Dir.Scaled(dist))\n}", "func (r Ray) HitPos(t float64) Vector {\n\treturn r.direction.MultiplyByScalar(t).Add(r.origin)\n}", "func (p point) move(dir direction, dist int) point {\n\tvar movedPoint point\n\tswitch dir {\n\tcase up:\n\t\tmovedPoint = point{x: p.x, y: p.y + dist}\n\tcase down:\n\t\tmovedPoint = point{x: p.x, y: p.y - dist}\n\tcase right:\n\t\tmovedPoint = point{x: p.x + dist, y: p.y}\n\tcase left:\n\t\tmovedPoint = point{x: p.x - dist, y: p.y}\n\t}\n\tmovedPoint.wireLen = p.wireLen + dist\n\treturn movedPoint\n}", "func (r Ray) Pos(t float64) Vec3 {\n\treturn r.Origin.Add(r.Dir.ScalarMul(t))\n}", "func (c Circle) Moved(delta Vec) Circle {\n\treturn Circle{\n\t\tCenter: c.Center.Add(delta),\n\t\tRadius: c.Radius,\n\t}\n}", "func (p *Particle) Distance(pa *Particle) float64 {\n\treturn p.Position.Distance(pa.Position)\n}", "func (r *Ray) At(t float64) Vector {\n\treturn r.Origin.Plus(r.Direction.MultiplyScalar(t))\n}", "func (l Line) Moved(delta Vec) Line {\n\treturn Line{\n\t\tA: l.A.Add(delta),\n\t\tB: l.B.Add(delta),\n\t}\n}", "func (l Location) Offset(distance float64, angle float64) Location {\n\n\tdistanceNorth := math.Sin(angle) * distance\n\tdistanceEast := math.Cos(angle) * distance\n\tearthRadius := float64(6378100)\n\tnewLat := l.Latitude + (distanceNorth/earthRadius)*180/math.Pi\n\tnewLon := l.Longitude + (distanceEast/(earthRadius*math.Cos(newLat*180/math.Pi)))*180/math.Pi\n\n\treturn Location{newLat, newLon}\n}", "func (r *Ray) PointAt(t float64) Vector {\n\treturn r.Origin.Add(r.Direction.Mult(t))\n}", "func distance(point Point) (int) {\n\txDist := point.X\n\tyDist := point.Y\n\n\tif xDist < 0 {\n\t\txDist *= -1\n\t}\n\n\tif yDist < 0 {\n\t\tyDist *= -1\n\t}\n\n\treturn xDist + yDist\n}", "func (p point) distanceToOrigin() int {\n\treturn p.distanceToPoint(point{x: 0, y: 0})\n}", "func (r Rect) Moved(delta Vec) Rect {\n\treturn Rect{\n\t\tMin: r.Min.Add(delta),\n\t\tMax: r.Max.Add(delta),\n\t}\n}", "func (space *Space) Raycast(origin, direction Vector, distance float64, mask int32) (Shape, Vector, error) {\n\tif Sign(direction.X) == 0 && Sign(direction.Y) == 0 {\n\t\treturn nil, Zero(), fmt.Errorf(\"the direction vector is Zero()\")\n\t}\n\n\tif distance <= 0 {\n\t\tdistance = Distance(space.min, space.max)\n\t}\n\n\tnormDir, err := direction.Normalize()\n\n\tif err != nil {\n\t\treturn nil, Zero(), err\n\t}\n\n\tray, err := NewLine(origin, origin.Add(\n\t\tnormDir.MultiplyByScalar(distance)))\n\n\tif err != nil {\n\t\treturn nil, Zero(), err\n\t}\n\n\tray.SetMask(mask)\n\tnodeQueue := queue.New()\n\tnodeQueue.Enqueue(space.tree.root)\n\tminExists := false\n\n\tvar (\n\t\tminSquaredDistance float64\n\t\thitShape Shape\n\t\thit Vector\n\t)\n\n\tfor nodeQueue.Len() > 0 {\n\t\tnode := nodeQueue.Dequeue().(*quadTreeNode)\n\t\toverlapped, err := node.boundary.collidesLine(ray)\n\n\t\tif err != nil {\n\t\t\treturn nil, Zero(), err\n\t\t}\n\n\t\tif !overlapped {\n\t\t\tcontinue\n\t\t}\n\n\t\t// If the node is a leaf.\n\t\tif node.northWest == nil {\n\t\t\tfor shape := range node.shapes {\n\t\t\t\traycastHit, err := ResolveCollision(ray, shape, space.useTags)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, Zero(), err\n\t\t\t\t}\n\n\t\t\t\tif raycastHit && !shape.ContainsPoint(ray.p) {\n\t\t\t\t\tcontacts, err := Contact(ray, shape)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, Zero(), err\n\t\t\t\t\t}\n\n\t\t\t\t\tfor _, contact := range contacts {\n\t\t\t\t\t\tsqrDistance := 
SquaredDistance(ray.p, contact)\n\n\t\t\t\t\t\tif !minExists {\n\t\t\t\t\t\t\thit = contact\n\t\t\t\t\t\t\thitShape = shape\n\t\t\t\t\t\t\tminSquaredDistance = sqrDistance\n\n\t\t\t\t\t\t\tminExists = true\n\t\t\t\t\t\t} else if sqrDistance < minSquaredDistance {\n\t\t\t\t\t\t\thit = contact\n\t\t\t\t\t\t\thitShape = shape\n\t\t\t\t\t\t\tminSquaredDistance = sqrDistance\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tnodeQueue.Enqueue(node.northEast)\n\t\t\tnodeQueue.Enqueue(node.northWest)\n\t\t\tnodeQueue.Enqueue(node.southEast)\n\t\t\tnodeQueue.Enqueue(node.southWest)\n\t\t}\n\t}\n\n\treturn hitShape, hit, nil\n}", "func (r Ray) Reflect(hit RayHit) Ray {\n\treflection_dir := hit.ray.direction.Sub(hit.normal.MultiplyByScalar(2.0 * hit.ray.direction.Dot(hit.normal))).Normalize()\n\treflection_origin := reflection_dir.MultiplyByScalar(0.001).Add(hit.ray.origin)\n\treturn Ray{\n\t\torigin: reflection_origin,\n\t\tdirection: reflection_dir,\n\t}\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\tclong := math.Cos(rad(p1.long - p2.long))\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func (w world) distance(p1, p2 location) float64 {\n\ts1, c1 := math.Sincos(rad(p1.lat))\n\ts2, c2 := math.Sincos(rad(p2.lat))\n\n\tclong := math.Cos(rad(p1.long - p2.long))\n\n\treturn w.radius * math.Acos(s1*s2+c1*c2*clong)\n}", "func (l *Line) Move(direction Vector) Vector {\n\tl.q = l.q.Add(direction)\n\tl.p = l.p.Add(direction)\n\n\treturn l.Center()\n}", "func (g Coords) Distance() float64 {\n\t// * removed returned variable\n\treturn math.Sqrt(math.Pow(g.X2-g.X1, 2) + math.Pow(g.Y2-g.Y1, 2))\n}", "func (ray Ray) PointAt(time float64) Vec3D {\n\treturn AddVec3D(ray.origin, ScalarProduct(time, ray.direction))\n}", "func (r Ray) At(t float64) *Vec3 {\n\treturn r.Direction.SMul(t).AddSet(r.Origin)\n}", "func (p Point) Distance(q Point) float64 { // HL\n\treturn math.Hypot(q.X-p.X, q.Y-p.Y)\n}", "func (c *camera) point_at(v Vector) {\n c.front = v.Sub(c.pos).Normalize().Add(c.pos)\n}", "func (m *CarCheckInOutMutation) AddDistance(f float64) {\n\tif m.adddistance != nil {\n\t\t*m.adddistance += f\n\t} else {\n\t\tm.adddistance = &f\n\t}\n}", "func translate_point_1(p Point, distance float64) {\n\tp.x += distance\n\tp.y += distance\n}", "func (d *droid) move(direction int) int {\n\td.code.PushInput(int64(direction))\n\td.code.Continue()\n\n\tmoveResult := int(d.code.PopOutput())\n\tif moveResult != 0 {\n\t\tif direction == 1 {\n\t\t\td.location.y--\n\t\t} else if direction == 2 {\n\t\t\td.location.y++\n\t\t} else if direction == 3 {\n\t\t\td.location.x--\n\t\t} else {\n\t\t\td.location.x++\n\t\t}\n\t}\n\n\tif moveResult == 2 {\n\t\td.foundTarget = true\n\t\td.oxygenPosition = &point{x: d.location.x, y: d.location.y}\n\t}\n\n\treturn moveResult\n}", "func (tg *TurtleGraphics) getnewpos(angle, distance float64) Vector {\n\td2r := math.Pi / 180\n\top := math.Sin(angle*d2r) * distance\n\tad := math.Cos(angle*d2r) * distance\n\n\tnewp := Vector{op, ad}\n\n\tnewp.X += tg.Pos.X\n\tnewp.Y += tg.Pos.Y\n\n\treturn newp\n}", "func (v *Vector) Distance(e *Vector) float64 {\n\tdX := v.X - e.X\n\tdY := v.Y - e.Y\n\tdZ := v.Z - e.Z\n\treturn math.Sqrt(dX*dX + dY*dY + dZ*dZ)\n}", "func (r ApiSearchRestaurantsRequest) Distance(distance float32) ApiSearchRestaurantsRequest {\n\tr.distance = &distance\n\treturn r\n}", "func (g Object) CalculatedPoint() Position {\n\tp := g.bridge().CalculatedPoint()\n\treturn 
Position{p.X, p.Y, p.Z}\n}", "func (m Matrix) Moved(delta Vec) Matrix {\n\tm[4], m[5] = m[4]+delta.X, m[5]+delta.Y\n\treturn m\n}", "func turnDist(p1, p2 Coord) int {\n return int(math.Ceil(float64(dist(p1, p2)) / MOVE_DIST))\n}", "func (c card) distance(d card) int {\n\tdist := d.number - c.number\n\tif dist < 0 {\n\t\tdist += 13\n\t}\n\treturn dist\n}", "func (self Source) GetPosition(result *Vector) {\n\tresult[x], result[y], result[z] = self.Get3f(AlPosition)\n}", "func newCoord(instruction string, xwards bool) {\n\tsteps, _ := strconv.Atoi(instruction[1:])\n\tif xwards {\n\t\tif string(instruction[0]) == \"R\" {\n\t\t\tfacing.x = facing.y\n\t\t} else {\n\t\t\tfacing.x = -facing.y\n\t\t}\n\t\twalk(steps, facing.x, coord, xwards)\n\t\tcoord.x += facing.x * steps\n\t} else {\n\t\tif string(instruction[0]) == \"R\" {\n\t\t\tfacing.y = -facing.x\n\t\t} else {\n\t\t\tfacing.y = facing.x\n\t\t}\n\t\twalk(steps, facing.y, coord, xwards)\n\t\tcoord.y += facing.y * steps\n\t}\n}", "func (p *player) calculatePosition() (float64, float64) {\n\tif p.startedMoving == 0 {\n\t\treturn p.location.x, p.location.y\n\t}\n\tcurrentTime := time.Now().UnixNano()\n\ttimeSince := float64((currentTime-p.startedMoving)/1000000) / 1000\n\tchangeX := timeSince * p.velocity.x\n\tx := p.location.x + changeX\n\ty := p.location.y\n\tif p.velocity.y != 0 {\n\t\tgravity := (timeSince / 2 * world_gravity)\n\t\tvelocity := p.velocity.y + gravity\n\t\ty = p.location.y + (timeSince * velocity)\n\t}\n\t//b, _ := w.inPlatform(x, y)\n\t//log.Println(\"in platform:\", b, x, y)\n\treturn x, y\n}", "func (c *Camera) Move(dir CameraDirection, offset float32) {\n\tvar delta types.Vec3\n\n\tswitch dir {\n\tcase Up:\n\t\tdelta = c.Up.Mul(offset)\n\tcase Down:\n\t\tdelta = c.Up.Mul(-offset)\n\tcase Left:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Cross(c.Up).Mul(-offset)\n\tcase Right:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Cross(c.Up).Mul(offset)\n\tcase Forward:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Mul(offset)\n\tcase Backward:\n\t\tdelta = c.LookAt.Sub(c.Position).Normalize().Mul(-offset)\n\t}\n\n\tc.Position = c.Position.Add(delta)\n\tc.LookAt = c.LookAt.Add(delta)\n\tc.Update()\n}", "func Distance(from, to Coord) float64 {\n\tx := float64(from.X - to.X)\n\ty := float64(from.Y - to.Y)\n\n\treturn math.Sqrt(x*x + y*y)\n}", "func distance(rec rectangle) float64 {\n\ta := rec.x2 - rec.x1\n\tb := rec.y2 - rec.y1\n\treturn math.Sqrt(a*a + b*b)\n}", "func (dwr *DifferentialWheeledRobot) RollPosition(distLeft, distRight float64, prev Position) Position {\n\n\t// Straight line\n\tif distLeft == distRight {\n\t\treturn Position{\n\t\t\tprev.X + distLeft*math.Cos(prev.Theta),\n\t\t\tprev.Y + distLeft*math.Sin(prev.Theta),\n\t\t\tprev.Theta,\n\t\t}\n\t}\n\n\t// Turning\n\tturnRadius := dwr.BaseWidth * (distRight + distLeft) / (2 * (distRight - distLeft))\n\tangle := (distRight-distLeft)/dwr.BaseWidth + prev.Theta\n\treturn Position{\n\t\tprev.X + turnRadius*(math.Sin(angle)-math.Sin(prev.Theta)),\n\t\tprev.Y - turnRadius*(math.Cos(angle)-math.Cos(prev.Theta)),\n\t\tangle,\n\t}\n\n\t// s := (distLeft + distRight) / 2.0\n\t// theta := (distRight-distLeft)/dwr.BaseWidth + prev.Theta\n\t// x := s*math.Cos(theta) + prev.X\n\t// y := s*math.Sin(theta) + prev.Y\n\n\t// return Position{x, y, theta}\n\n}", "func (m *CarCheckInOutMutation) AddedDistance() (r float64, exists bool) {\n\tv := m.adddistance\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func distance(x1, y1, x2, y2 float64) float64 {\n\ta := x2 
- x1\n\tb := y2 - y1\n\treturn math.Sqrt(a*a + b*b)\n}", "func distance(a, b *Vertex) float64 {\n\treturn math.Sqrt(math.Pow(b.X-a.X, 2) + math.Pow(b.Y-a.Y, 2))\n}", "func (w *RandomWorld) Distance(from, to GoWorld.Location) float64 {\n\treturn math.Sqrt(math.Pow(float64(from.X-to.X), 2) + math.Pow(float64(from.Y-to.Y), 2))\n}", "func distance(p1 Point, p2 Point) float64 {\n\tfirst := math.Pow(float64(p2.x-p1.x), 2)\n\tsecond := math.Pow(float64(p2.y-p1.y), 2)\n\treturn math.Sqrt(first + second)\n}", "func (q *DistanceFeatureQuery) Origin(origin interface{}) *DistanceFeatureQuery {\n\tq.origin = origin\n\treturn q\n}", "func distance(x Point, y Point) float64 {\n\treturn math.Sqrt(math.Pow(x[0]-y[0],2) + math.Pow(x[1]-y[1],2))\t \n}", "func (p Point) Add(q Point) Point { return Point{p.X + q.X, p.Y + q.Y} }", "func (p Point) Add(q Point) Point { return Point{p.X + q.X, p.Y + q.Y} }", "func GetPlayerOffset(cmd string) *PlayerCoords {\n\tdir := \"\"\n\tfmt.Scan(&dir)\n\tamount := 0.0\n\tfmt.Scan(&amount)\n\tpp := GetPlayerPos()\n\tswitch (dir) {\n\t\tcase \"x\":\n\t\t\tpp.X += amount\n\t\tcase \"y\":\n\t\t\tpp.Y += amount\n\t\tcase \"z\":\n\t\t\tpp.Z += amount\n\t\tdefault:\n\t\t\tfmt.Printf(\"Syntax: %s <dir> <amount>\\nWhere %s is one of [x, y, z], and amount is a floating point number, relative to the current player position.\\n\", cmd, cmd)\n\t}\n\treturn pp\n}", "func Forward(b Branch, distance float64) Branch {\n\tvar b_new Branch\n\tb_new.phase = b.phase\n\tb_new.xy = b.xy + cmplx.Rect(distance, b.phase)\n\treturn b_new\n}", "func dist(p1, p2 Coord) int {\n return abs(p1.x-p2.x) + abs(p1.y-p2.y)\n}", "func (vec Vector2) Distance(vec2 Vector2) float32 {\n\txd := vec2.X - vec.X\n\tyd := vec2.Y - vec.Y\n\treturn Sqrt(xd*xd + yd*yd)\n}", "func (point Point) Walk(direction Direction) Point {\n\tswitch direction {\n\tcase DirectionTop:\n\t\tpoint.Y++\n\tcase DirectionDown:\n\t\tpoint.Y--\n\tcase DirectionRight:\n\t\tpoint.X++\n\tcase DirectionLeft:\n\t\tpoint.X--\n\t}\n\n\treturn point\n}", "func (p1 XY) Distance(p2 XY) float64 {\n\tdx, dy := p2.X-p1.X, p2.Y-p1.Y\n\treturn math.Sqrt(float64(dx*dx) + float64(dy*dy))\n}", "func (d Position2D) AddPosition(addend Position2D) Position2D {\n\treturn NewPosition2D(d.Row+addend.Row, d.Col+addend.Col)\n}", "func distance(a, b mgl64.Vec3) float64 {\n\txDiff, yDiff, zDiff := b[0]-a[0], b[1]-a[1], b[2]-a[2]\n\treturn math.Sqrt(xDiff*xDiff + yDiff*yDiff + zDiff*zDiff)\n}", "func (p Point) getDist(q Point) float64 {\n\tdx := p.X - q.X\n\tdy := p.Y - q.Y\n\treturn dx*dx + dy*dy\n}", "func (p *Particle) Move() {\n\tp.Position[0] += (p.Direction[0] * p.Speed)\n\tp.Position[1] += (p.Direction[1] * p.Speed)\n}", "func Position(parent, child Rect, a AlignType) Rect {\n\tswitch {\n\tcase a.Is(Left):\n\t\tchild.X = parent.Left()\n\tcase a.Is(Right):\n\t\tchild.X = parent.Right() - child.W\n\tcase a.Is(Center):\n\t\tchild.X = parent.CenterX() - child.W/2\n\t}\n\tswitch {\n\tcase a.Is(Top):\n\t\tchild.Y = parent.Top()\n\tcase a.Is(Bottom):\n\t\tchild.Y = parent.Bottom() - child.H\n\tcase a.Is(Center):\n\t\tchild.Y = parent.CenterY() - child.H/2\n\t}\n\treturn child\n}", "func reflect(dir, normal *Vec3) *Vec3 {\n\treturn dir.minus(normal.scale(TWO * normal.dot(dir)))\n}", "func (p Point) Distance(p2 Point) float64 {\n\treturn math.Sqrt(math.Pow(p2.x-p.x, 2) + math.Pow(p2.y-p.y, 2))\n}", "func (p *Point2D) Move(deltaX, deltaY float64) {\n\t// if you want to modify the \"object\" (i.e. 
the value) you need to pass a pointer\n\t// otherwise you would only get a copy (by-value)\n\n\t// this is actually short-hand for (*p).x and (*p).y. Go does magic dereferencing on struct pointers.\n\tp.x += deltaX\n\tp.y += deltaY\n}", "func (m match) dist() uint32 {\n\treturn uint32(m.distance - minDistance)\n}", "func (g SimplePoint) CalculatedPoint() Position {\n\treturn Position{X: g.X, Y: g.Y, Z: 0}\n}", "func (m *CarCheckInOutMutation) Distance() (r float64, exists bool) {\n\tv := m.distance\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func (ag *Agent) Advance(v float32) {\n\t// Compute direction vector\n\tag.X += v * float32(math.Cos(float64(ag.R)))\n\tag.Y += v * float32(math.Sin(float64(ag.R)))\n}", "func (l *Line) Move(x, y float64) {\n\tl.X += x\n\tl.Y += y\n\tl.X2 += x\n\tl.Y2 += y\n}", "func (self Source) GetDirection(result *Vector) {\n\tresult[x], result[y], result[z] = self.Get3f(AlDirection)\n}", "func Distance(src_lat, src_long, dest_lat, dest_long, radius float64) float64 {\n\tlat := Haversine(ToRadians(dest_lat - src_lat))\n\tlong := Haversine(ToRadians(dest_long - src_long))\n\n\treturn 2 * radius * math.Asin(math.Sqrt(lat+math.Cos(ToRadians(src_lat))*math.Cos(ToRadians(dest_lat))*long))\n}", "func (space Space) PointDist(point1 []float64, point2 []float64) float64 {\n\treturn 1 - Cosinus(point1, point2)\n}", "func (r Ruler) Destination(p Point, d float64, b float64) Point {\n\tvar a = b * math.Pi / 180\n\treturn r.Offset(p, math.Sin(a)*d, math.Cos(a)*d)\n}", "func (p *User) CalcDistance(q *User) float64 {\n\n\tdist := 0.5 - math.Cos((q.lat-p.lat)*pi)/2 + math.Cos(p.lat*pi)*math.Cos(q.lat*pi)*(1-math.Cos((q.long-p.long)*pi))/2\n\tdist = 12742 * math.Asin(math.Sqrt(dist))\n\n\treturn dist\n}", "func (l1 Location) Distance(l2 Location) float64 {\n\t// convert to radians\n\t// must cast radius as float to multiply later\n\tvar la1, lo1, la2, lo2, earthRadius float64\n\tla1 = l1.Latitude * math.Pi / 180\n\tlo1 = l1.Longitude * math.Pi / 180\n\tla2 = l2.Latitude * math.Pi / 180\n\tlo2 = l2.Longitude * math.Pi / 180\n\n\tearthRadius = 6378100 // Earth radius in meters\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn 2 * earthRadius * math.Asin(math.Sqrt(h))\n}", "func (p Point) Offset(toward Point, by float32) Point {\n\treturn p.Add(p.DirTo(toward).Mul(by))\n}", "func distance(l1,l2 tgbotapi.Location) float64 {\n\tlat1:=l1.Latitude\n\tlon1:=l1.Longitude\n\tlat2:=l2.Latitude\n\tlon2:=l2.Longitude\n\t// convert to radians\n\t// must cast radius as float to multiply later\n\tvar la1, lo1, la2, lo2, r float64\n\tla1 = lat1 * math.Pi / 180\n\tlo1 = lon1 * math.Pi / 180\n\tla2 = lat2 * math.Pi / 180\n\tlo2 = lon2 * math.Pi / 180\n\n\tr = 6378100 // Earth radius in METERS\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn 2 * r * math.Asin(math.Sqrt(h))\n}", "func (c Camera) Ray(u, v float64) Ray {\n\treturn Ray{\n\t\tOrigin: c.Origin,\n\t\tDir: c.BottomLeft.Add(c.Horizontal.ScalarMul(u)).Add(c.Vertical.ScalarMul(v)).Sub(c.Origin),\n\t}\n}", "func CMOVLOC(mr, r operand.Op) { ctx.CMOVLOC(mr, r) }", "func (p *Player) PosPlus(e float32) mgl32.Vec3 {\n\treturn p.Entity.Position.Add(p.FacingDir(e))\n}", "func (r Ruler) Distance(a Point, b Point) float64 {\n\tdx := (a[0] - b[0]) * r.kx\n\tdy := (a[1] - b[1]) * r.ky\n\treturn math.Sqrt(dx*dx + dy*dy)\n}", "func AttackRay(p *engine.Piece, b *engine.Board, dir [2]int) int {\n\tif p.Captured {\n\t\treturn 0\n\t}\n\tif 
!p.Infinite_direction {\n\t\treturn 1\n\t}\n\tfor n := 1; n < 8; n++ {\n\t\ts := &engine.Square{\n\t\t\tX: p.Position.X + dir[0]*n,\n\t\t\tY: p.Position.Y + dir[1]*n,\n\t\t}\n\t\tif occupied, _ := b.Occupied(s); occupied != 0 {\n\t\t\tif occupied == -2 {\n\t\t\t\treturn n - 1\n\t\t\t}\n\t\t\treturn n\n\t\t}\n\t}\n\treturn 7\n}", "func NewRay(origin, dir Vec3) *Ray {\n\treturn &Ray{\n\t\tOrigin: origin,\n\t\tDirection: *Unit(dir),\n\t}\n}", "func (p Position) Destination(meters, bearingDegrees float64) Position {\n\tp1 := geojson.Position{p.X, p.Y, p.Z}\n\tp2 := p1.Destination(meters, bearingDegrees)\n\treturn Position{p2.X, p2.Y, p2.Z}\n}", "func (p point) distanceToPoint(o point) int {\n\treturn abs(abs(p.x)-abs(o.x)) + abs(abs(p.y)-abs(o.y))\n}", "func (s *ClampDirectionOffset) Direction() dprec.Vec3 {\n\treturn s.direction\n}", "func Offset(dx, dy float32) {\n\tgContext.Cursor.X += dx\n\tgContext.Cursor.Y += dy\n}", "func (r *Ray) Intersect(o Ray) (Vector, bool) {\n\tconst width = 0.03\n\n\tclampInRange := func(p Vector) (Vector, bool) {\n\t\tdist := r.Origin.Distance(p)\n\t\tif dist < r.Mint || dist > r.Maxt {\n\t\t\treturn r.Origin, false\n\t\t}\n\n\t\treturn p, true\n\t}\n\n\tif r.Origin == o.Origin {\n\t\treturn r.Origin, true\n\t}\n\n\td3 := r.Direction.Cross(o.Direction)\n\n\tif !d3.Equals(NewVector(0, 0, 0)) {\n\t\tmatrix := [12]float64{\n\t\t\tr.Direction.X,\n\t\t\t-o.Direction.X,\n\t\t\td3.X,\n\t\t\to.Origin.X - r.Origin.X,\n\n\t\t\tr.Direction.Y,\n\t\t\t-o.Direction.Y,\n\t\t\td3.Y,\n\t\t\to.Origin.Y - r.Origin.Y,\n\n\t\t\tr.Direction.Z,\n\t\t\t-o.Direction.Z,\n\t\t\td3.Z,\n\t\t\to.Origin.Z - r.Origin.Z,\n\t\t}\n\n\t\tresult := solve(matrix, 3, 4)\n\n\t\ta := result[3]\n\t\tb := result[7]\n\t\tc := result[11]\n\n\t\tif a >= 0 && b >= 0 {\n\t\t\tdist := d3.MultiplyScalar(c)\n\t\t\tif dist.Length() <= width {\n\t\t\t\treturn clampInRange(r.At(a))\n\t\t\t}\n\t\t\treturn r.Origin, false\n\t\t}\n\t}\n\n\tdP := o.Origin.Multiply(r.Origin)\n\n\ta2 := r.Direction.Dot(dP)\n\tb2 := o.Direction.Dot(dP.Neg())\n\n\tif a2 < 0 && b2 < 0 {\n\t\tdist := r.Origin.Distance(dP)\n\t\tif dP.Length() <= width {\n\t\t\treturn clampInRange(r.At(dist))\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tp3a := r.Origin.Plus(r.Direction.MultiplyScalar(a2))\n\td3a := o.Origin.Minus(p3a)\n\n\tp3b := r.Origin\n\td3b := o.Origin.Plus(o.Direction.MultiplyScalar(b2)).Minus(p3b)\n\n\tif b2 < 0 {\n\t\tif d3a.Length() <= width {\n\t\t\treturn clampInRange(p3a)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif a2 < 0 {\n\t\tif d3b.Length() <= width {\n\t\t\treturn clampInRange(p3b)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif d3a.Length() <= d3b.Length() {\n\t\tif d3a.Length() <= width {\n\t\t\treturn clampInRange(p3a)\n\t\t}\n\t\treturn r.Origin, false\n\t}\n\n\tif d3b.Length() <= width {\n\t\treturn clampInRange(p3b)\n\t}\n\n\treturn r.Origin, false\n}", "func (p *PlayerEntity) Move(offset shared.FloatVector) {\n\tp.position.X += offset.X\n\tp.position.Y += offset.Y\n}", "func Direction(lon0, lat0, lon1, lat1 float64) float64 {\n\tc_dir := C.fap_direction(\n\t\tC.double(lon0), C.double(lat0),\n\t\tC.double(lon1), C.double(lat1),\n\t)\n\n\treturn float64(c_dir)\n}", "func (p *Position) Move(hole int) (*Position, *Position, MoveResult, error) {\n\t// validate in range\n\tif hole < 1 || hole > WIDTH() {\n\t\treturn p, nil, BadMove, errors.New(\"hole not in range\")\n\t}\n\n\t// validate hole has stones\n\tstones := p.near().Items[hole]\n\tif stones == 0 {\n\t\treturn p, nil, BadMove, errors.New(\"invalid 
move\")\n\t}\n\n\t// create delta position\n\tdelta, lastRow, lastHole := deltaPosition(hole, stones)\n\t// fmt.Printf(\"deltaPosition lastRow:%d, lastHole:%d\\n\", lastRow, lastHole)\n\t// combine\n\tresult := p.add(delta)\n\n\t// determina result from last position\n\tmoveResult := EndOfTurn\n\tif lastHole == 0 {\n\t\tmoveResult = RepeatTurn\n\t}\n\n\t// check for steal\n\tif isSteal, opRow, opHole, opCount := result.IsSteal(lastRow, lastHole); isSteal {\n\t\t// create steal position\n\t\tsteal := stealPosition(lastRow, lastHole, opRow, opHole, opCount)\n\t\t// apply\n\t\tresult = result.add(steal)\n\t}\n\n\tif result.IsGameEnd() {\n\t\tmoveResult = EndOfGame\n\t}\n\n\treturn result, delta, moveResult, nil\n}", "func (p *EdwardsPoint) Mul(point *EdwardsPoint, scalar *scalar.Scalar) *EdwardsPoint {\n\treturn edwardsMul(p, point, scalar)\n}", "func (p Vector3) DistanceTo(o Vector3) Coord {\n\tdx := p.X - o.X\n\tdy := p.Y - o.Y\n\tdz := p.Z - o.Z\n\treturn Coord(math.Sqrt(float64(dx*dx + dy*dy + dz*dz)))\n}", "func Move(zoom, lat, lon float64, pdx int, pdy int) (nlat, nlon float64) {\n\txf, yf := TileNum(int(zoom), lat, lon)\n\tdx := float64(pdx) / TileWidth\n\tdy := float64(pdy) / TileHeight\n\n\treturn latlonFromXY(int(zoom), xf+(dx), yf+(dy))\n}", "func distance(xa, ya, xb, yb int) float64 {\n\tx := math.Abs(float64(xa - xb))\n\ty := math.Abs(float64(ya - yb))\n\treturn math.Sqrt(x*x + y*y)\n}", "func Distance(x1, y1, x2, y2 float64) float64 {\n\treturn math.Sqrt(math.Pow(x1 - x2, 2) + math.Pow(y1 - y2, 2))\n}", "func distance(x0, x1, y0, y1 int) float64 {\n\tdistance := math.Sqrt(math.Pow(float64(x1-x0), 2) + math.Pow(float64(y1-y0), 2))\n\treturn distance\n}", "func (loc1 location) distance(loc2 location) float64 {\n\tvar la1, lo1, la2, lo2, r float64\n\tlat1 := loc1.Lat\n\tlon1 := loc1.Long\n\tlat2 := loc2.Lat\n\tlon2 := loc2.Long\n\tla1 = lat1 * math.Pi / 180\n\tlo1 = lon1 * math.Pi / 180\n\tla2 = lat2 * math.Pi / 180\n\tlo2 = lon2 * math.Pi / 180\n\n\tr = 6378100 // Earth radius in METERS\n\n\t// calculate\n\th := hsin(la2-la1) + math.Cos(la1)*math.Cos(la2)*hsin(lo2-lo1)\n\n\treturn math.Round(2 * r * math.Asin(math.Sqrt(h)))\n}", "func (m *DistanceMutation) Distance() (r string, exists bool) {\n\tv := m._Distance\n\tif v == nil {\n\t\treturn\n\t}\n\treturn *v, true\n}", "func (self *Graphics) CameraOffset() *Point{\n return &Point{self.Object.Get(\"cameraOffset\")}\n}", "func (p Point3) Distance(p2 Point3) float64 {\n\treturn Distance3(p.X(), p.Y(), p.Z(), p2.X(), p2.Y(), p2.Z())\n}" ]
[ "0.6087142", "0.54294276", "0.5297914", "0.5245839", "0.5236105", "0.5213102", "0.5143102", "0.51349884", "0.5061371", "0.5052785", "0.50251883", "0.5004317", "0.49870855", "0.4963939", "0.49597773", "0.49274454", "0.49230936", "0.49188787", "0.48924768", "0.48168725", "0.47859994", "0.4777463", "0.47719878", "0.4770826", "0.4749838", "0.47460487", "0.47459686", "0.47298717", "0.47280306", "0.47173294", "0.46866703", "0.46765423", "0.46736977", "0.46701193", "0.46509644", "0.46274677", "0.46192908", "0.461001", "0.4600265", "0.4598681", "0.45894215", "0.4578295", "0.457704", "0.4558075", "0.45325896", "0.452484", "0.4520652", "0.4517406", "0.4517406", "0.4497843", "0.44838268", "0.44804826", "0.44681054", "0.4462413", "0.44467553", "0.44327378", "0.44283354", "0.4403814", "0.44015792", "0.43828106", "0.438086", "0.43805832", "0.43771714", "0.4354251", "0.43541637", "0.43480703", "0.43474892", "0.43425757", "0.43375987", "0.43186763", "0.4316774", "0.4315394", "0.43010306", "0.4297649", "0.42972416", "0.4295528", "0.42931503", "0.4280785", "0.42800725", "0.42692947", "0.4268526", "0.42641294", "0.4262936", "0.42600983", "0.42586905", "0.4257987", "0.4246843", "0.42454687", "0.42441198", "0.42330837", "0.42227256", "0.4218444", "0.42168745", "0.42132035", "0.42094454", "0.4203446", "0.41985318", "0.41963628", "0.41901714", "0.41896522" ]
0.74692005
0
PRE: true POST: IsPrime returns true if n is prime and false otherwise
func findPrimes(interval com.TPInterval) (primes []int) { for i := interval.A; i <= interval.B; i++ { if isPrime(i) { primes = append(primes, i) } } return primes }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func IntProbablyPrime(x *big.Int, n int) bool", "func IsPrime(n int) bool {\n if n <= 1 { return false }\n if n == 2 || n == 3 { return true }\n if n % 2 == 0 || n % 3 == 0 { return false }\n if n < 9 { return true }\n r := intRoot(n)\n s := TwoThreeSieve()\n for i:=s(); i<=r; i=s() {\n if n % i == 0 { return false }\n }\n return true\n}", "func isPrime(toTest int) bool {\n\n sqrt := math.Sqrt(float64(toTest))\n\n for i := 2; i <= int(sqrt); i++ {\n if toTest % i == 0 {\n return false\n }\n }\n\n return true\n\n}", "func isPrime(n int) (bool) {\n\tif n == 2 || n == 3 {\n\t\treturn true\n\t}\n\tif n % 2 == 0 || n % 3 == 0 {\n\t\treturn false\n\t}\n\n\ti := 5\n\tw := 2\n\tfor i * i <= n {\n\t\tif n % i == 0 {\n\t\t\treturn false\n\t\t}\n\n\t\ti += w\n\t\tw = 6 - w\n\t}\n\n\treturn true\n}", "func isPrime(n int) bool {\n\tif n == 2 {\n\t\treturn true\n\t}\n\n\tif n%2 == 0 {\n\t\treturn false\n\t}\n\n\tnsqrt := int(math.Sqrt(float64(n)))\n\n\tfor i := 3; i < nsqrt+1; i += 2 {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func isTwoSidePrime(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tvalue := vars[\"num\"]\n\t// To convert from String to Int\n\tn, _ := strconv.Atoi(value)\n\tjson.NewEncoder(w).Encode(checkTwoSidePrime(n))\n}", "func (t *PrimosRPC) Prime(args *shared.Args, reply *bool) error {\n\t*reply = isPrimo(args.A)\n\n\treturn nil\n}", "func Prime(p int) bool {\n\tif p < 2 {\n\t\treturn false\n\t} else if p == 2 {\n\t\treturn true\n\t}\n\n\tfor i := 2; i < p; i++ {\n\t\tif p%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n\n}", "func isPrime(n uint32) bool {\n\tfor x := uint32(2); (x*x <= n) && (x < 65536); x++ {\n\t\tif n%x == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn (n > 1)\n}", "func IsPrime(n int) bool {\n\tif n < 2 {\n\t\treturn false\n\t}\n\n\t_, ok := Factor(n)[n]\n\treturn ok\n}", "func isPrime(v int) bool {\n\tif v <= 1 {return false}\n\tif v == 2 {return true}\n\tif (v & 1) == 0 && v != 2 {return false} // cheap bitwise test for even numbers\n\thv := v / 2\n\n\tfor i := 2; i < hv; i++ {\n\t\tif v % i == 0 {return false}\n\t}\n\treturn true\n}", "func isPrime(x int) bool {\n\tif x%2==0 {\n\t\treturn x==2\n\t}\n\tm := 1+int(math.Sqrt(float64(x)))\n\tfor i := 3; i < m ; i+=2 {\n\t\tif x%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func isPrime(n uint64) bool {\n\tif n <= 3 {\n\t\treturn n > 1\n\t}\n\n\t// 不在6的倍数两侧的一定不是质数\n\tif n%6 != 1 && n%6 != 5 {\n\t\treturn false\n\t}\n\n\tsqrt := math.Sqrt(float64(n))\n\tfor i := uint64(5); i <= uint64(sqrt); i += 6 {\n\t\tif n%i == 0 || n%(i+2) == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func isPrime(value int) bool {\n for i := 2; i <= int(math.Floor(float64(value) / 2)); i++ {\n if value % i == 0 {\n return false\n }\n }\n return value > 1\n}", "func EsPrimo(a uint32) bool {\n\tc := 0\n\tvar i uint32\n\tfor i = 1; i <= a; i++ {\n\t\tif a%i == 0 {\n\t\t\tc++\n\t\t}\n\t}\n\tif c == 2 {\n\t\treturn true\n\t}\n\treturn false\n}", "func IsPrime(num int) bool {\n\t// special cases\n\tif num == 1 {\n\t\treturn false\n\t}\n\tif num == 2 {\n\t\treturn true\n\t}\n\n\tif num%2 == 0 {\n\t\treturn false\n\t}\n\tfor i := 3; i <= int(math.Floor(math.Sqrt(float64(num)))); i++ {\n\t\tif num%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func isPrime(num int64) bool {\n\tif num == 2 { // If the number is two it is the one prime even number\n\t\treturn true\n\t}\n\n\tif num%2 == 0 { // even numbers cannot be prime\n\t\treturn 
false\n\t}\n\n\t// start at 3 and increment by 2 since primes > 2 aren't even\n\tfor i := int64(3); i*i <= num; i += 2 {\n\t\tif num%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func IsPrime(n int) bool {\n\tif n < 2 {\n\t\treturn false\n\t}\n\tif n == 2 {\n\t\treturn true\n\t}\n\tif n%2 == 0 {\n\t\treturn false\n\t}\n\n\tfor i := 3; i*i <= n; i += 2 {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func IsPrime(n uint64) bool {\n pf := PrimeFactorsOf(n)\n if len(pf) == 1 && pf[0] == n {\n return true\n }\n return false\n}", "func Prime(a uint) bool {\n\tif a == 1 || a == 0 {\n\t\treturn false\n\t}\n\tfor i := a - 1; i >= 2; i-- {\n\t\tif a%i == 0 {\n\t\t\treturn false\n\t\t} else {\n\t\t\tcontinue\n\t\t}\n\n\t}\n\n\treturn true\n}", "func isPrime(primes []int,test int,len int) bool{\n\n for v:=1;v<len;v++{\n if (test%v) == 0 {\n return false;\n }\n }\n return true;\n}", "func isPrime(num int64) bool {\n\n\tfor i := 0; i < len(primes); i++ {\n\t\tif num%primes[i] == 0 {\n\t\t\treturn false\n\t\t}\n\t\t// dont need to try more than square root of num\n\t\tif primes[i]*primes[i] > num {\n\t\t\tbreak\n\t\t}\n\t}\n\tprimes = append(primes, num)\n\treturn true\n}", "func isPrime(j int) bool {\n\n\ty := true\n\n\tfor denum := 2; float64(denum) <= math.Sqrt(float64(j)+1); denum++ {\n\n\t\tif j%denum == 0 {\n\t\t\ty = false\n\t\t\tbreak\n\t\t}\n\n\t}\n\n\treturn y\n}", "func IsPrime(n int64) bool {\n\tif n < 2 {\n\t\treturn false\n\t}\n\tfor p := int64(2); p*p <= n; p++ {\n\t\tif n%p == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func IsPrime(n int) bool {\n\tif n == 1 {\n\t\treturn false\n\t}\n\n\tfor i := 2; i*i <= n; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func isPrime(numberToTest int) bool {\n\tfor j := 2; j <= numberToTest/2; j++ {\n\t\t// Quit if a lpf has been set to value greater than one being tested here.\n\t\tif largestFactor > numberToTest {\n\t\t\treturn false\n\t\t}\n\t\tif numberToTest % j == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func is_prime(number int) bool {\n\tif (number <= 3) {\n\t\treturn number > 1\n\t}\n\n\tif (number%2 == 0 || number%3 == 0) {\n\t\treturn false\n\t}\n\n\tfor i := 5; i*i < number; i+=6 {\n\t\tif number%i == 0 || number%(i+2) == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func Prime1(n int) bool {\n\tif n < 2 {\n\t\treturn false\n\t}\n\tif n == 2 {\n\t\treturn true\n\t}\n\tfor i := 2; i < n; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func isPrime(number int) bool {\n\troot := int(math.Sqrt(float64(number)))\n\tfor i := 2; i <= root; i++ {\n\t\tif number%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func TestPrime(t *testing.T) {\n\tprime := uint32(num)\n\tif !i32Prime(prime) {\n\t\tt.Error(prime, \"failed prime test\")\n\t}\n}", "func IsPrime(n int) bool {\n\t// Considering that primes are greater than 1\n\tif n <= 0 {\n\t\treturn false\n\t}\n\t// For even, only true if equals to two\n\tif n%2 == 0 {\n\t\treturn n == 2\n\t}\n\troot := int(math.Sqrt(float64(n)))\n\t// Tries to divide n for all odd numbers from 3 to n\n\tfor i := 3; i <= root; i += 2 {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func CheckPrime(n int) bool {\n\n\tif n < 2 {\n\t\treturn false\n\t}\n\n\tsr := int(float64(n / 2))\n\n\tfor i := 2; i <= sr; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func isPrime(value int) bool {\n\tfor i := 2; i <= 
int(math.Floor(float64(value)/2)); i++ {\n\t\tif value%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn value > 1\n}", "func IsPrime(n int64) bool {\n\treturn n == SmallestDivisor(n)\n}", "func IsPrime(x int) (largest_div int, isPrime bool, err error) {\n\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tlargest_div = 0\n\t\t\tisPrime = false\n\t\t\terr = errors.New(fmt.Sprintf(\"%s\", r))\n\t\t}\n\t}()\n\n\tif x < 2 {\n\t\treturn 0, false, nil\n\t}\n\tif x == 2 {\n\t\treturn 0, true, nil\n\t}\n\tif x%2 == 0 {\n\t\treturn 2, false, nil\n\t} else {\n\t\tlimit := int(math.Sqrt(float64(x))) + 1\n\t\tfor i := 3; i < limit; i += 2 {\n\t\t\tif x%i == 0 {\n\t\t\t\treturn i, false, nil\n\t\t\t}\n\t\t}\n\t}\n\treturn 0, true, nil\n}", "func isSavePrime(prime *big.Int) bool {\n\ttmp := new(big.Int).Sub(prime, bigOne) // prime - bigOne = prime - 1\n\ttmp.Div(tmp, bigTwo)\n\treturn tmp.ProbablyPrime(20)\n}", "func nthPrime(n int) int {\n foundPrimes := 0\n currentNumber := 0\n for foundPrimes < n {\n if isPrime(currentNumber) {\n foundPrimes++\n }\n currentNumber++\n }\n return currentNumber - 1\n}", "func isNewPrime(candidate int, knownPrimes []int) bool {\n\tfor _, e := range knownPrimes {\n\t\tif candidate%e == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func vale_la_pena(pakete *helloworld.PaqueteRequest)(res int){\n\tif pakete.Tipo == \"retail\" {\n\t\tres = 1\n\t\treturn\n\t}else{\n\t\tvalor, err := strconv.Atoi(pakete.Valor)\n\t\tif err != nil{\n\t\t\tlog.Printf(\"El valor del paquete no es un número: %v\", err)\n\t\t}\n\t\tif int32(valor) > (pakete.Intentos)*10{\n\t\t\tres = 1\n\t\t\treturn\n\t\t}else{\n\t\t\tres = 0\n\t\t\treturn\n\t\t}\n\t}\n}", "func nextPrime(n uint32) uint32 {\n\tvar p uint32\n\tfor p = n; p != 0; p++ {\n\t\tif isPrime(p) {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn p\n}", "func IsPrimeBruteForce(n uint) bool {\n\tcount := 0\n\n\tfor i := uint(2); i < n; i++ {\n\t\tif n%i == 0 {\n\t\t\tcount++\n\t\t}\n\t}\n\n\treturn count == 0\n}", "func isDividableByPrimes(primes []int, target int) bool {\n\tfor _, prime := range primes {\n\t\tif target%prime == 0 {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func Prime3(n int) bool {\n\tfor i := 2; i < n; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn n > 1\n}", "func checkdivisibility(p, lb, ub *big.Int) bool {\n z := new (big.Int)\n for i := new(big.Int).Set(lb); i.Cmp(ub) == -1; i.Add(i, big.NewInt(1)) {\n z.Mod(p, i)\n if z.Cmp(big.NewInt(0)) == 0 {\n return true\n }\n }\n return false\n}", "func PrimeSqrt(a *big.Int, pa *big.Int) (*big.Int, bool) {\n\t// Handle the case a == 0\n\tif a.Cmp(bigZERO) == 0 {\n\t\treturn big.NewInt(0), true // should be a new big int!\n\t}\n\n\t// Check number is a square\n\tvalidation := new(big.Int).Exp(a, new(big.Int).Rsh(pa, 1), pa)\n\tif validation.Cmp(bigONE) != 0 {\n\t\treturn nil, false\n\t}\n\n\t// Shortcut when pa = 3 (mod 4)\n\trem := new(big.Int).Mod(pa, bigFOUR)\n\tif rem.Cmp(bigTHREE) == 0 {\n\t\tresult := new(big.Int).Exp(a, new(big.Int).Add(new(big.Int).Rsh(pa, 2), big.NewInt(1)), pa)\n\t\treturn result, true\n\t}\n\n\t// Find a non-residue\n\tz := big.NewInt(2) // Should be a new big int!\n\tfor LegendreSymbol(new(big.Int).Set(z), new(big.Int).Set(pa)) != -1 {\n\t\tz.Add(z, bigONE)\n\t}\n\n\t// Split pa-1 as 2^S*Q\n\tQ := new(big.Int).Sub(pa, big.NewInt(1))\n\tM := 0\n\tfor Q.Bit(0) == 0 {\n\t\tQ.Rsh(Q, 1)\n\t\tM++\n\t}\n\n\t// Setup for main loop\n\tc := new(big.Int).Exp(z, Q, pa)\n\tt := new(big.Int).Exp(a, Q, pa)\n\tR := 
new(big.Int).Exp(a, new(big.Int).Add(new(big.Int).Rsh(Q, 1), big.NewInt(1)), pa)\n\n\t// Main loop\n\tfor t.Cmp(bigONE) != 0 {\n\t\ttp := new(big.Int).Set(t)\n\t\ti := 0\n\t\tfor tp.Cmp(bigONE) != 0 {\n\t\t\ttp.Exp(tp, big.NewInt(2), pa)\n\t\t\ti++\n\t\t}\n\t\tb := new(big.Int).Exp(c, new(big.Int).Lsh(bigONE, uint(M-i-1)), pa)\n\t\tM = i\n\t\tc.Exp(b, bigTWO, pa)\n\t\tt.Mod(new(big.Int).Mul(t, c), pa)\n\t\tR.Mod(new(big.Int).Mul(R, b), pa)\n\t}\n\n\treturn R, true\n}", "func isPrime(number int64) bool {\n\tif big.NewInt(number).ProbablyPrime(0) {\n\t\treturn true\n\t} else {\n\t\treturn false\n\t}\n}", "func IsPrime(number int) bool {\n\tupperDivisor := int(math.Sqrt(float64(number))) + 1\n\tfor divisor := 2; divisor < upperDivisor; divisor++ {\n\t\tif number%divisor == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func IsPrime(x int) bool {\n\t//2 is prime but is a bit of a special case\n\tif x == 2 {\n\t\treturn true\n\t}\n\t//0 and 1 are not prime, but I think thats up for debate\n\tif x <= 1 {\n\t\treturn false\n\t}\n\tif math.Remainder(float64(x), float64(2)) == 0 {\n\t\treturn false\n\t}\n\t//checks every possible factor up to the square root of the given number\n\tfor i := 3; i*i <= x; i++ {\n\t\tif math.Remainder(float64(x), float64(i)) == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\t//if it meets none of the above criteria it is prime\n\treturn true\n}", "func IsPrime(p int64, primes []int64) bool {\n\tif p < 2 {\n\t\treturn false\n\t}\n\tfor i := 0; i < len(primes) && primes[i]*primes[i] <= p; i++ {\n\t\tif p%primes[i] == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func (n nat) probablyPrime(reps int) bool {\n\tif len(n) == 0 {\n\t\treturn false\n\t}\n\n\tif len(n) == 1 {\n\t\tif n[0] < 2 {\n\t\t\treturn false\n\t\t}\n\n\t\tif n[0]%2 == 0 {\n\t\t\treturn n[0] == 2\n\t\t}\n\n\t\t// We have to exclude these cases because we reject all\n\t\t// multiples of these numbers below.\n\t\tswitch n[0] {\n\t\tcase 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53:\n\t\t\treturn true\n\t\t}\n\t}\n\n\tconst primesProduct32 = 0xC0CFD797 // Π {p ∈ primes, 2 < p <= 29}\n\tconst primesProduct64 = 0xE221F97C30E94E1D // Π {p ∈ primes, 2 < p <= 53}\n\n\tvar r Word\n\tswitch _W {\n\tcase 32:\n\t\tr = n.modW(primesProduct32)\n\tcase 64:\n\t\tr = n.modW(primesProduct64 & _M)\n\tdefault:\n\t\tpanic(\"Unknown word size\")\n\t}\n\n\tif r%3 == 0 || r%5 == 0 || r%7 == 0 || r%11 == 0 ||\n\t\tr%13 == 0 || r%17 == 0 || r%19 == 0 || r%23 == 0 || r%29 == 0 {\n\t\treturn false\n\t}\n\n\tif _W == 64 && (r%31 == 0 || r%37 == 0 || r%41 == 0 ||\n\t\tr%43 == 0 || r%47 == 0 || r%53 == 0) {\n\t\treturn false\n\t}\n\n\tnm1 := nat(nil).sub(n, natOne)\n\t// 1<<k * q = nm1;\n\tq, k := nm1.powersOfTwoDecompose()\n\n\tnm3 := nat(nil).sub(nm1, natTwo)\n\trand := rand.New(rand.NewSource(int64(n[0])))\n\n\tvar x, y, quotient nat\n\tnm3Len := nm3.bitLen()\n\nNextRandom:\n\tfor i := 0; i < reps; i++ {\n\t\tx = x.random(rand, nm3, nm3Len)\n\t\tx = x.add(x, natTwo)\n\t\ty = y.expNN(x, q, n)\n\t\tif y.cmp(natOne) == 0 || y.cmp(nm1) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tfor j := 1; j < k; j++ {\n\t\t\ty = y.mul(y, y)\n\t\t\tquotient, y = quotient.div(y, y, n)\n\t\t\tif y.cmp(nm1) == 0 {\n\t\t\t\tcontinue NextRandom\n\t\t\t}\n\t\t\tif y.cmp(natOne) == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\treturn false\n\t}\n\n\treturn true\n}", "func FloatingPrime() bool{\n\tvar input float64\n\n\t//Receives input\n\tfmt.Println(\"Please enter any number that ranges from 1.0 - 10.0\")\n\t_,err := fmt.Scan(&input)\n\n\tif 
err != nil{\n\t\tfmt.Println(err)\n\t}\n\n\t//Check the range of the inputted\n\tif input < 1 || input > 10{\n\t\tlog.Fatal(\"The given input is invalid\")\n\t}\n\n\t//Converts to string and splits according to the decimals\n\tstr := strconv.FormatFloat(input, 'f', 3, 64)\n\n\tsplited := strings.Split(str, \".\")\n\ttemp := splited[0]\n\n\t//For loops the converted strings has already been splited\n\tfor _,val := range string([]rune(splited[1])){\n\t\t//Concat the splited strings together\n\t\ttemp += string(val)\n\t\tn, _ := strconv.ParseInt(temp,10,32 )\n\n\t\t//Check if that number is prime\n\t\tif big.NewInt(n).ProbablyPrime(0){\n\t\t\tfmt.Println(\"Is a prime number at:\",n)\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func Is(p int) bool {\n\tif p <= 3 {\n\t\treturn p >= 2\n\t}\n\tif p%2 == 0 || p%3 == 0 {\n\t\treturn false\n\t}\n\tfor i := 5; i*i <= p; i += 6 {\n\t\tif p%i == 0 || p%(i+2) == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func Problem3() {\n\n\tn := 600851475143\n\n\tfor !isPrime(n) {\n\n\t\tfor i := 2; i < n; i++ {\n\t\t\tif n%i == 0 {\n\t\t\t\tfmt.Println(i)\n\t\t\t\tn = n / i\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t}\n\t}\n\tfmt.Println(n)\n}", "func checkPrimes(max int) (s []bool) {\n\ts = make([]bool, max+1)\n\ts[0] = true\n\ts[1] = true\n\ts[2] = false\n\n\tfor i := 4; i <= max; i += 2 {\n\t\ts[i] = true\n\t}\n\n\tlimit := int(math.Sqrt(float64(max))) + 1\n\tfor i := 3; i < limit; i += 2 {\n\t\tif !s[i] {\n\t\t\tfor j := i * i; j < max; j += i {\n\t\t\t\ts[j] = true\n\t\t\t}\n\t\t}\n\t}\n\treturn s\n}", "func main() {\n\tif len(os.Args) <= 1 {\n\t\tfmt.Fprintln(os.Stderr, \"usage: primes starting [ending]\")\n\t\tos.Exit(1)\n\t}\n\n\tlower, err := strconv.ParseInt(os.Args[1], 10, 64)\n\tif err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"primes: %s\\n\", err)\n\t\tos.Exit(0)\n\t}\n\tif lower < 3 {\n\t\tlower = 3\n\t}\n\tif lower%2 == 0 {\n\t\tlower += 1\n\t}\n\n\tvar upper int64\n\tstepOnce := false\n\n\tif len(os.Args) > 2 {\n\t\tupper, err = strconv.ParseInt(os.Args[2], 10, 64)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"primes: %s\\n\", err)\n\t\t\tos.Exit(1)\n\t\t}\n\t\tif upper < lower {\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tstepOnce = true\n\t}\n\n\t// increment current by two, we already know it's an uneven number, so let's\n\t// only check those.\n\tbTwo := big.NewInt(2)\n\tbLower := big.NewInt(lower)\n\tbUpper := big.NewInt(upper)\n\tcurrent := bLower\n\n\tif stepOnce {\n\t\tfor {\n\t\t\tif current.ProbablyPrime(4) {\n\t\t\t\tfmt.Println(current.Int64())\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcurrent = current.Add(current, bTwo)\n\t\t}\n\t}\n\n\tfor current.Cmp(bUpper) < 0 {\n\t\tif current.ProbablyPrime(4) {\n\t\t\tfmt.Println(current.Int64())\n\t\t}\n\t\tcurrent = current.Add(current, bTwo)\n\t}\n}", "func SigmoidPrime(x float64) float64 {\n\treturn Sigmoid(x) * (1 - Sigmoid(x))\n}", "func Prime4(n int) bool {\n\tswitch n {\n\tcase 0:\n\t\treturn false\n\tcase 1: return false\n\tcase 2: return true\n\tdefault:\n\t\tfor i := 2; i < n; i++ {\n\t\t\tif n%i == 0 {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t\treturn true\n\t}\n}", "func PrimeByTrialDivisionImproved(number int) bool {\n\tfor i := 2; i <= helpers.SquareRootOf(number); i++ {\n\t\tif number%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn number > 1\n}", "func IsPrimeSqrt(p int64) bool {\n\tif p < 2 {\n\t\treturn false\n\t}\n\tif p == 2 {\n\t\treturn true\n\t}\n\tif p%2 == 0 {\n\t\treturn false\n\t}\n\tfor i := int64(3); i*i <= p; i += 2 {\n\t\tif p%i == 0 {\n\t\t\treturn 
false\n\t\t}\n\t}\n\treturn true\n}", "func IsPrimeMiller(p int64, iteration int) bool {\n\tif p < 2 {\n\t\treturn false\n\t}\n\tif p != 2 && p%2 == 0 {\n\t\treturn false\n\t}\n\ts := p - 1\n\tfor s%2 == 0 {\n\t\ts /= 2\n\t}\n\tfor i := 0; i < iteration; i++ {\n\t\ta, temp := (rand.Int63()%(p-1))+1, s\n\t\tmod := gmath.PowModulo(a, temp, p)\n\t\tfor temp != p-1 && mod != 1 && mod != p-1 {\n\t\t\tmod = gmath.PowModulo(mod, mod, p)\n\t\t\ttemp *= 2\n\t\t}\n\t\tif mod != p-1 && temp%2 == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "func SimplePrimeFactorizationWithSieve(n int, primeSieve []int) (bool, []int) {\n\n\tfactors := []int{}\n\n\tif len(primeSieve) < n || (n == 1 || n == 0) {\n\t\treturn true, factors\n\t}\n\n\tfor primeSieve[n - 1] == 1 {\n\t\tfor i, v := range primeSieve {\n\t\t\tif v == 0 && n%(i+1) == 0 {\n\t\t\t\tfactors = append(factors, i+1)\n\t\t\t\tn = n/(i+1)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\tfactors = append(factors, n)\n\n\treturn false, factors\n}", "func Prime2(n int) bool {\n\tmid := n / 2\n\tfor i := 2; i <= mid; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn n > 1\n}", "func PrimeByTrialDivision(number int) bool {\n\tfor i := 2; i < number; i++ {\n\t\tif number%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn number > 1\n}", "func checkIfPrime(x int) bool {\n\tvar checker int\n\tvar check bool\n\tfor i := 2; i < x; i++ {\n\t\tif x%i != 0 {\n\t\t\tchecker = checker + 1\n\t\t}\n\t}\n\tif checker+2 == x {\n\t\tcheck = true\n\t} else {\n\t\tcheck = false\n\t}\n\treturn check\n}", "func validateNumber(number int) bool {\n\t// getting the half of the number\n\t// we have to convert number to float64 because that what is required by math.Float\n\trangeMax := math.Floor(float64(number) / float64(2))\n\t// looping through to see if the number if prime or not\n\tfor i := 2; i <= int(rangeMax); i++ {\n\t\t// return false if divisible\n\t\tif number%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\t// if the number if more than 1 -> true else false\n\treturn number > 1\n}", "func Nth(n int) (int, bool) {\n\tif n < 1 {\n\t\treturn 0, false\n\t}\n\ti := 1\n\tfor n > 0 {\n\t\ti++\n\t\tif isPrime(i) {\n\t\t\tn--\n\t\t}\n\t}\n\treturn i, true\n}", "func SigmoidPrime(value float64) float64 {\n\treturn Sigmoid(value) * (1 - Sigmoid(value))\n}", "func NthPrime(nth uint) (uint, error) {\n\tif nth == 0 {\n\t\treturn 0, fmt.Errorf(\"please enter nth value > 0\")\n\t} else if nth == 1 {\n\t\treturn 2, nil\n\t}\n\n\tnumber, counter := uint(3), uint(1)\n\tfor {\n\t\tisPrime := true\n\t\tmaxDiv := uint(math.Ceil(math.Sqrt(float64(number))))\n\t\tfor divisor := uint(3); divisor <= maxDiv; divisor++ {\n\t\t\tif number%divisor == 0 {\n\t\t\t\tisPrime = false\n\t\t\t}\n\t\t}\n\n\t\tif isPrime {\n\t\t\tcounter++\n\t\t\tif counter == nth {\n\t\t\t\treturn number, nil\n\t\t\t}\n\t\t}\n\n\t\tnumber += 2\n\t}\n}", "func nthPrime(n int) int {\n\tcount := 0\n\tnumber := 1\n\tfor count < n {\n\t\tnumber++\n\t\tif isPrime(number) {\n\t\t\tcount++\n\t\t}\n\t}\n\treturn number\n}", "func sigmoidPrime(x float64) float64 {\n\treturn sigmoid(x) * (1.0 - sigmoid(x))\n}", "func sigmoidPrime(x float64) float64 {\n\treturn sigmoid(x) * (1.0 - sigmoid(x))\n}", "func nextPrime(n int, primes []int) []int {\n for _, prime := range primes {\n if n%prime == 0 {\n return nextPrime(n+1, primes)\n }\n }\n return append(primes, n)\n}", "func main() {\n\n\tvar maxNum int = 20\n\t// i mean the question is just a simple case of finding max number of times each prime factor appears in any one 
number in the numbers 1-20\n\t// eg, 2 appears at max 4 times, as 16 = 2x2x2x2\n\tvar primeAppeareanceRate map[int]int = make(map[int]int) // we initialize a map mapping prime to appeareance rate\n\n\tprimes := []int{2} // we initialize for the first prime\n\tprimeAppeareanceRate[2] = 1\n\n\t// im pretty sure there is a better data structure, but im not good with go\n\n\t//for each number, see if its prime if it's prime, then we will add it to the map\n\t//if its not prime, we will see how many times each prime has occured and compare with previous\n\n\tfor i := 3; i<=maxNum; i++ {\n\t\tvar temp int = i\n\t\tfor _, prime := range primes{\n\t\t\tvar count int = 0\n\t\t\tfor temp%prime == 0 {\n\t\t\t\t//it is divisible by a smaller prime, thus is not prime\n\t\t\t\tcount++\n\t\t\t\ttemp /= prime\n\t\t\t\t//continuously divide until not divisible\n\t\t\t}\n\t\t\tif primeAppeareanceRate[prime] < count{\n\t\t\t\tprimeAppeareanceRate[prime] = count\n\t\t\t}\n\t\t}\n\n\t\tif temp != 1 { //if it were divisible by smaller primes, we would be left with only 1 by now\n\t\t\tprimeAppeareanceRate[temp] = 1\n\t\t\tprimes = append(primes, temp)\n\t\t\t//we have added new prime to the list\n\t\t}\n\t}\n\n\t//now we times all the primes\n\tvar output int = 1\n\tfor _, prime:= range primes{\n\t\tfor i:=0; i<primeAppeareanceRate[prime]; i++{\n\t\t\toutput *= prime\n\t\t}\n\t}\n\n fmt.Println(output)\n}", "func main() {\n\tresult := 1\n\n\troot := int(math.Sqrt(float64(number)))\n\tfor i := 2; i <= root; i++ {\n\t\t// not a factor\n\t\tif number%i != 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tpotentialResult := number / i\n\t\tif isPrime(potentialResult) {\n\t\t\tresult = potentialResult\n\t\t\t// can not get larger factor\n\t\t\tbreak\n\t\t}\n\n\t\tif isPrime(i) {\n\t\t\tresult = i\n\t\t\t// still can get larger factor\n\t\t}\n\t}\n\n\tfmt.Println(result)\n}", "func main() {\n\tx := 2\n\tcount := 0\n\tfor {\n\t\tif isItPrime(x) == true {\n\t\t\tcount++\n\n\t\t\tif count == 10001 {\n\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t}\n\t\tx++\n\n\t}\n\tfmt.Println(x)\n}", "func GetNextPrime(cur int64) int64 {\n\tif _, err := os.Stat(\"/tmp/stop\"); err == nil {\n\t\tprint(\"Not replying as stopping soon\")\n\t\treturn 0\n\t} else if os.IsNotExist(err) {\n\t\tnext := cur + 2\n\t\tif cur == 2 {\n\t\t\tnext = cur + 1\n\t\t}\n\t\ttrynum := int64(3)\n\t\tfor trynum < next {\n\t\t\tif next%trynum == 0 {\n\t\t\t\t// Next is not a prime\n\t\t\t\tnext++\n\t\t\t\ttrynum = int64(3)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttrynum++\n\t\t}\n\t\treturn next\n\t} else {\n\t\tpanic(err)\n\t}\n\treturn -1\n}", "func (c *Chance) Prime() int {\n\trandomIndex := c.Rand.Intn(len(data.Primes))\n\treturn data.Primes[randomIndex]\n}", "func (s *Set) Init() bool {\n\ts.P = big.NewInt(0)\n\ts.P.Mul(s.Pri1, s.Pri2)\n\n\ts.PDecr = big.NewInt(0)\n\th1, h2 := big.NewInt(0), big.NewInt(0)\n\th1.Sub(s.Pri1, big.NewInt(1))\n\th2.Sub(s.Pri2, big.NewInt(1))\n\ts.PDecr.Mul(h1, h2)\n\tlog.Printf(\"Prime1 = %3v - Prime2 = %3v | %4v\", s.Pri1, s.Pri2, s.P)\n\tlog.Printf(\"Prime1 - 1 = %3v - Prime2 - 1 = %3v | %4v\", h1, h2, s.PDecr)\n\n\tlog.Printf(\"----------\")\n\t//\n\t// Random seed for public exponent\n\tP_decr := s.PDecr.Int64()\n\texpPub := rand.Int63n(P_decr-2) + int64(2)\n\ts.ExpPub = big.NewInt(expPub)\n\tlog.Printf(\"Exp Public is %3v\", s.ExpPub)\n\n\t//\n\t// Find an appropriate private key with\n\t// REST( ExpPrv * ExpPub ; PDecr ) == 1 !\n\tmaxTries := s.P.Int64()\n\tfmtStr := \"%3v * %3v MOD %3v = %3v\"\n\n\tfor i := int64(2); i < maxTries; i++ {\n\n\t\t// Do not 
consider non-primes\n\t\tiBig := big.NewInt(i)\n\t\tisPrime := iBig.ProbablyPrime(1)\n\t\tif !isPrime {\n\t\t\t// log.Printf(\"skipping for non prime: %v\", iBig)\n\t\t\tcontinue\n\t\t}\n\n\t\t// private key should be unequal public key\n\t\tif iBig.Cmp(s.ExpPub) == 0 {\n\t\t\tlog.Printf(\"skipping %v\", iBig)\n\t\t\tcontinue\n\t\t}\n\n\t\tmsg := \"\"\n\t\tre := (expPub * i) % P_decr\n\t\tif re == 1 {\n\t\t\ts.ExpPrv = iBig\n\t\t\tmsg = fmt.Sprintf(\"| found a second number %3v, so that remainder is 1\", i)\n\t\t\tlog.Printf(fmtStr+\" %v\", expPub, i, P_decr, re, msg)\n\t\t\tlog.Printf(\"(%v*%v)%%%v = %v\", expPub, i, s.P.Int64(), (expPub*i)%s.P.Int64())\n\t\t\treturn true\n\t\t}\n\t\tlog.Printf(fmtStr, expPub, i, P_decr, re)\n\t}\n\treturn false\n}", "func primize(x *big.Int) *big.Int {\n\tvar tmp big.Int\n\tif x.Bit(0) == 0 {\n\t\tx.Add(x, tmp.SetInt64(1))\n\t}\n\tfor {\n\t\tif x.ProbablyPrime(sprpTestCount) {\n\t\t\treturn x\n\t\t}\n\t\tx.Add(x, tmp.SetInt64(2))\n\t}\n}", "func IsPrimeBruteForceWithBreak(n uint) bool {\n\tfor i := uint(2); i < n; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func IsPrimeSqrtN(n uint) bool {\n\tcount := 0\n\tsqrtN := uint(math.Sqrt(float64(n)))\n\n\tfor i := uint(2); i <= sqrtN; i++ {\n\t\tif n%i == 0 {\n\t\t\tcount++\n\t\t}\n\t}\n\n\treturn count == 0\n}", "func main() {\n\n\tupperLimit := big.NewInt(20)\n\tseries := generateSequence(upperLimit)\n\tprimes := generatePrimes(upperLimit)\n\tprimeExps := calculateMaxExponentsForPrimeSeries(primes, upperLimit)\n\n\tfmt.Println(\"----------------------------------------------\")\n\tfmt.Println(\"Number series 1 -\", upperLimit)\n\n\t/*\n\t\tfmt.Println(\"Primes Factors and Prime Exponents --------------------\")\n\t\tfor i := 0; i < len(primes); i++ {\n\t\t\tfmt.Println(\"Prime: \", primes[i],\n\t\t\t\t\" Prime Max Exponent: \", primeExps[i])\n\t\t}\n\t*/\n\n\tfmt.Println(\"----------------------------------------------\")\n\n\tresult := computeSmallestDividend(primes, primeExps)\n\n\tfmt.Println(\"Smallest Dividend Is: \", result)\n\n\tif IsTestNumDivisibleBySeries(series, result) {\n\t\tfmt.Println(\"Result Confirmed! Result is divisible by each number in the series\")\n\t} else {\n\t\tfmt.Print(\"Failure!!!! 
- Result is NOT divisible by each number in the series\")\n\t}\n\n}", "func IsPrimeSqrtNWithBreak(n uint) bool {\n\tsqrtN := uint(math.Sqrt(float64(n)))\n\n\tfor i := uint(2); i <= sqrtN; i++ {\n\t\tif n%i == 0 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "func Primes(n int64) set.Int64 {\n\tp := set.Int64{}\n\n\tfor _, v := range Divisors(n) {\n\t\tif isit.Prime(v) {\n\t\t\tp = append(p, v)\n\t\t}\n\t}\n\n\treturn p\n}", "func (d *DataLoader) Prime(key interface{}, value interface{}) {\n\tcacheKey := key\n\tif d.options != nil && d.options.CacheKeyFn != nil {\n\t\tcacheKey = d.options.CacheKeyFn(key)\n\t}\n\n\t// Only add the key if it does not already exist.\n\tif d.futureCache.Get(cacheKey) == nil {\n\t\tvar future Future\n\n\t\tif err, ok := value.(error); ok {\n\t\t\tfuture = NewFutureError(err)\n\t\t} else {\n\t\t\tfuture = NewFutureValue(value)\n\t\t}\n\n\t\td.futureCache.Set(cacheKey, future)\n\t}\n}", "func Nth(n int) (int, bool) {\n\tif n == 0 {\n\t\treturn 0, false\n\t}\n\tprime := []int{2, 3, 5, 7, 11, 13, 17}\n\tif n < len(prime) {\n\t\treturn prime[n-1], true\n\t}\n\ti := prime[len(prime)-1] + 1\n\tfor {\n\t\tif isDivisible(i, prime) {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tprime = append(prime, i)\n\t\tif n < len(prime) {\n\t\t\treturn prime[n-1], true\n\t\t}\n\t}\n}", "func (c *DeviceController) NextPrime(w http.ResponseWriter, r *http.Request) {\n\tvals := r.URL.Query()\n\tcurs, ok := vals[\"cur\"]\n\tcur, err := strconv.ParseInt(curs[0], 10, 64)\n\tfmt.Println(\"Getting next prime on from: \" + strconv.Itoa(int(cur)))\n\tres := int64(0)\n\tif ok && err == nil {\n\t\tres = prime.GetNextPrime(cur)\n\t}\n\tc.SendJSON(\n\t\tw,\n\t\tr,\n\t\tres,\n\t\thttp.StatusOK,\n\t)\n}", "func generatePrimes(){\n\tvar booleans = make([]bool, MAX_PRIME)\n\tfor i := range booleans{\n\t\tbooleans[i] = true\n\t}\n\tfor i := 2; i*i <= MAX_PRIME; i++{\n\t\tif booleans[i] == true {\n\t\t\tfor j := i*i; j <= MAX_PRIME; j += i{\n\t\t\t\tbooleans[j] = false\n\t\t\t}\n\t\t}\n\t}\n\tfor i := 2; i < MAX_PRIME; i++{\n\t\tif booleans[i]{\n\t\t\tprimes = append(primes, i)\n\t\t}\n\t}\n}", "func isSmallestMember(n int) bool {\n\tfor _, indices := range findIndices(n) {\n\t\tvar primes []int\n\t\tfor _, x := range family(n, indices) {\n\t\t\tif tools.IsPrime(x) {\n\t\t\t\tprimes = append(primes, x)\n\t\t\t}\n\t\t}\n\t\tif len(primes) == 8 {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func (s *Server) PrimeNumberDecomposition(req *calcpb.PrimeNumberDecompositionRequest,\n\tstream calcpb.CalculatorService_PrimeNumberDecompositionServer) error {\n\tfmt.Printf(\"PrimeNumberDecomposition function was invoked with %v \\n\", req)\n\tnumber := req.GetNumber()\n\tfor number > 1 {\n\t\tnum := getFirstPrime(number)\n\t\tnumber /= num\n\t\tres := &calcpb.PrimeNumberDecompositionResponse{Number: num}\n\t\tif err := stream.Send(res); err != nil {\n\t\t\tlog.Fatalf(\"error while sending greet many times responses: %v\", err.Error())\n\t\t}\n\t\ttime.Sleep(time.Second)\n\t}\n\treturn nil\n}", "func makePrimes() {\n\n\tnums := [n]int{}\n\n\tnums[0] = 1\n\n\tp := 2\n\tprimes = append(primes, p)\n\n\tfor p*p <= n {\n\t\tfor i := p; i*p <= n; i++ {\n\t\t\tnums[i*p - 1] = 1\n\t\t}\n\n\t\tfor i := 1; i < n - p; i++ {\n\t\t\tif nums[p - 1 + i] == 0 {\n\t\t\t\tp = p + i\n\t\t\t\tprimes = append(primes, p)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n}", "func nthPrime(n uint) uint {\n\tvar i, count uint\n\tlimit := n * 12\n\tprimes := sieve(limit)\n\n\tfor i = 2; i < limit; i++ {\n\t\tif primes.Test(i) 
{\n\t\t\tcount++\n\t\t}\n\t\tif count == n {\n\t\t\treturn i\n\t\t}\n\t}\n\n\treturn 0\n}", "func PrimeDivisors(n int) int {\n\n\tcnt := 0\n\n\tfor i := 2; i <= n && n != 1; i++ {\n\n\t\tfor ; n%i == 0; cnt++ {\n\t\t\tfmt.Printf(\"/%d\", i)\n\t\t\tn /= i\n\t\t}\n\n\t}\n\n\tfmt.Printf(\"\\n\")\n\treturn cnt\n}", "func Aprime(params *Params, x Num) Num {\n\treturn Concat(readChaCha(params, x), Slice(x, 0, param_ext)) // ChaCha8 || x[:param_ext]\n}", "func Setup(security uint64) *big.Int {\n\n\tvar err error\n\n\tchk := func(dest *big.Int, wg *sync.WaitGroup) {\n\t\tctr := 0\n\t\tvar tmp *big.Int\n\t\tfor {\n\t\t\tctr++\n\t\t\ttmp, err = rand.Prime(rand.Reader, int(security/2))\n\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\tif isSavePrime(tmp) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\t//fmt.Printf(\"\\ntook %v attempts to generate a save prime\", ctr)\n\t\tdest.Set(tmp)\n\t\twg.Done()\n\t}\n\tvar p, q = new(big.Int), new(big.Int)\n\t//lets find the primes concurrently, cuz we can. its golang bliad\n\twg := sync.WaitGroup{}\n\twg.Add(2)\n\tgo chk(p, &wg)\n\tgo chk(q, &wg)\n\twg.Wait()\n\n\trsaModulus := new(big.Int).Mul(p, q)\n\treturn rsaModulus\n}", "func Pi(n int) int {\n\tif n <= 1 {\n\t\treturn 0\n\t}\n\n\t// store numbers in range [2,n]\n\tnumbers := make([]int, n-1)\n\tfor i := range numbers {\n\t\tnumbers[i] = i + 2\n\t}\n\n\tcount := 0\n\t// sieve method\n\tfor i, v := range numbers {\n\t\tif v == i+2 {\n\t\t\tcount++\n\t\t\t// v is a prime\n\t\t\tfor j := i + v; j <= n-2; j += v {\n\t\t\t\tnumbers[j] = 0\n\t\t\t}\n\t\t}\n\t}\n\n\treturn count\n\n}", "func genPrimes(i int) {\nagain:\n\tif i < len(primes) {\n\t\treturn\n\t}\n\tif sieved == 0 {\n\t\t// simple sieve for primes up to 60000\n\t\t// index i represents number 2*i+3\n\t\tconst n = (60000 - 3 + 1) / 2\n\t\tif n*n <= sievewidth {\n\t\t\tpanic(\"need to raise initial sieve size\")\n\t\t}\n\t\ts := make([]bool, n)\n\t\tfor i := int64(0); i < 128; i++ { // 2*128+3 > sqrt(60000)\n\t\t\tif s[i] {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\taddPrime(2*i + 3)\n\t\t\t// mark as composite values starting at p^2, stepping by 2p\n\t\t\tfor j := 2*i*(i+3) + 3; j < n; j += 2*i + 3 {\n\t\t\t\ts[j] = true\n\t\t\t}\n\t\t}\n\t\tfor i := int64(128); i < n; i++ {\n\t\t\tif !s[i] {\n\t\t\t\taddPrime(2*i + 3)\n\t\t\t}\n\t\t}\n\t\tsieved = 60000\n\t\tgoto again\n\t}\n\n\t// Sieve up some more primes.\n\t// Each byte in the sieve covers 30 integers. Each bit in the byte represents\n\t// one of the eight numbers in those 30 that are nonzero mod 2,3, and 5.\n\tstart := sieved\n\tend := start + sievewidth\n\n\t// initialize sieve\n\tfor i := 0; i < sievewidth/30; i++ {\n\t\tsieve[i] = 0\n\t}\n\n\t// mark all composite numbers in the sieve\n\tfor i := 3; ; i++ { // \"3\" means start at p=7\n\t\tp := primes[i]\n\t\tif p*p >= end {\n\t\t\t// We don't need to sieve with primes >= sqrt(end).\n\t\t\t// If they have such a factor, they will also have a\n\t\t\t// factor < sqrt(end).\n\t\t\tbreak\n\t\t}\n\n\t\t// compute starting offset. It is the offset from sieve start\n\t\t// of the first multiple of p which is >= p^2.\n\t\tx := p*p - start\n\t\tif x < 0 {\n\t\t\tx += (-x + p - 1) / p * p\n\t\t}\n\t\tfor j := 0; j < 30; j++ {\n\t\t\t// mark all integers starting at x+j*p with stride 30*p.\n\t\t\ty := x + int64(j)*p\n\t\t\tb := deltaIdx[y%30]\n\t\t\tif b == 255 {\n\t\t\t\t// all values are multiples of 2,3, or 5\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tblock := y / 30\n\t\t\tmask := byte(1) << b\n\t\t\t// This is the inner loop. 
We sweep through the sieve\n\t\t\t// 8 times for each prime < sqrt(end).\n\t\t\tfor block < sievewidth/30 {\n\t\t\t\tsieve[block] |= mask\n\t\t\t\tblock += p\n\t\t\t}\n\t\t}\n\t}\n\t// pick primes out of the sieve\n\tfor i := 0; i < sievewidth/30; i++ {\n\t\ts := sieve[i]\n\t\tfor j := 0; j < 8; j++ {\n\t\t\tif s&1 == 0 {\n\t\t\t\taddPrime(start + int64(i)*30 + int64(deltas[j]))\n\t\t\t}\n\t\t\ts >>= 1\n\t\t}\n\t}\n\tsieved = end\n\tgoto again\n}", "func main() {\n\tn := 600851475143\n\tfmt.Println(\"Find largest primefactor of \", n)\n\tfor i := 2; i < (n / i); i++ {\n\t\tfor n%i == 0 {\n\t\t\tn = n / i\n\t\t\tfmt.Printf(\"%d x %d\\n\", n, i)\n\t\t}\n\t}\n\tif n > 1 {\n\t\tfmt.Println(\"Largest prime factor:\", n)\n\t}\n\n}", "func countPrime(n int) int {\n\n\tvar j int\n\n\tif n >= 2 {\n\n\t\tfor i := n; i > 1; i-- {\n\n\t\t\tif isPrime(i) {\n\t\t\t\tj++\n\t\t\t}\n\t\t}\n\n\t\treturn j\n\n\t} else {\n\n\t\treturn 0\n\n\t}\n\n}", "func problem51() int {\n\tn := 56995 // given in the description as a member of a 7-prime family\n\tfor {\n\t\tif tools.IsPrime(n) && isSmallestMember(n) {\n\t\t\treturn n\n\t\t}\n\t\tn++\n\t}\n}", "func sumPrimes(max int) (sum int) {\n\ts := checkPrimes(max)\n\tfor i := 0; i <= max; i++ {\n\t\tif !s[i] {\n\t\t\tsum += i\n\t\t}\n\t}\n\treturn sum\n}" ]
[ "0.65248394", "0.64596117", "0.64497995", "0.63829464", "0.6322964", "0.63197076", "0.6319113", "0.62583184", "0.6231767", "0.61991537", "0.6170459", "0.61701924", "0.6146464", "0.6121957", "0.6112721", "0.61084133", "0.60875404", "0.60686004", "0.6057913", "0.60466033", "0.6007039", "0.6000722", "0.599722", "0.5965389", "0.5965176", "0.59199697", "0.5904817", "0.58935374", "0.58867955", "0.5869018", "0.58273435", "0.58116573", "0.57926923", "0.57873666", "0.57421076", "0.573188", "0.57153136", "0.57054734", "0.5704938", "0.57027614", "0.5680112", "0.5666765", "0.56498873", "0.560673", "0.5587264", "0.5585562", "0.5581475", "0.5566342", "0.5562446", "0.5558034", "0.55367035", "0.5490088", "0.54803103", "0.547858", "0.54493177", "0.543452", "0.54222953", "0.54140687", "0.5390528", "0.5367129", "0.5348142", "0.5335428", "0.53197074", "0.53107625", "0.5293777", "0.5280106", "0.5250535", "0.5228449", "0.52258223", "0.52205116", "0.52205116", "0.52187216", "0.5187159", "0.5179141", "0.5159555", "0.5116416", "0.51136863", "0.51071733", "0.5101711", "0.5077985", "0.5068397", "0.505249", "0.5052096", "0.5044613", "0.5007192", "0.49793872", "0.49657458", "0.49618608", "0.49591428", "0.49580613", "0.4956836", "0.49479726", "0.49077994", "0.48838872", "0.48792666", "0.48713493", "0.48659328", "0.4860506", "0.48555058", "0.4854571", "0.4853097" ]
0.0
-1
InitMongoDataBase connect to mongo db
func InitMongoDataBase() error {
	connSettings := fmt.Sprintf("mongodb://%s:%s", viper.GetString("storage.mongo.host"), viper.GetString("storage.mongo.port"))
	opts := options.Client().ApplyURI(connSettings)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	c, err := mongo.Connect(ctx, opts)
	if err != nil {
		return err
	}
	client = c
	return client.Ping(ctx, readpref.Primary())
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func InitMongoDatabase(host string, db_name string) (*MongoDatabase, error) {\n\tdb := MongoDatabase{}\n\terr := db.Connect(host)\n\n\tif err != nil {\n\t\treturn &db, err\n\t}\n\n\tdb.name = db_name\n\n\treturn &db, nil\n}", "func InitMongo() {\n\tconnectMongoDB()\n}", "func initMongo(host string, port int, username, passwd, dbname string) (string, *MongoDbConn, *MongoDbConn) {\n\n\t// create DB connection URL for MongoDB\n\ts := fmt.Sprintf(\"mongodb://%s:%s@%s:%d/%s\", username, passwd, host, port, dbname)\n\n\t// create new instance of MongoDB Session\n\tdb := new(MongoDbConn)\n\tdb.name = dbname\n\n\treturn s, db, db\n}", "func InitDb() *mongo.Database {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tclient, err := mongo.Connect(ctx, os.Getenv(\"MONGO_HOST\")+\":\"+os.Getenv(\"MONGO_PORT\"))\n\tif err != nil {\n\t\tlog.Fatal(\"There was an error connecting to the database\")\n\t}\n\tctx, _ = context.WithTimeout(context.Background(), 2*time.Second)\n\terr = client.Ping(ctx, readpref.Primary())\n\tif err != nil {\n\t\tlog.Fatal(\"Couldnt find a server \", err)\n\t}\n\tfmt.Println(\"Connected to MongoDB!\")\n\treturn client.Database(os.Getenv(\"MONGO_DB\"))\n}", "func init() {\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(os.Getenv(\"BRAIN_DB\")))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdb = client.Database(dbName)\n}", "func (p *MongodbProvider) Init(expire int64, connStr string) (err error) {\n\t//Fixed:if connStr is \"mongodb://username:password@localhost/myDataBase\",call mgo.Dial() would panic.\n\t//so delete myDataBase from connStr,then call mgo.Dial();next call session.DB().\n\tvar db string\n\ti := strings.LastIndex(connStr, \"?\")\n\tif i > 0 {\n\t\tconnStr = connStr[:i-1]\n\t}\n\ti = strings.LastIndex(connStr, \"/\")\n\tif i > 0 {\n\t\tif strings.HasPrefix(connStr, \"mongodb://\") {\n\t\t\tif i > len(\"mongodb://\") {\n\t\t\t\tdb = connStr[i+1:]\n\t\t\t\tconnStr = connStr[:i]\n\t\t\t}\n\t\t}\n\t}\n\t//\n\tp.expire = expire\n\tp.session, err = mgo.Dial(connStr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif db == \"\" {\n\t\tvar dbname []string\n\t\tdbname, err = p.session.DatabaseNames()\n\t\tif (len(dbname) == 0) && err != nil {\n\t\t\tpanic(\"Need database name\")\n\t\t}\n\t\tdb = dbname[0]\n\t}\n\tp.c = p.session.DB(db).C(\"session\")\n\treturn p.session.Ping()\n}", "func init() {\n\tdao.Server = \"mongodb://shivam:[email protected]:25294/shayona-store\"\n\tdao.Database = \"shayona-store\"\n\tdao.Connect()\n}", "func init(){\n \n clientOptions := options.Client().ApplyURI(\"mongodb://localhost:27017/\")\n client, err := mongo.Connect(context.TODO(), clientOptions)\n \n if err != nil {\n log.Fatal(err)\n }\n \n err = client.Ping(context.TODO(), nil)\n \n if err != nil {\n log.Fatal(err)\n }\n \n collection = client.Database(\"sugu\").Collection(\"users\")\n}", "func InitMongoDB(ctx context.Context) interfaces.MongoDatabase {\n\tdeferFunc := logger.LogWithDefer(\"Load MongoDB connection...\")\n\tdefer deferFunc()\n\n\t// create db instance\n\tdbInstance := new(mongoInstance)\n\tdbName, ok := os.LookupEnv(\"MONGODB_DATABASE_NAME\")\n\tif !ok {\n\t\tpanic(\"missing MONGODB_DATABASE_NAME environment\")\n\t}\n\n\t// set default mgm write\n\tmgm.SetDefaultConfig(&mgm.Config{CtxTimeout: 15000 * time.Millisecond}, dbName)\n\n\t// get write mongo from env\n\thostWrite := os.Getenv(\"MONGODB_HOST_WRITE\")\n\n\t// connect to 
MongoDB\n\tclient, err := mgm.NewClient(options.Client().ApplyURI(hostWrite))\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"mongo: %v, conn: %s\", err, hostWrite))\n\t}\n\tdbInstance.write = client.Database(dbName)\n\n\t// get read mongo from env\n\thostRead := os.Getenv(\"MONGODB_HOST_READ\")\n\n\t// connect to MongoDB\n\tclient, err = mgm.NewClient(options.Client().ApplyURI(hostRead))\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"mongo: %v, conn: %s\", err, hostRead))\n\t}\n\tdbInstance.read = client.Database(dbName)\n\n\treturn dbInstance\n}", "func init(){\n fmt.Printf(\"init Mongo START\\n\")\n session, err := mgo.Dial(\"localhost\")\n if err != nil {\n panic(err)\n }\n /* defer session.Close() */\n // Optional. Switch the session to a monotonic behavior.\n base_config = &Config{\n session: session,\n }\n fmt.Printf(\"init Mongo DONE\\n\")\n}", "func init() {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(CONNECTIONSTRING))\n\n\tif err != nil {\n\t\tlog.Fatal(\"[init]: %s\\n\", err)\n\t}\n\t// Collection types can be used to access the database\n\tdb = client.Database(DBNAME)\n\n}", "func Initialize(mongo *mongo.Client, opts ...*DBOptions) {\n\tdbManager = &DBManager{}\n\toptions := &DBOptions{}\n\n\tif len(opts) == 0 {\n\t\toptions.databaseName = \"Cryb\"\n\t\toptions.userCollection = \"Users\"\n\t\toptions.portalCollection = \"Portals\"\n\t\toptions.roomCollection = \"Rooms\"\n\t\toptions.messageCollection = \"Messages\"\n\t} else {\n\t\toptions = opts[0]\n\n\t\tif options.databaseName == \"\" {\n\t\t\toptions.databaseName = \"Cryb\"\n\t\t}\n\t\tif options.userCollection == \"\" {\n\t\t\toptions.userCollection = \"Users\"\n\t\t}\n\t\tif options.portalCollection == \"\" {\n\t\t\toptions.portalCollection = \"Portals\"\n\t\t}\n\t\tif options.roomCollection == \"\" {\n\t\t\toptions.roomCollection = \"Rooms\"\n\t\t}\n\t\tif options.messageCollection == \"\" {\n\t\t\toptions.messageCollection = \"Messages\"\n\t\t}\n\t}\n\n\tdbManager.database = mongo.Database(options.databaseName)\n\tdbManager.userCollection = dbManager.database.Collection(options.userCollection)\n\tdbManager.portalCollection = dbManager.database.Collection(options.portalCollection)\n\tdbManager.roomCollection = dbManager.database.Collection(options.roomCollection)\n\tdbManager.messageCollection = dbManager.database.Collection(options.messageCollection)\n\n\tpackageReady = true\n}", "func (m *MongoDB) Init() (err error) {\n\n\tif m.client, err = m.connect(); err != nil {\n\t\tfmt.Printf(\"[ERROR]: Can't connect to the host: [%s]. 
[Err Detail: %s]\\n \", m.Host, err)\n\t}\n\n\treturn\n}", "func InitializationMongo(ctx context.Context) *mongo.Client {\n\tconf := variable.Mongo\n\tclient, err := mongo.NewClient(options.Client().ApplyURI(conf.URI))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif err := client.Connect(ctx); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif err := client.Ping(ctx, readpref.Primary()); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tMongoDB = client.Database(conf.DB)\n\treturn client\n}", "func Initialize(o *ConnectOptions) (*mongo.Database, error) {\n\tif client != nil {\n\t\treturn selectDatabase(client, o.Database)\n\t}\n\n\tdatabaseURL := o.DatabaseURL\n\tconnectTimeout := o.ConnectTimeout\n\n\tif connectTimeout == 0 {\n\t\tconnectTimeout = time.Duration(10 * time.Second)\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), connectTimeout)\n\tdefer cancel()\n\n\tif databaseURL == \"\" {\n\t\treturn nil, fmt.Errorf(\"missing database url\")\n\t}\n\n\tc, err := mongo.Connect(ctx, options.Client().ApplyURI(o.DatabaseURL))\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = testClient(c)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclient = c\n\n\tdefer func() {\n\t\tif err = client.Disconnect(ctx); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\treturn selectDatabase(client, o.Database)\n}", "func InitializeDB() (*mongo.Client, error) {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(\"mongodb://db:27017/url-shortener\"))\n\tif err != nil { \n\t\treturn nil, err\n\t}\n\treturn client, nil\n}", "func Init() {\n\tvar mongoURL bytes.Buffer\n\tvar err error\n\tconfiguration := conf.Get()\n\ttemplate.Must(template.New(\"mongo_url\").Parse(`mongodb://{{.UserName}}:{{.Password}}@localhost:{{.Port}}`)).Execute(&mongoURL, configuration.Database)\n\tSession, err = mgo.Dial(mongoURL.String())\n\tif err != nil {\n\t\tgolog.Errorf(\"Database connection error: %s\", err.Error())\n\t}\n\tDatabase = Session.DB(configuration.Database.Name)\n}", "func initDao() {\n\t_, memoMongoDb, err := core.MongoConnectFromEnvVar(utils.EnvVarMemoDbURL, memoLogger)\n\tif err != nil {\n\t\tmemoLogger.Fatal(1, \"%v\", err)\n\t}\n\n\t// Initialisation: collections name\n\tdbMemoCollectionName = \"al_memos\"\n\n\t// Initialisation: collections instances\n\tdbMemoCollection = memoMongoDb.Collection(dbMemoCollectionName)\n\n\tmemoLogger.Debug(\"[MongoDB] Memo initialisation!\")\n}", "func Init() {\n\tlog.Info(\"Init mongodb instance....\")\n\tclientOptions := options.Client().ApplyURI(connectionString)\n\tclient, err := mongo.Connect(context.TODO(), clientOptions)\n\n\tif err != nil {\n\t\tlog.Error(err.Error())\n\t\treturn\n\t}\n\n\tlog.Info(\"Init mongodb done...\")\n\tdatabase := client.Database(dbName)\n\tusersCollection = database.Collection(\"users\")\n\tproductsCollection = database.Collection(\"products\")\n\tcategoriesCollection = database.Collection(\"categories\")\n\ttestCollection = database.Collection(\"test\")\n}", "func Initialize() {\n\tfmt.Println(\"inited DB\")\n\tvar err error\n\tclient, err = mongo.Connect(bg(), options.Client().ApplyURI(config.DatabaseURL))\n\tif err != nil {\n\t\tlog.Fatalf(\"%+v\\n\", err)\n\t}\n\n\tdb = client.Database(config.DatabaseName)\n}", "func Setup() {\n\tdao.Server = \"mongodb://\" + os.Getenv(\"MONGO_USER\") + \":\" + os.Getenv(\"MONGO_PWORD\") + \"@ds125469.mlab.com:25469/newsy_db\"\n\tdao.Database = \"newsy_db\"\n}", "func InitMongo(config config.Config) error 
{\n\tclient, err := mongo.NewClient(options.Client().ApplyURI(\"mongodb://localhost:27017\").SetAuth(getCredentials()))\n\tif err != nil {\n\t\treturn err\n\t}\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tif err = client.Connect(ctx); err != nil {\n\t\treturn err\n\t}\n\tprimaryMongoClient = client\n\tif err := client.Ping(ctx, options.Client().ReadPreference); err != nil {\n\t\tlogging.GetLogger().Info(\"Unable to connect to mongo client\")\n\t\tlogging.GetLogger().Fatal(err.Error())\n\t\tpanic(err)\n\t}\n\treturn nil\n}", "func (mdbc *MongoDbController) InitDatabase() error {\n\tuserCreationErr := mdbc.initUserCollection(mdbc.dbName)\n\n\t// We want to return an error only if it's not the \"Collection already exists\" error\n\t// The collection will likely exist most times this app is run. We only want to\n\t// return an error if there's a larger problem than the collection already existing\n\tif userCreationErr != nil && !strings.Contains(userCreationErr.Error(), \"Collection already exists\") {\n\t\treturn userCreationErr\n\t}\n\n\tnonceCreationErr := mdbc.initNonceDatabase(mdbc.dbName)\n\n\tif nonceCreationErr != nil && !strings.Contains(nonceCreationErr.Error(), \"Collection already exists\") {\n\t\treturn nonceCreationErr\n\t}\n\n\tinitLoggingErr := mdbc.initLoggingDatabase(mdbc.dbName)\n\n\tif initLoggingErr != nil && !strings.Contains(nonceCreationErr.Error(), \"Collection already exists\") {\n\t\treturn initLoggingErr\n\t}\n\n\treturn nil\n}", "func InitMongo() {\n\tdialInfo := settings.Get().Mongo\n\tvar err error\n\tmongoUrl := fmt.Sprintf(dialInfo.Url, dialInfo.Username, dialInfo.Password)\n\tprintUrl := strings.Replace(mongoUrl, dialInfo.Password, \"******\", 1)\n\tglog.Infoln(\"connection url:\", printUrl)\n\tclient, err = mongo.NewClient(options.Client().ApplyURI(mongoUrl))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tctx, _ := context.WithTimeout(context.Background(), time.Duration(dialInfo.Timeout)*time.Second)\n\terr = client.Connect(ctx)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tgo func(c *mongo.Client) {\n\t\t/*\t\tticker := time.NewTicker(10 * time.Second)\n\t\t\t\tfor _ = range ticker.C {*/\n\t\tvar fr readpref.ReadPref\n\t\tglog.Errorln(client.Ping(nil, &fr))\n\t\tif &fr != nil {\n\t\t\tglog.Infoln(\"ping ...\")\n\t\t\tglog.Infoln(fr.Mode())\n\t\t\tglog.Infoln(fr.MaxStaleness())\n\t\t}\n\t\t//}\n\n\t}(client)\n\tglog.Infoln(\"connect mongodb successfully\")\n}", "func InitMongo(c *mongo.Client) {\n\ttokenCol = c.Database(config.DbMain).Collection(config.ColToken)\n}", "func initDb(username, password, endpoint, port, database string) (*sql.DB, error) {\n\t// Create url for connection\n\turl := fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?parseTime=true\", username, password, endpoint, port, database)\n\n\t// Open connection to SQL DB\n\tdb, err := sql.Open(\"mysql\", url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Test database connection\n\terr = db.Ping()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn db, err\n}", "func ConnectDb() error {\n\tmongo := os.Getenv(\"MONGO_HOST\")\n\tif mongo == \"\" {\n\t\tmongo = \"mongodb://localhost:27017\"\n\t}\n\n\tdb := os.Getenv(\"MONGO_DB_NAME\")\n\tif db == \"\" {\n\t\tdb = \"go-book\"\n\t}\n\n\tfmt.Println(\"mongo: \", mongo)\n\n\t// _ = mgm.SetDefaultConfig(&mgm.Config{CtxTimeout: 12 * time.Second}, \"go-book\", options.Client().ApplyURI(\"mongodb://root:12345@localhost:27017\"))\n\terr := mgm.SetDefaultConfig(&mgm.Config{CtxTimeout: 12 * time.Second}, db, 
options.Client().ApplyURI(mongo))\n\tif err != nil {\n\t\tfmt.Println(\"Connect database error: \", err)\n\t}\n\n\treturn err\n}", "func DBInit(conStr string) {\n\tif db == nil {\n\t\tvar err error\n\t\tdbConnection, err := gorm.Open(\"mysql\", conStr+\"?charset=utf8&parseTime=True&loc=Local\")\n\t\t// db connection will be closed if there's no request for a while\n\t\t// which would cause 500 error when a new request comes.\n\t\t// diable pool here to avoid this problem.\n\t\tdbConnection.DB().SetMaxIdleConns(0)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t}).Fatal(\"Faile to create db connection pool\")\n\t\t} else {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"message\": dbConnection.GetErrors(),\n\t\t\t\t\"db\": conStr,\n\t\t\t}).Info(\"connected to mysql\")\n\t\t}\n\t\tdb = &DB{dbConnection}\n\t}\n\tdb.dbConnect.SetLogger(log.StandardLogger())\n\t// db.Debug message will be logged be logrug with Info level\n\tdb.dbConnect.Debug().AutoMigrate(&Todo{})\n}", "func init() {\n\tUserCollection, collectionErr = database.GetMongoDBCollection(database.Client, \"user\")\n\tif collectionErr != nil {\n\t\tlog.Fatal(collectionErr)\n\t}\n}", "func (mongoStorer *MongoStorer) Initialize() error {\n\tclient, err := mongo.NewClient(\"mongodb://localhost:27017\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tmongoStorer.client = client\n\terr = client.Connect(context.TODO())\n\tif err != nil {\n\t\treturn err\n\t}\n\tmongoStorer.database = client.Database(\"hog\")\n\treturn nil\n}", "func (ch *Context) Init() {\n\tmongoes := ch.MongoDBs\n\tmongoes.Mongo[0] = ch.ConnectMongo(\"mongoone\", \"27137\", \"user\", \"niupi\", \"@NIUPI123\")\n\tmongoes.Mongo[1] = ch.ConnectMongo(\"mongotwo\", \"27138\", \"ml\", \"niupi\", \"@NIUPI123\")\n}", "func InitDatabase() {\n\tdbConnMap = make(map[string]*gorm.DB)\n\tconfigs := config.GetConfigs()\n\tdebug := configs.Debug\n\tdbs := configs.Databases\n\tfor _, db := range dbs {\n\t\tengine := db.Engine\n\t\tvar url string\n\t\tif engine == \"mysql\" {\n\t\t\turl = fmt.Sprintf(\"%s:%s@tcp(%s:%d)/%s?charset=%s&parseTime=True&loc=Local\", db.User, db.Password, db.Host, db.Port, db.Name, db.Charset)\n\t\t}\n\t\tdbConn, errConn := gorm.Open(engine, url)\n\t\tif errConn != nil {\n\t\t\tpanic(\"连接数据库失败:\" + errConn.Error())\n\t\t}\n\t\tdbConn.LogMode(debug)\n\t\tdbConnMap[db.Alias] = dbConn\n\t}\n}", "func Init() (s Storer, err error) {\n\turi := config.ReadEnvString(\"DB_URI\")\n\tname := config.ReadEnvString(\"DB_NAME\")\n\n\tclient, err := mongo.NewClient(options.Client().ApplyURI(uri))\n\tif err != nil {\n\t\tlogger.Error(\"Cannot initialize database\", err)\n\t\treturn\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)\n\tdefer cancel()\n\n\terr = client.Connect(ctx)\n\tif err != nil {\n\t\tlogger.Error(\"Cannot initialize database context\", err)\n\t\treturn\n\t}\n\n\terr = client.Ping(ctx, readpref.Primary())\n\n\tif err != nil {\n\t\tlogger.Error(\"Cannot connect to database\", err)\n\t\treturn\n\t}\n\n\tlogger.Info(\"Connected To MongoDB\")\n\tdb := client.Database(name)\n\n\treturn &mongoStore{db}, nil\n}", "func init() {\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Write presentation\"})\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Host meetup\"})\n}", "func InitializeDb() {\n\tdbPort, err := strconv.Atoi(os.Getenv(\"DB_PORT\"))\n\tif err != nil {\n\t\tlog.Fatal(\"Database port is not valid\")\n\t}\n\n\tdbConnString := fmt.Sprintf(\n\t\t\"host=%s port=%d user=%s 
password=%s dbname=%s sslmode=disable\",\n\t\tos.Getenv(\"DB_HOST\"),\n\t\tdbPort,\n\t\tos.Getenv(\"DB_USER\"),\n\t\tos.Getenv(\"DB_PASS\"),\n\t\tos.Getenv(\"DB_NAME\"),\n\t)\n\n\tDB, err = sql.Open(\"postgres\", dbConnString)\n\tif err != nil {\n\t\tlog.Fatal(\"Could not connect to db- \", err)\n\t}\n}", "func SetupMongoDb(ctx context.Context) (*mongo.Client, error) {\n\tlog.Println(\"Connecting to MongoDB\")\n\n\t// Read config file\n\tdata, err := ioutil.ReadFile(\"./config.json\")\n\tif err != nil {\n\t\tlog.Fatalln(\"Error in opening file\")\n\t\treturn nil, err\n\t}\n\n\tvar config models.Config\n\terr = json.Unmarshal(data, &config)\n\tif err != nil {\n\t\tlog.Fatalln(\"Error in unmarshalling json\")\n\t\treturn nil, err\n\t}\n\n\tconnectionString := fmt.Sprintf(\"mongodb://%s:%s@mongo:27017/\",\n\t\tconfig.DbConfigs.Credentials.Username, config.DbConfigs.Credentials.Password)\n\n\t// TODO: Replace connection string and move to config file\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(\n\t\tconnectionString,\n\t))\n\tif err != nil {\n\t\tlog.Fatalln(\"Connection error: \", err)\n\t\treturn nil, err\n\t}\n\n\t// TODO: Replace database and collection names and move to config file\n\tDbCollection = client.Database(config.DbConfigs.Database).Collection(config.DbConfigs.Collection)\n\treturn client, nil\n}", "func init() {\n\t_ = godotenv.Load()\n\n\thostname := os.Getenv(\"HOST\")\n\tdbname := os.Getenv(\"DBNAME\")\n\tusername := os.Getenv(\"DBUSER\")\n\tpassword := os.Getenv(\"PASSWORD\")\n\n\tdbString := \"host=\" + hostname + \" user=\" + username + \" dbname=\" + dbname + \" sslmode=disable password=\" + password\n\n\tvar err error\n\tdb, err = gorm.Open(\"postgres\", dbString)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tpanic(\"Unable to connect to DB\")\n\t}\n\n\tdb.AutoMigrate(&QuestionModel{})\n\tdb.AutoMigrate(&AnswerModel{})\n\tdb.AutoMigrate(&UserModel{})\n\tdb.AutoMigrate(&Cohort{})\n}", "func DBInit() *gorm.DB {\n\t//db, err := gorm.Open(\"mysql\", \"root:@tcp(128.199.211.144:3306)/godb?charset=utf8&parseTime=True&loc=Local\")\n\tdb, err := gorm.Open(\"mysql\",\"root:orion2402@tcp(localhost:3306)/popfren?charset=utf8&parseTime=True&loc=Local\")\n\tif err != nil {\n\t\tpanic(\"failed to connect to database\")\n\t}\n\n\tdb.AutoMigrate(structs.Person{})\n\treturn db\n}", "func (mdbc *MongoDbController) initLoggingDatabase(dbName string) error {\n\tdb := mdbc.MongoClient.Database(dbName)\n\n\tjsonSchema := bson.M{\n\t\t\"bsonType\": \"object\",\n\t\t\"required\": []string{\"timestamp\", \"type\"},\n\t\t\"properties\": bson.M{\n\t\t\t\"timestamp\": bson.M{\n\t\t\t\t\"bsonType\": \"timestamp\",\n\t\t\t\t\"description\": \"timestamp is required and must be a timestamp\",\n\t\t\t},\n\t\t\t\"type\": bson.M{\n\t\t\t\t\"bsonType\": \"string\",\n\t\t\t\t\"description\": \"type is required and must be a string\",\n\t\t\t},\n\t\t},\n\t}\n\n\tcolOpts := options.CreateCollection().SetValidator(bson.M{\"$jsonSchema\": jsonSchema})\n\tcolOpts.SetCapped(true)\n\tcolOpts.SetSizeInBytes(100000)\n\n\tcreateCollectionErr := db.CreateCollection(context.TODO(), \"logging\", colOpts)\n\n\tif createCollectionErr != nil {\n\t\treturn dbController.NewDBError(createCollectionErr.Error())\n\t}\n\n\treturn nil\n}", "func InitDB() {\n\t// Setting autoload in the main funciton to get the environment variables\n\t// fmt.Printf(\"uri is %+v \\n\", os.Getenv(\"MGDB_APIKEY\"))\n\tclientOptions := options.Client().ApplyURI(os.Getenv(\"MGDB_APIKEY\"))\n\tclient, err := 
mongo.Connect(context.TODO(), clientOptions)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Checking\n\terr = client.Ping(context.TODO(), nil)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfmt.Println(\"Connected to MongoDB successfully.\")\n\n\tDB = client.Database(\"quenc\")\n\n\tPostCategoryCollection = DB.Collection(\"postCategory\")\n\tCommentCollection = DB.Collection(\"comment\")\n\tPostCollection = DB.Collection(\"post\")\n\tReportCollection = DB.Collection(\"report\")\n\tUserCollection = DB.Collection(\"user\")\n\tChatRoomCollection = DB.Collection(\"chatRoom\")\n\n}", "func InitMongoSession() error {\n\t// To avoid a socket leak\n\tif mongo != nil {\n\t\tCleanupMongoSession()\n\t}\n\n\t// Establish new session\n\turl := config.GetMongoURL()\n\tlogger.Debug(\"init mongo\", \"url\", url)\n\tvar err error\n\tif mongo, err = mgo.Dial(url); err != nil {\n\t\treturn err\n\t}\n\n\t// Ensure indicies\n\tensureUserIndex()\n\n\treturn nil\n}", "func (db *MongoDB) Init() error {\n\tsess := db.sess.Copy()\n\tdefer sess.Close()\n\ttasks := db.tasks(sess)\n\tnodes := db.nodes(sess)\n\n\tnames, err := sess.DB(db.conf.Database).CollectionNames()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error listing collection names in database %s: %v\", db.conf.Database, err)\n\t}\n\tvar tasksFound bool\n\tvar nodesFound bool\n\tfor _, n := range names {\n\t\tswitch n {\n\t\tcase \"tasks\":\n\t\t\ttasksFound = true\n\t\tcase \"nodes\":\n\t\t\tnodesFound = true\n\t\t}\n\t}\n\n\tif !tasksFound {\n\t\terr = tasks.Create(&mgo.CollectionInfo{})\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error creating tasks collection in database %s: %v\", db.conf.Database, err)\n\t\t}\n\n\t\terr = tasks.EnsureIndex(mgo.Index{\n\t\t\tKey: []string{\"-id\", \"-creationtime\"},\n\t\t\tUnique: true,\n\t\t\tDropDups: true,\n\t\t\tBackground: true,\n\t\t\tSparse: true,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif !nodesFound {\n\t\terr = nodes.Create(&mgo.CollectionInfo{})\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error creating nodes collection in database %s: %v\", db.conf.Database, err)\n\t\t}\n\n\t\terr = nodes.EnsureIndex(mgo.Index{\n\t\t\tKey: []string{\"id\"},\n\t\t\tUnique: true,\n\t\t\tDropDups: true,\n\t\t\tBackground: true,\n\t\t\tSparse: true,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func initDatabase() {\n\tif dbPath == \"\" {\n\t\t// No path provided, use the default path\n\t\tdbPath = getDefaultDBPath()\n\t}\n\t// Start the database\n\tdb, err := poddata.New(dbPath)\n\tif err != nil {\n\t\tlogrus.Fatal(err.Error())\n\t}\n\tdata = db\n}", "func (db *Database) Init() {\n\tdata, dbErr := tiedot.OpenDB(db.Location)\n\tif dbErr != nil {\n\t\tlog.Error(dbConnectionError{\n\t\t\tmsg: \"Failed to connect to the tiedot database\",\n\t\t\terr: dbErr,\n\t\t})\n\t}\n\n\t// Set up the collections - throw away the error for now.\n\tfor _, c := range db.Collections {\n\t\tdata.Create(c.Name)\n\t\tdata.Use(c.Name).Index(c.Index)\n\t}\n\n\tdb.Data = data\n}", "func (mongo *MongoDB) Initialize(ctx context.Context) error {\n\treturn nil\n}", "func DBCollectionInit(collectionName string) {\n\tsession, err := mgo.Dial(\"mongodb://localhost\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer session.Close()\n\n\tcollRepositories := GetCollection(session, collectionName)\n\n\tdummy := GetDummy()\n\n\terr = collRepositories.Insert(dummy)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\terr = collRepositories.Remove(bson.M{\"dummy\": true})\n\tif err != nil 
{\n\t\tlog.Fatal(err)\n\t}\n}", "func setupMongoDbClient() (*mongo.Client, error) {\n\tmongoDbUrl := os.Getenv(\"MONGO_DB_URL\")\n\tmongoDbUser := os.Getenv(\"MONGO_DB_USERNAME\")\n\tmongoDbPass := os.Getenv(\"MONGO_DB_PASSWORD\")\n\n\tmongoDbFullUrl := fmt.Sprintf(\"mongodb+srv://%v:%v@%v\", mongoDbUser, mongoDbPass, mongoDbUrl)\n\tclientOptions := options.Client().\n\t\tApplyURI(mongoDbFullUrl)\n\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\n\tclient, mdbErr := mongo.Connect(ctx, clientOptions)\n\n\tif mdbErr != nil {\n\t\terr := fmt.Sprint(\"Error connecting: \", mdbErr.Error())\n\t\treturn client, dbController.NewDBError(err)\n\t}\n\n\treturn client, nil\n}", "func (ch *Context) ConnectMongo(MONGO_HOST string, MONGO_PORT string, MONGO_DB string, MONGO_USER string, MONGO_PWD string) *mgo.Database {\n\tSession, MgoError = mgo.Dial(fmt.Sprintf(\"%s:%s\", MONGO_HOST, MONGO_PORT))\n\tif MgoError != nil {\n\t\tfmt.Println(\"Wrong mongdb link!\")\n\t\tos.Exit(1)\n\t}\n\n\tDatabases = Session.DB(MONGO_DB)\n\n\tMgoError = Databases.Login(MONGO_USER, MONGO_PWD)\n\tif MgoError != nil {\n\t\tfmt.Println(\"Authentication failed!\")\n\t\tos.Exit(1)\n\t}\n\n\treturn Databases\n\t// defer Session.Close()\n}", "func InitDatabase() *sql.DB {\n\tlog.Println(\"connecting database.\")\n\n\tquery := url.Values{}\n\tquery.Add(\"database\", \"Company\")\n\n\tu := &url.URL{\n\t\tScheme: \"sqlserver\",\n\t\tUser: url.UserPassword(\"sa\", \"1234\"),\n\t\tHost: fmt.Sprintf(\"%s:%d\", \"localhost\", 1433),\n\t\t// Path: instance, // if connecting to an instance instead of a port\n\t\tRawQuery: query.Encode(),\n\t}\n\n\tlog.Println(u.String())\n\n\tcondb, err := sql.Open(\"sqlserver\", u.String())\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tlog.Println(\"test ping database.\")\n\tif err = condb.Ping(); err != nil {\n\t\tpanic(err)\n\t}\n\treturn condb\n}", "func InitDB(driver, connectionstring string) error {\n\tdb, err := gorm.Open(driver, connectionstring)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsetDB(db)\n\treturn nil\n}", "func NewMongoConnect(c *Config) {\n\tvar err error\n\tinfo := &mgo.DialInfo{\n\t\tAddrs: []string{os.Getenv(\"MONGO_URI\")},\n\t\tTimeout: 60 * time.Second,\n\t\tDatabase: os.Getenv(\"MONGO_URI_DATABASE\"),\n\t\tUsername: os.Getenv(\"MONGO_USER\"),\n\t\tPassword: os.Getenv(\"MONGO_PASS\"),\n\t}\n\n\tc.session, err = mgo.DialWithInfo(info)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tdatabase := c.session.DB(os.Getenv(\"MONGO_URI_DATABASE\"))\n\tif err := ensureIndex(database); err != nil {\n\t\tpanic(err)\n\t}\n\n\tc.closeFns = append(c.closeFns, func() {\n\t\tdefer func() {\n\t\t\tif err := recover(); err != nil {\n\t\t\t\tlog.Println(err)\n\t\t\t}\n\t\t}()\n\t\tlog.Println(\"closing mgo\")\n\n\t\tc.session.Close()\n\t})\n\n}", "func connectToMongo() error {\n\n\tif dbSession != nil {\n\t\treturn nil\n\t}\n\n\tsession, err := mgo.Dial(MongoDBHosts)\n\tdefer func() {\n\t\tsession.Close()\n\t\tutl.TimeTrack(time.Now(), \"connectToMongo\")\n\t}()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdbSession = session.Copy()\n\treturn nil\n\n}", "func InitDatabase(dsn string) error {\n\tfmt.Println(\"Init db connection\")\n\t// config := mysql.NewConfig()\n\t// config.User = username\n\t// config.Passwd = password\n\t// config.Net = protocol\n\t// config.Addr = host\n\t// config.DBName = database\n\t// config.Params = map[string]string{\n\t// \t\"charset\": charset,\n\t// \t\"parseTime\": \"True\",\n\t// }\n\tdb, err := gorm.Open(\"sqlite3\", 
dsn)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn err\n\t}\n\tDbConn = db\n\treturn nil\n}", "func Connect(dburl string) {\n\tif dburl == \"\" {\n\t\tdburl = MongoDBUrl\n\t}\n\n\tMongoSession = MongoConnect(dburl)\n}", "func DBInit() *gorm.DB {\n\te := godotenv.Load() //Load .env file\n\tif e != nil {\n\t\tfmt.Print(e)\n\t}\n\n\thost := os.Getenv(\"DB_HOST\")\n\tport := os.Getenv(\"DB_PORT\")\n\tpassword := os.Getenv(\"DB_PASSWORD\")\n\tdbUser := os.Getenv(\"DB_USER\")\n\tdbName := os.Getenv(\"DB_NAME\")\n\tdbURI := fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?charset=utf8&parseTime=True&loc=%s\", dbUser, password, host, port, dbName, \"Asia%2FJakarta\")\n\n\tdb, err := gorm.Open(\"mysql\", dbURI)\n\tif err != nil {\n\t\tlog.Panicf(\"failed to connect to database with err : %s \", err)\n\t}\n\tdb.DB().SetConnMaxLifetime(time.Minute * 5)\n\tdb.DB().SetMaxIdleConns(0)\n\tdb.DB().SetMaxOpenConns(5)\n\n\tdb.LogMode(true)\n\n\tdB = db\n\tdb.AutoMigrate(\n\t\t&domain.Transaction{},\n\t\t&domain.TransactionDetail{},\n\t\t&domain.Cart{},\n\t\t&domain.CartDetail{},\n\t\t&domain.Product{},\n\t\t&domain.StatusCode{},\n\t)\n\treturn dB\n}", "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func InitializeConnection(addrs []string, timeout int, authDatabase string, username string, password string, workDatabase string) (conn MongoDBConnect, err error) {\n\tconn = MongoDBConnect{}\n\n\t//creating DialInfo object to establish a session to MongoDB\n\tmongoDBDialInfo := &mgo.DialInfo{\n\t\tAddrs: addrs,\n\t\tTimeout: time.Duration(timeout) * time.Second,\n\t\tDatabase: authDatabase,\n\t\tUsername: username,\n\t\tPassword: password,\n\t}\n\n\tconn.database = workDatabase\n\n\t//Creating a session object which creates a pool of socket connections to MongoDB\n\tconn.mongoSession, err = mgo.DialWithInfo(mongoDBDialInfo)\n\tif err != nil {\n\t\tLOGGER.Error(\"Error while creating MongoDB socket connections pool: \", err)\n\t\treturn\n\t}\n\n\tLOGGER.Infof(\"\\t* Dialing MongoDB session for socket connections pool for the Database: [%s] in Mongo Host: [%s]\", conn.database, addrs)\n\tconn.mongoSession.SetMode(mgo.Eventual, true)\n\n\tLOGGER.Infof(\"\\t* MongoDB socket connections pool for [%s] initialized successfully.\", conn.database)\n\treturn\n}", "func init() {\n\t//orm.RegisterDriver(\"mysql\", orm.DRMySQL)\n\n\tmysqlReg := beego.AppConfig.String(\"mysqluser\") + \":\" +\n\t\tbeego.AppConfig.String(\"mysqlpass\") + \"@tcp(127.0.0.1:3306)/\" +\n\t\tbeego.AppConfig.String(\"mysqldb\") + \"?charset=utf8&parseTime=true&loc=Australia%2FSydney\"\n\torm.RegisterDataBase(\"default\", \"mysql\", mysqlReg)\n}", "func ConnectMongoDB() {\n\tmgoOnce.Do(func() {\n\t\tmongodb = dialMgoDB()\n\t})\n}", "func (mongo *MongoDBDepConfig) Initialize(ctx context.Context) error {\n\treturn nil\n}", "func NewDatabase() (*mongo.Database, error) {\n\n\t// Get credentials from env\n\tdbName, err := utils.GetEnv(\"MONGO_INITDB_DATABASE\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdbUser, err := utils.GetEnv(\"MONGO_INITDB_ROOT_USERNAME\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdbPassword, err := utils.GetEnv(\"MONGO_INITDB_ROOT_PASSWORD\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\turi := fmt.Sprintf(\"mongodb://%s:%s@database:27017\", dbUser, dbPassword)\n\n\t// TODO: learn about context\n\tclient, err := 
mongo.Connect(context.TODO(), options.Client().ApplyURI(uri))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tDB := client.Database(dbName)\n\n\treturn DB, nil\n}", "func Initialize() {\r\n\tlog.Info(\"BB: Initializing database service\")\r\n\r\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\r\n\tdefer cancel()\r\n\r\n\tc, err := mongo.Connect(ctx, options.Client().ApplyURI(os.Getenv(\"M_URL\")))\r\n\tif err != nil {\r\n\t\tpanic(err)\r\n\t}\r\n\r\n\tClient = c\r\n\r\n\tRestCollection = Client.Database(os.Getenv(\"M_DB\")).Collection(\"restaurants\")\r\n\tUserCollection = Client.Database(os.Getenv(\"M_DB\")).Collection(\"users\")\r\n\tCardCollection = Client.Database(os.Getenv(\"M_DB\")).Collection(\"cards\")\r\n\r\n\terr = Client.Ping(ctx, nil)\r\n\tif err != nil {\r\n\t\tlog.Error(err)\r\n\t}\r\n\r\n\tlog.Info(\"BB: Connected to the following databases:\")\r\n\tlog.Info(Client.ListDatabaseNames(ctx, bson.D{}))\r\n}", "func (m *SiteSearchTagsDAO) Initialize(mclient *mongo.Client, config *config.AppConfig) error {\n\n\terr := mclient.Ping(context.TODO(), nil)\n\n\tif err != nil {\n\t\tlog.Error().Err(err).Msg(\"Error connecting to mongodb\")\n\t}\n\n\t//log.Info().Msg(\"MediaTagsDAO connected successfully to mongodb\")\n\n\tm.DBClient = mclient\n\tm.Config = config\n\n\treturn nil\n}", "func (m *MongoDB) connect() error {\n\tvar url string\n\tif m.config.GetUser() != \"\" {\n\t\turl = m.config.GetUser()\n\t}\n\tif m.config.GetPassword() != \"\" {\n\t\turl += \":\" + m.config.GetPassword()\n\t}\n\tif m.config.GetUser() != \"\" || m.config.GetPassword() != \"\" {\n\t\turl += \"@\"\n\t}\n\turl += m.config.GetHost() + \":\" + strconv.Itoa(m.config.GetPort())\n\n\tsession, err := mgo.Dial(\"mongodb://\" + url)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tm.db = session.DB(m.config.GetDatabase())\n\tm.db.Session = session\n\treturn nil\n}", "func initAppDB() {\n\n\t// Init config data\n\tdbConf := GetDBConfig()\n\tdbConf.IsAppDB = true\n\n\tdbPoolApp, err := initSocketConnectionPool(dbConf)\n\tif err != nil {\n\t\tlog.Println(\"initial dbConnApp fail : \", err.Error())\n\t} else {\n\t\tlog.Println(\"initial dbConnApp successful\")\n\t\tdbConf.Conn = dbPoolApp\n\t\tdbConf.InitSuccess = true\n\t}\n\n\tdbConf.Err = err\n\n\t// Keep instance\n\tdbAppConf = dbConf\n}", "func InitDatabase() (err error) {\n\tvar pgo *pg.Options\n\n\tif pgo, err = pg.ParseURL(options.PgSQLDSN); err != nil {\n\t\treturn\n\t}\n\tlog.Debugf(\"Try to connect to postgrsql server...\")\n\tdb = pg.Connect(pgo)\n\treturn\n}", "func init() {\n\tlog.Info(\"Initializing database\")\n\tpsqlInfo := fmt.Sprintf(\"host=%s port=%s user=%s \"+\n\t\t\"password=%s dbname=%s sslmode=disable\",\n\t\tconfig.Config().GetString(\"database.host\"),\n\t\tconfig.Config().GetString(\"database.port\"),\n\t\tconfig.Config().GetString(\"database.user\"),\n\t\tconfig.Config().GetString(\"database.password\"),\n\t\tconfig.Config().GetString(\"database.name\"))\n\tdb, err := sql.Open(\"postgres\", psqlInfo)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = db.Ping()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tlog.Info(\"Successfully connected to database!\")\n}", "func initializeDB() *gorm.DB {\n\t// load Env Variables\n\tHOST := os.Getenv(\"HOST\")\n\tDB_PORT := os.Getenv(\"DB_PORT\")\n\tUSER := os.Getenv(\"USER\")\n\tNAME := os.Getenv(\"NAME\")\n\tPASSWORD := os.Getenv(\"PASSWORD\")\n\n\t// Data connection string\n\tDB_URI := fmt.Sprintf(\"host=%s user=%s dbname=%s sslmode=disable password=%s port=%s\", HOST, USER, 
NAME, PASSWORD, DB_PORT)\n\t\n\t// Open DB\n\tdb, err := gorm.Open(postgres.Open(DB_URI), &gorm.Config{})\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t} else {\n\t\tfmt.Println(\"DB Connected successfully\")\n\t}\n\n\tdb.AutoMigrate(&models.Person{})\n\tdb.AutoMigrate(&models.Book{})\n\n\treturn db\n}", "func InitDBConnection() *Database {\n\thost := viper.GetString(\"db.host\")\n\tuser := viper.GetString(\"db.user\")\n\tdbname := viper.GetString(\"db.dbname\")\n\tdbConfig := fmt.Sprintf(\"host=%s user=%s dbname=%s sslmode=disable\", host, user, dbname)\n\tdb, err := gorm.Open(\"postgres\", dbConfig)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"Failed to initiate a connection to the database: %s\", err))\n\t}\n\n\tfmt.Println(\"Migrating database\")\n\tdb.AutoMigrate(&User{}, &Organization{}, &Restaurant{}, &Menu{}, &Activity{}, &OrderItem{})\n\n\treturn &Database{db}\n}", "func Connect() {\n mongoSession, err := mgo.Dial(\"localhost\")\n if err != nil {\n\tpanic(err)\n }\n\n Db = mongoSession.DB(\"eos\")\n}", "func StartDB(conf *configs.Config, log *logging.Logger) *DB {\n\tmongoDBDialInfo := &mgo.DialInfo{\n\t\tAddrs: []string{\n\t\t\tconf.Mongo.Address1,\n\t\t},\n\t\tDatabase: conf.Mongo.DatabaseName,\n\t\tTimeout: time.Second * 60,\n\t\tPoolLimit: 100,\n\t}\n\n\ts, err := mgo.DialWithInfo(mongoDBDialInfo)\n\tif err != nil {\n\t\tlog.Error(err.Error())\n\t\treturn nil\n\t}\n\ts.SetSafe(&mgo.Safe{WMode: \"majority\"})\n\n\tif err := s.Ping(); err != nil {\n\t\tlog.Error(err.Error())\n\t\treturn nil\n\t}\n\n\treturn &DB{conf: conf, log: log, session: s}\n}", "func newMongoDB(addr string, cred *mgo.Credential) (ProductDatabase, error) {\n\tconn, err := mgo.Dial(addr)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"mongo: could not dial: %v\", err)\n\t}\n\n\tif cred != nil {\n\t\tif err := conn.Login(cred); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn &mongoDB{\n\t\tconn: conn,\n\t\tc: conn.DB(\"productshelf\").C(\"products\"),\n\t}, nil\n}", "func InitDatabase() *Database {\n\t// eg. 
\"postgres://postgres:postgres@localhost/postgres?sslmode=disable\"\n\t// TODO: enable SSL on DB\n\tconn, err := sql.Open(\"postgres\", os.Getenv(\"PG_CONNECTION_STRING\"))\n\tif err != nil {\n\t\tlog.Fatal(err) // kill server if we can't use DB on startup\n\t}\n\treturn &Database{\n\t\tconn: conn,\n\t}\n}", "func InitDB() *sql.DB {\n\n\tlog.Println(\"attempting connections\")\n\n\tvar err error\n\t\n\t// Open a SQL connection to the docker container hosting the database server\n\t// Assign the connection to the \"DB\" variable\n\t// Look at how it's done in the other microservices!\n\t// \"YOUR CODE HERE\"\n\t\n\tif err != nil {\n\t\tlog.Println(\"couldnt connect\")\n\t\tpanic(err.Error())\n\t}\n\n\terr = DB.Ping()\n\tif err != nil {\n\t\tlog.Println(\"couldnt ping\")\n\t\tpanic(err.Error())\n\t}\n\n\treturn DB\n}", "func (server *Server) Initialize(Dbdriver, DbUser, DbPassword, DbPort, DbHost, DbName string) *gorm.DB {\n\tvar err error\n\n\tif Dbdriver == \"postgres\" {\n\t\tDBURL := fmt.Sprintf(\"host=%s port=%s user=%s dbname=%s sslmode=disable password=%s\", DbHost, DbPort, DbUser, DbName, DbPassword)\n\t\tserver.DB, err = gorm.Open(Dbdriver, DBURL)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot connect to %s database\", err)\n\n\t\t} else {\n\t\t\tlog.Printf(\"We are connected to the %s database\", Dbdriver)\n\t\t}\n\t}\n\n\treturn server.DB\n}", "func initDB(options Options) (*mgo.Session, error) {\n\tdialInfo := &mgo.DialInfo{\n\t\tAddrs: strings.Split(options.DBHost, \",\"),\n\t\tDatabase: options.DBName,\n\t\tUsername: options.DBUser,\n\t\tPassword: options.DBPassword,\n\t\tDialServer: func(addr *mgo.ServerAddr) (net.Conn, error) {\n\t\t\treturn tls.Dial(\"tcp\", addr.String(), &tls.Config{InsecureSkipVerify: true})\n\t\t},\n\t\tReplicaSetName: \"rs0\",\n\t\tTimeout: time.Second * 10,\n\t}\n\n\tif !options.SSL {\n\t\tdialInfo.ReplicaSetName = \"\"\n\t\tdialInfo.DialServer = nil\n\t}\n\t// connect to the database\n\tsession, err := mgo.DialWithInfo(dialInfo)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn session, err\n}", "func NewDBConn() *mgo.Database {\n\tdb := DBInfo{}\n\terr := db.loadFromConfigFile(MONGO_CONFIG_FILE)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tsession, err := mgo.Dial(db.Host)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer session.Close()\n\tsession.SetBatch(DEFAULT_MONGO_BATCH_SIZE)\n\tcollections := session.DB(db.DB)\n\tif err := collections.Login(db.User, db.Pass); err != nil {\n\t\tlog.Printf(\"Cannot login to DB.\")\n\t\treturn nil\n\t}\n\treturn collections\n}", "func (meta *Meta) init() {\n\tmeta.client = utils.CreateMongoDB(dbConfig.Str(\"address\"), log)\n\tmeta.database = meta.client.Database(dbConfig.Str(\"db\"))\n\tmeta.collection = meta.database.Collection(metaCollection)\n}", "func NewMongoDB(host, username, password, database, source string) (*DB, error) {\n\tHost := []string{host}\n\tconn, err := mgo.DialWithInfo(&mgo.DialInfo{\n\t\tAddrs: Host,\n\t\tUsername: username,\n\t\tPassword: password,\n\t\tDatabase: database,\n\t\tSource: source,\n\t})\n\tif err != nil {\n\t\treturn nil, errors.New(err.Error())\n\t}\n\treturn &DB{\n\t\tConn: conn,\n\t\tDatabaseName: database,\n\t\tCollection: make(map[TableName]*mgo.Collection),\n\t}, nil\n}", "func (db *Database) init() (*gorm.DB, error) {\n\tvar connection, err = gorm.Open(db.Driver, db.getURI())\n\tif err != nil {\n\t\tfmt.Printf(\"✖ Cannot connect to %s database\\n\", db.Driver)\n\t\tlog.Fatal(\"This is the error:\", err)\n\t} else {\n\t\tfmt.Printf(\"⚡ We are connected to the %s 
database\\n\", db.Driver)\n\t}\n\treturn connection, err\n}", "func InitDB() {\n\tconnStr := \"user=osama dbname=hackernews password=ibnjunaid \"\n\t// Use root:dbpass@tcp(172.17.0.2)/hackernews, if you're using Windows.\n\tdb, err := sql.Open(\"postgres\", connStr)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\tDb = db\n\n}", "func init() {\n\tuser := \"root\"\n\tpass := \"pwpw\"\n\tname := \"itemsDB\"\n\n\tdbconf := user + \":\" + pass + \"@/\" + name\n\tconn, err := sql.Open(\"mysql\", dbconf)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\tConn = conn\n}", "func InitializeDataAccess(dbURL string) (*DataAccess, error) {\n\tdao := DataAccess{}\n\n\tc, err := mongo.NewClient(options.Client().ApplyURI(dbURL))\n\tif err != nil {\n\t\treturn &dao, err\n\t}\n\n\terr = c.Connect(context.Background())\n\tif err != nil {\n\t\treturn &dao, err\n\t}\n\n\tdao.DbClient = c\n\n\treturn &dao, nil\n}", "func New(o ...Option) (*mongo.Database, error) {\n\tcfg := opts{}\n\n\tfor _, i := range o {\n\t\ti(&cfg)\n\t}\n\n\tif cfg.direct && cfg.cluster {\n\t\treturn nil, errors.New(\"invalid configuration -- either direct or cluster, not both\")\n\t}\n\n\tif cfg.cluster && cfg.port != 0 {\n\t\treturn nil, errors.New(\"invalid configuration -- either port or cluster, not both\")\n\t}\n\n\tvar prefix string\n\tif cfg.cluster {\n\t\tprefix = \"mongodb+srv\"\n\t} else {\n\t\tprefix = \"mongodb\"\n\t}\n\n\turi := fmt.Sprintf(\n\t\t\"%s://%s:%s@%s\",\n\t\tprefix,\n\t\tcfg.username,\n\t\tcfg.password,\n\t\tcfg.host,\n\t)\n\n\tif cfg.port != 0 {\n\t\turi += fmt.Sprintf(\":%v\", cfg.port)\n\t}\n\n\tif cfg.authdbname != \"\" {\n\t\turi += fmt.Sprintf(\"/%s\", cfg.authdbname)\n\t}\n\n\tswitch {\n\tcase cfg.direct:\n\t\turi += \"/?connect=direct\"\n\tcase cfg.retryWrites && cfg.writeConcern == \"\":\n\t\turi += \"/?retryWrites=true\"\n\tcase !cfg.retryWrites && cfg.writeConcern != \"\":\n\t\turi += \"/?w=\" + cfg.writeConcern\n\tcase cfg.retryWrites && cfg.writeConcern != \"\":\n\t\turi += `/?retryWrites=true&w=` + cfg.writeConcern\n\t}\n\n\tctx := context.TODO()\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(uri))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"uri is %s: %v\", uri, err)\n\t}\n\n\tdb := client.Database(cfg.name)\n\treturn db, nil\n}", "func init() {\n\tcfg = pkg.InitializeConfig()\n\t_, err := pkg.InitializeDb()\n\tif err != nil {\n\t\tpanic(\"failed to initialize db connection : \" + err.Error())\n\t}\n}", "func init() {\n\tdbinfo := fmt.Sprintf(\"user=%s password=%s dbname=%s sslmode=disable\",\n\t\tos.Getenv(\"POSTGRES_USER\"), os.Getenv(\"POSTGRES_PASSWORD\"), DATABASE_NAME)\n\tdb, err := sql.Open(\"postgres\", dbinfo)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tDB = db\n}", "func InitializeDB() *gorm.DB {\n\tdb, err := gorm.Open(\"mysql\", \"root:root@tcp(127.0.0.1:3306)/referee?parseTime=true&readTimeout=1s&writeTimeout=1s&timeout=1s\")\n\tCheck(err)\n\n\treturn db\n}", "func init() {\n\tconfig.Read()\n\tdao.DialInfo = &mgo.DialInfo{\n\t\tAddrs: []string{config.Server},\n\t\tDatabase: config.Database,\n\t\tUsername: config.Username,\n\t\tPassword: config.Password,\n\t}\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n\n}", "func DBInit() *gorm.DB {\n\tvar errEnv error\n\terrEnv = godotenv.Load()\n\tif errEnv != nil {\n\t\tlog.Fatalf(\"Error getting env, not comming through %v\", errEnv)\n\t} else {\n\t\tfmt.Println(\"We are getting the env values\")\n\t}\n\t// fmt.Println(\"host=\" + os.Getenv(\"DB_HOST\") + \" port=\" + 
os.Getenv(\"DB_PORT\") + \" user=\" + os.Getenv(\"DB_USER\") + \" dbname=\" + os.Getenv(\"DB_NAME\") + \" password=\" + os.Getenv(\"DB_PASSWORD\"))\n\tdb, err := gorm.Open(\"postgres\", \"host=\"+os.Getenv(\"DB_HOST\")+\" sslmode=disable port=\"+os.Getenv(\"DB_PORT\")+\" user=\"+os.Getenv(\"DB_USER\")+\" dbname=\"+os.Getenv(\"DB_NAME\")+\" password=\"+os.Getenv(\"DB_PASSWORD\"))\n\tdb.LogMode(true)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdb.AutoMigrate(structs.User{}, structs.UserSession{}, structs.Diary{})\n\n\treturn db\n}", "func NewMongoDAL(dbURI string, dbName string) (mockery_testify.DataAccessLayer, error) {\n\tsession, err := mgo.Dial(dbURI)\n\tmongo := &MongoDAL{\n\t\tsession: session,\n\t\tdbName: dbName,\n\t}\n\tfmt.Println(\"Hello\")\n\treturn mongo, err\n}", "func connectdb(ctx context.Context) (*mongo.Collection){\n client, err := mongo.NewClient(options.Client().ApplyURI(\"mongodb://localhost:27017\"))\n if err != nil {\n log.Fatal(err)\n }\n \n err = client.Connect(ctx)\n if err != nil {\n log.Fatal(err)\n }\n\n appointyDatabase := client.Database(\"appointy-task-ritvix\")\n meetingCollection := appointyDatabase.Collection(\"meetings\")\n\n //returns collection object\n return meetingCollection\n}", "func (cfg Config) Connect(ctx context.Context) (*mongo.Database, error) {\n\turi := cfg.makeURI()\n\n\tclient, err := mongo.NewClient(options.Client().ApplyURI(uri))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"couldn't connect to mongo: %v\", err)\n\t}\n\terr = client.Connect(ctx)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"mongo client couldn't connect with background context: %v\", err)\n\t}\n\n\tstarWarsDB := client.Database(cfg.DBName)\n\n\treturn starWarsDB, nil\n}", "func (mongo *mongo) Connect(dbname string) error {\n\tmongo.dbname = dbname\n\tsession, err := mgo.Dial(\"localhost:27017\")\n\n\tif err != nil {\n\t\tlog.Println(\"Error\", err)\n\t\treturn errors.New(logprefix + err.Error())\n\t}\n\n\tlog.Println(\"Connected to MongDB\")\n\tmongo.db = session.DB(mongo.dbname)\n\treturn nil\n}", "func NewMongoDB(cfg conf.Config) (error, *mgo.Database) {\n\taddress := cfg.GetString(`mongodb.address`)\n\tdatabase := cfg.GetString(`mongodb.database`)\n\tlog.Println(\"mongo config\", address, database)\n\tsession, err := mgo.Dial(address)\n\tsession.SetMode(mgo.Monotonic, true)\n\treturn err, session.DB(database)\n}", "func InitDb(){\r\n\tconnectionURL:=os.Getenv(\"CONNECTION_URL\")\r\n\tvar err error\r\n\tDBConn, err = gorm.Open(\"postgres\",connectionURL)\r\n\tif err!= nil{\r\n\t\tpanic(\"failed to connect to db\")\r\n\t}\r\n\tfmt.Println(\"db is connected lets go.........\")\r\n\tDBConn.AutoMigrate(&models.GoItems{})\r\n\tfmt.Println(\"db has been migrated\")\r\n}", "func InitDb(dbtype DBType, host string, port int,\n\tusername, password, dbname string) (url string, db DBConnector, data DataProvider, e error) {\n\n\t// initialize\n\turl = \"\"\n\tdb = nil\n\tdata = nil\n\n\tswitch dbtype {\n\n\tcase MongoDB:\n\t\turl, db, data = initMongo(host, port, username, password, dbname)\n\t\treturn url, db, data, e\n\n\tcase SQLite:\n\t\t// TODO\n\n\t}\n\treturn url, db, data, fmt.Errorf(\"Unknown database: cannot connect.\\n\")\n}", "func (mdbc *MongoDbController) initNonceDatabase(dbName string) error {\n\tdb := mdbc.MongoClient.Database(dbName)\n\n\tjsonSchema := bson.M{\n\t\t\"bsonType\": \"object\",\n\t\t\"required\": []string{\"hash\", \"time\", \"remoteAddress\"},\n\t\t\"properties\": bson.M{\n\t\t\t\"hash\": bson.M{\n\t\t\t\t\"bsonType\": 
\"string\",\n\t\t\t\t\"description\": \"hash is required and must be a string\",\n\t\t\t},\n\t\t\t\"time\": bson.M{\n\t\t\t\t\"bsonType\": \"long\",\n\t\t\t\t\"description\": \"time is required and must be a 64-bit integer (aka a long)\",\n\t\t\t},\n\t\t\t\"remoteAddress\": bson.M{\n\t\t\t\t\"bsonType\": \"string\",\n\t\t\t\t\"description\": \"remoteAddress is required and must be a string\",\n\t\t\t},\n\t\t},\n\t}\n\n\tcolOpts := options.CreateCollection().SetValidator(bson.M{\"$jsonSchema\": jsonSchema})\n\n\tcreateCollectionErr := db.CreateCollection(context.TODO(), \"authNonces\", colOpts)\n\n\tif createCollectionErr != nil {\n\t\treturn dbController.NewDBError(createCollectionErr.Error())\n\t}\n\n\tmodels := []mongo.IndexModel{\n\t\t{\n\t\t\tKeys: bson.D{{Key: \"hash\", Value: 1}},\n\t\t\tOptions: options.Index().SetUnique(true),\n\t\t},\n\t\t{\n\t\t\tKeys: bson.D{{Key: \"remoteAddress\", Value: 1}},\n\t\t\tOptions: options.Index().SetUnique(true),\n\t\t},\n\t}\n\n\topts := options.CreateIndexes().SetMaxTime(2 * time.Second)\n\n\tcollection, _, _ := mdbc.getCollection(\"authNonces\")\n\tnames, setIndexErr := collection.Indexes().CreateMany(context.TODO(), models, opts)\n\n\tif setIndexErr != nil {\n\t\treturn dbController.NewDBError(setIndexErr.Error())\n\t}\n\n\tfmt.Printf(\"created indexes %v\\n\", names)\n\n\treturn nil\n}", "func InitDatabase() {\n\tvar err error\n\tdsn := \"root:@tcp(127.0.0.1)/test_server?charset=utf8mb4&parseTime=True&loc=Local\"\n\tDB, err = gorm.Open(mysql.Open(dsn), &gorm.Config{})\n\n\tif err != nil {\n\t\tpanic(\"database is error\")\n\t}\n\n\tDB.AutoMigrate(&model.User{})\n\n\tfmt.Println(\"Database Connected\")\n}", "func init() {\n\tos.RemoveAll(DataPath)\n\n\tdc := DatabaseConfig{\n\t\tDataPath: DataPath,\n\t\tIndexDepth: 4,\n\t\tPayloadSize: 16,\n\t\tBucketDuration: 3600000000000,\n\t\tResolution: 60000000000,\n\t\tSegmentSize: 100000,\n\t}\n\n\tcfg := &ServerConfig{\n\t\tVerboseLogs: true,\n\t\tRemoteDebug: true,\n\t\tListenAddress: Address,\n\t\tDatabases: map[string]DatabaseConfig{\n\t\t\tDatabase: dc,\n\t\t},\n\t}\n\n\tdbs := map[string]kdb.Database{}\n\tdb, err := dbase.New(dbase.Options{\n\t\tDatabaseName: Database,\n\t\tDataPath: dc.DataPath,\n\t\tIndexDepth: dc.IndexDepth,\n\t\tPayloadSize: dc.PayloadSize,\n\t\tBucketDuration: dc.BucketDuration,\n\t\tResolution: dc.Resolution,\n\t\tSegmentSize: dc.SegmentSize,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdbs[\"test\"] = db\n\td = db\n\to = dc\n\n\ts = NewServer(dbs, cfg)\n\tgo s.Listen()\n\n\t// wait for the server to start\n\ttime.Sleep(time.Second * 2)\n\n\tc = NewClient(Address)\n\tif err := c.Connect(); err != nil {\n\t\tpanic(err)\n\t}\n}" ]
[ "0.77988166", "0.74417037", "0.74027896", "0.738605", "0.7372712", "0.73211294", "0.7260213", "0.72425145", "0.72104573", "0.7123856", "0.712385", "0.71149546", "0.70497435", "0.7010737", "0.7005248", "0.6976649", "0.6960267", "0.69152534", "0.6912278", "0.681296", "0.6708742", "0.6677732", "0.6657569", "0.66224736", "0.66113776", "0.66090477", "0.65876806", "0.6579933", "0.6579642", "0.6562167", "0.6561471", "0.65499836", "0.6548233", "0.6537746", "0.6534792", "0.65277267", "0.6524155", "0.6510499", "0.649049", "0.64801204", "0.6469953", "0.64682543", "0.6455135", "0.6451344", "0.64512974", "0.6451145", "0.6430666", "0.6416955", "0.63762724", "0.63740754", "0.63661957", "0.6357936", "0.6352592", "0.63442147", "0.63351744", "0.6317108", "0.6317108", "0.63023037", "0.6298785", "0.62973285", "0.62725264", "0.6267327", "0.6264052", "0.6252257", "0.6247916", "0.6245565", "0.62399006", "0.62340677", "0.6232668", "0.6231131", "0.62310743", "0.623097", "0.62229717", "0.62223846", "0.62191767", "0.62164783", "0.6213312", "0.6200781", "0.61926204", "0.6186787", "0.617989", "0.61790866", "0.6172037", "0.6145748", "0.61430365", "0.6138552", "0.6128619", "0.61243725", "0.6122099", "0.6116738", "0.6114893", "0.611076", "0.6109973", "0.6108826", "0.61054707", "0.6104477", "0.60930985", "0.6087674", "0.608398", "0.6083267" ]
0.8373997
0
/ ProjectsprojectserviceClassApiService Get service class
func (a *ProjectsprojectserviceClassApiService) GetServiceClass(ctx context.Context, name string, project string) (ServiceClass, *http.Response, error) {
	var (
		localVarHttpMethod = strings.ToUpper("Get")
		localVarPostBody interface{}
		localVarFileName string
		localVarFileBytes []byte
		localVarReturnValue ServiceClass
	)

	// create path and map variables
	localVarPath := a.client.cfg.BasePath + "/projects/{project}/service-class/{name}"
	localVarPath = strings.Replace(localVarPath, "{"+"name"+"}", fmt.Sprintf("%v", name), -1)
	localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1)

	localVarHeaderParams := make(map[string]string)
	localVarQueryParams := url.Values{}
	localVarFormParams := url.Values{}

	// to determine the Content-Type header
	localVarHttpContentTypes := []string{"application/json"}

	// set Content-Type header
	localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)
	if localVarHttpContentType != "" {
		localVarHeaderParams["Content-Type"] = localVarHttpContentType
	}

	// to determine the Accept header
	localVarHttpHeaderAccepts := []string{"application/json"}

	// set Accept header
	localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)
	if localVarHttpHeaderAccept != "" {
		localVarHeaderParams["Accept"] = localVarHttpHeaderAccept
	}

	r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)
	if err != nil {
		return localVarReturnValue, nil, err
	}

	localVarHttpResponse, err := a.client.callAPI(r)
	if err != nil || localVarHttpResponse == nil {
		return localVarReturnValue, localVarHttpResponse, err
	}

	localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)
	localVarHttpResponse.Body.Close()
	if err != nil {
		return localVarReturnValue, localVarHttpResponse, err
	}

	if localVarHttpResponse.StatusCode < 300 {
		// If we succeed, return the data, otherwise pass on to decode error.
		err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type"));
		if err == nil {
			return localVarReturnValue, localVarHttpResponse, err
		}
	}

	if localVarHttpResponse.StatusCode >= 300 {
		newErr := GenericSwaggerError{
			body: localVarBody,
			error: localVarHttpResponse.Status,
		}
		if localVarHttpResponse.StatusCode == 200 {
			var v ServiceClass
			err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type"));
			if err != nil {
				newErr.error = err.Error()
				return localVarReturnValue, localVarHttpResponse, newErr
			}
			newErr.model = v
			return localVarReturnValue, localVarHttpResponse, newErr
		}
		return localVarReturnValue, localVarHttpResponse, newErr
	}

	return localVarReturnValue, localVarHttpResponse, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetClassOfService(c common.Client, uri string) (*ClassOfService, error) {\n\tresp, err := c.Get(uri)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tvar classofservice ClassOfService\n\terr = json.NewDecoder(resp.Body).Decode(&classofservice)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclassofservice.SetClient(c)\n\treturn &classofservice, nil\n}", "func GetServiceClassAndPlans(client *occlient.Client, serviceName string) (ServiceClass, []ServicePlan, error) {\n\tresult, err := client.GetClusterServiceClass(serviceName)\n\tif err != nil {\n\t\treturn ServiceClass{}, nil, errors.Wrap(err, \"unable to get the given service\")\n\t}\n\n\tvar meta map[string]interface{}\n\terr = json.Unmarshal(result.Spec.ExternalMetadata.Raw, &meta)\n\tif err != nil {\n\t\treturn ServiceClass{}, nil, errors.Wrap(err, \"unable to unmarshal data the given service\")\n\t}\n\n\tservice := ServiceClass{\n\t\tName: result.Spec.ExternalName,\n\t\tBindable: result.Spec.Bindable,\n\t\tShortDescription: result.Spec.Description,\n\t\tTags: result.Spec.Tags,\n\t\tServiceBrokerName: result.Spec.ClusterServiceBrokerName,\n\t}\n\n\tif val, ok := meta[\"longDescription\"]; ok {\n\t\tservice.LongDescription = val.(string)\n\t}\n\n\tif val, ok := meta[\"dependencies\"]; ok {\n\t\tversions := fmt.Sprint(val)\n\t\tversions = strings.Replace(versions, \"[\", \"\", -1)\n\t\tversions = strings.Replace(versions, \"]\", \"\", -1)\n\t\tservice.VersionsAvailable = strings.Split(versions, \" \")\n\t}\n\n\t// get the plans according to the service name\n\tplanResults, err := client.GetClusterPlansFromServiceName(result.Name)\n\tif err != nil {\n\t\treturn ServiceClass{}, nil, errors.Wrap(err, \"unable to get plans for the given service\")\n\t}\n\n\tvar plans []ServicePlan\n\tfor _, result := range planResults {\n\t\tplan, err := NewServicePlan(result)\n\t\tif err != nil {\n\t\t\treturn ServiceClass{}, nil, err\n\t\t}\n\n\t\tplans = append(plans, plan)\n\t}\n\n\treturn service, plans, nil\n}", "func (e *Explorer) GetClass(ctx context.Context,\n\tparams dto.GetParams,\n) ([]interface{}, error) {\n\tif params.Pagination == nil {\n\t\tparams.Pagination = &filters.Pagination{\n\t\t\tOffset: 0,\n\t\t\tLimit: 100,\n\t\t}\n\t}\n\n\tif err := e.validateFilters(params.Filters); err != nil {\n\t\treturn nil, errors.Wrap(err, \"invalid 'where' filter\")\n\t}\n\n\tif err := e.validateSort(params.ClassName, params.Sort); err != nil {\n\t\treturn nil, errors.Wrap(err, \"invalid 'sort' parameter\")\n\t}\n\n\tif err := e.validateCursor(params); err != nil {\n\t\treturn nil, errors.Wrap(err, \"cursor api: invalid 'after' parameter\")\n\t}\n\n\tif params.KeywordRanking != nil {\n\t\treturn e.getClassKeywordBased(ctx, params)\n\t}\n\n\tif params.NearVector != nil || params.NearObject != nil || len(params.ModuleParams) > 0 {\n\t\treturn e.getClassVectorSearch(ctx, params)\n\t}\n\n\treturn e.getClassList(ctx, params)\n}", "func (e *Explorer) GetClass(ctx context.Context,\n\tparams GetParams) ([]interface{}, error) {\n\tif params.Pagination == nil {\n\t\tparams.Pagination = &filters.Pagination{\n\t\t\tLimit: 100,\n\t\t}\n\t}\n\n\tif params.NearVector != nil || params.NearObject != nil || len(params.ModuleParams) > 0 {\n\t\treturn e.getClassExploration(ctx, params)\n\t}\n\n\treturn e.getClassList(ctx, params)\n}", "func (pr PlanReference) GetSpecifiedServiceClass() string {\n\tif pr.ServiceClassExternalName != \"\" {\n\t\treturn pr.ServiceClassExternalName\n\t}\n\n\tif pr.ServiceClassExternalID != \"\" {\n\t\treturn 
pr.ServiceClassExternalID\n\t}\n\n\tif pr.ServiceClassName != \"\" {\n\t\treturn pr.ServiceClassName\n\t}\n\n\treturn \"\"\n}", "func (c *serviceClass) getSingleton(ns, name string) (*kapi.Service, error) {\n\treturn c.rk.clientset.Core().Services(ns).Get(name, meta.GetOptions{})\n}", "func (client *Client) Class(name string) *Class {\n\treturn &Class{\n\t\tc: client,\n\t\tName: name,\n\t}\n}", "func (e *Explorer) GetClass(ctx context.Context,\n\tparams GetParams) ([]interface{}, error) {\n\tif params.Pagination == nil {\n\t\tparams.Pagination = &filters.Pagination{\n\t\t\tLimit: 100,\n\t\t}\n\t}\n\n\tif params.Explore != nil {\n\t\treturn e.getClassExploration(ctx, params)\n\t}\n\n\treturn e.getClassList(ctx, params)\n}", "func GetClasses(req *restful.Request, rsp *restful.Response) {\n\t_, pid, email, _ := utils.ResolveHeaderInfo(req)\n\tresponse, err := newcoreclient().GetClasses(ctx(req), &proto.GetClassRequest{\n\t\tPid: pid,\n\t\tEmail: email,\n\t})\n\n\tif err != nil {\n\t\twriteError(rsp, errorcode.Pipe, err.Error())\n\t\treturn\n\t}\n\n\trsp.WriteAsJson(response)\n}", "func GetBrokerByServiceClass(storage Storage, id string) (*model.ServiceBroker, error) {\n\tlog.Printf(\"Getting broker by service id %s\\n\", id)\n\n\tc, err := storage.GetInventory()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, service := range c.Services {\n\t\tif service.ID == id {\n\t\t\tlog.Printf(\"Found service type %s\\n\", service.Name)\n\t\t\treturn storage.GetBroker(service.Broker)\n\t\t}\n\t}\n\treturn nil, serviceNotFound{id}\n}", "func getClass(line []byte) string {\n\tvar reportPeek gpsdReport\n\tif err := json.Unmarshal(line, &reportPeek); err != nil {\n\t\tfmt.Printf(\"failed to parse class type: %s\\n\", err)\n\t\treturn \"\"\n\t}\n\treturn reportPeek.Class\n}", "func (d *EmulatedBTPeerDevice) ClassOfService() int {\n\treturn d.cache.classOfService\n}", "func (p *APIProjectRef) ToService() (interface{}, error) {\n\n\tcommitQueue, err := p.CommitQueue.ToService()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"can't convert commit queue params\")\n\t}\n\n\ti, err := p.TaskSync.ToService()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"cannot convert API task sync options to service representation\")\n\t}\n\ttaskSync, ok := i.(model.TaskSyncOptions)\n\tif !ok {\n\t\treturn nil, errors.Errorf(\"expected task sync options but was actually '%T'\", i)\n\t}\n\n\ti, err = p.WorkstationConfig.ToService()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"cannot convert API workstation config\")\n\t}\n\tworkstationConfig, ok := i.(model.WorkstationConfig)\n\tif !ok {\n\t\treturn nil, errors.Errorf(\"expected workstation config but was actually '%T'\", i)\n\t}\n\n\tprojectRef := model.ProjectRef{\n\t\tOwner: utility.FromStringPtr(p.Owner),\n\t\tRepo: utility.FromStringPtr(p.Repo),\n\t\tBranch: utility.FromStringPtr(p.Branch),\n\t\tEnabled: utility.BoolPtrCopy(p.Enabled),\n\t\tPrivate: utility.BoolPtrCopy(p.Private),\n\t\tRestricted: utility.BoolPtrCopy(p.Restricted),\n\t\tBatchTime: p.BatchTime,\n\t\tRemotePath: utility.FromStringPtr(p.RemotePath),\n\t\tId: utility.FromStringPtr(p.Id),\n\t\tIdentifier: utility.FromStringPtr(p.Identifier),\n\t\tDisplayName: utility.FromStringPtr(p.DisplayName),\n\t\tDeactivatePrevious: utility.BoolPtrCopy(p.DeactivatePrevious),\n\t\tTracksPushEvents: utility.BoolPtrCopy(p.TracksPushEvents),\n\t\tDefaultLogger: utility.FromStringPtr(p.DefaultLogger),\n\t\tPRTestingEnabled: utility.BoolPtrCopy(p.PRTestingEnabled),\n\t\tGitTagVersionsEnabled: 
utility.BoolPtrCopy(p.GitTagVersionsEnabled),\n\t\tGithubChecksEnabled: utility.BoolPtrCopy(p.GithubChecksEnabled),\n\t\tUseRepoSettings: p.UseRepoSettings,\n\t\tRepoRefId: utility.FromStringPtr(p.RepoRefId),\n\t\tCommitQueue: commitQueue.(model.CommitQueueParams),\n\t\tTaskSync: taskSync,\n\t\tWorkstationConfig: workstationConfig,\n\t\tHidden: utility.BoolPtrCopy(p.Hidden),\n\t\tPatchingDisabled: utility.BoolPtrCopy(p.PatchingDisabled),\n\t\tRepotrackerDisabled: utility.BoolPtrCopy(p.RepotrackerDisabled),\n\t\tDispatchingDisabled: utility.BoolPtrCopy(p.DispatchingDisabled),\n\t\tDisabledStatsCache: utility.BoolPtrCopy(p.DisabledStatsCache),\n\t\tFilesIgnoredFromCache: utility.FromStringPtrSlice(p.FilesIgnoredFromCache),\n\t\tNotifyOnBuildFailure: utility.BoolPtrCopy(p.NotifyOnBuildFailure),\n\t\tSpawnHostScriptPath: utility.FromStringPtr(p.SpawnHostScriptPath),\n\t\tAdmins: utility.FromStringPtrSlice(p.Admins),\n\t\tGitTagAuthorizedUsers: utility.FromStringPtrSlice(p.GitTagAuthorizedUsers),\n\t\tGitTagAuthorizedTeams: utility.FromStringPtrSlice(p.GitTagAuthorizedTeams),\n\t\tGithubTriggerAliases: utility.FromStringPtrSlice(p.GithubTriggerAliases),\n\t}\n\n\t// Copy triggers\n\tif p.Triggers != nil {\n\t\ttriggers := []model.TriggerDefinition{}\n\t\tfor _, t := range p.Triggers {\n\t\t\ti, err = t.ToService()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, \"cannot convert API trigger definition\")\n\t\t\t}\n\t\t\tnewTrigger, ok := i.(model.TriggerDefinition)\n\t\t\tif !ok {\n\t\t\t\treturn nil, errors.Errorf(\"expected trigger definition but was actually '%T'\", i)\n\t\t\t}\n\t\t\ttriggers = append(triggers, newTrigger)\n\t\t}\n\t\tprojectRef.Triggers = triggers\n\t}\n\n\t// Copy periodic builds\n\tif p.PeriodicBuilds != nil {\n\t\tbuilds := []model.PeriodicBuildDefinition{}\n\t\tfor _, t := range p.Triggers {\n\t\t\ti, err = t.ToService()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, \"cannot convert API periodic build\")\n\t\t\t}\n\t\t\tnewBuild, ok := i.(model.PeriodicBuildDefinition)\n\t\t\tif !ok {\n\t\t\t\treturn nil, errors.Errorf(\"expected periodic build definition but was actually '%T'\", i)\n\t\t\t}\n\t\t\tbuilds = append(builds, newBuild)\n\t\t}\n\t\tprojectRef.PeriodicBuilds = builds\n\t}\n\n\tif p.PatchTriggerAliases != nil {\n\t\tpatchTriggers := []patch.PatchTriggerDefinition{}\n\t\tfor _, t := range p.PatchTriggerAliases {\n\t\t\ti, err = t.ToService()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.Wrap(err, \"cannot convert API patch trigger definition\")\n\t\t\t}\n\t\t\ttrigger, ok := i.(patch.PatchTriggerDefinition)\n\t\t\tif !ok {\n\t\t\t\treturn nil, errors.Errorf(\"expected patch trigger definition but was actually '%T'\", i)\n\t\t\t}\n\t\t\tpatchTriggers = append(patchTriggers, trigger)\n\t\t}\n\t\tprojectRef.PatchTriggerAliases = patchTriggers\n\t}\n\treturn &projectRef, nil\n}", "func (o CSharpFunctionBindingResponseOutput) Class() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v CSharpFunctionBindingResponse) *string { return v.Class }).(pulumi.StringPtrOutput)\n}", "func GetService(nbmaster string, httpClient *http.Client, jwt string, host string, hostUuid string, serviceName string) {\r\n fmt.Printf(\"\\nGet NetBackup service %s on %s...\\n\\n\", serviceName, host)\r\n\r\n uri := \"https://\" + nbmaster + \":\" + port + \"/netbackup/admin/hosts/\" + hostUuid + \"/services/\" + serviceName\r\n\r\n request, _ := http.NewRequest(http.MethodGet, uri, nil)\r\n request.Header.Add(\"Authorization\", jwt);\r\n 
request.Header.Add(\"Content-Type\", contentTypeV3);\r\n\r\n response, err := httpClient.Do(request)\r\n\r\n if err != nil {\r\n fmt.Printf(\"The HTTP request failed with error: %s\\n\", err)\r\n panic(\"Unable to get services\")\r\n } else {\r\n if response.StatusCode == 200 {\r\n resp, _ := ioutil.ReadAll(response.Body)\r\n var obj interface{}\r\n json.Unmarshal(resp, &obj)\r\n service := obj.(map[string]interface{})[\"data\"].(map[string]interface{})\r\n\r\n fmt.Printf(\"id status\\n\");\r\n fmt.Printf(\"============.=========\\n\");\r\n id := (service)[\"id\"]\r\n status := ((service)[\"attributes\"]).(map[string]interface{})[\"status\"]\r\n\r\n fmt.Printf(\"%-12s %s\\n\", id, status);\r\n } else {\r\n printErrorResponse(response)\r\n }\r\n }\r\n}", "func (r *ClusterTopologyReconciler) getClass(ctx context.Context, cluster *clusterv1.Cluster) (*clusterTopologyClass, error) {\n\t// TODO: add get class logic; also remove nolint exception from clusterTopologyClass and machineDeploymentTopologyClass\n\treturn nil, nil\n}", "func (m *MethodDescriptor) GetService() *ServiceDescriptor { return m.Service }", "func Service(name string) *js.Object {\n\treturn js.Global.Get(\"angular\").Call(\"element\", js.Global.Get(\"document\")).Call(\"injector\").Call(\"get\", name)\n}", "func Get() *Service { return singleton }", "func GetClass(err error) api.Result {\n\tif err == nil {\n\t\treturn api.ResultUndefined\n\t}\n\tif annotation := (*errorClassAnnotation)(nil); errors.As(err, &annotation) {\n\t\treturn annotation.class\n\t}\n\treturn api.ResultUndefined\n}", "func GetClass(name string) (CtrlGroup, bool) {\n\treturn rdt.getClass(name)\n}", "func (id ID) Class() Class {\n\treturn object_getClass(id)\n}", "func GetOneServiceType(c *gin.Context) {\n\n\tid := c.Param(\"id\")\n\n\tID, err := strconv.Atoi(id)\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\n\t\t\t\"err\": err,\n\t\t})\n\t}\n\n\tserviceType, err := repository.ServiceTypeGetOne(ID)\n\n\tif err != nil {\n\t\tc.JSON(http.StatusInternalServerError, gin.H{\n\t\t\t\"err\": err,\n\t\t})\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"result\": serviceType,\n\t})\n\n}", "func buildGetClasses(dbSchema *schema.Schema, k kind.Kind, semanticSchema *models.SemanticSchema, knownClasses *map[string]*graphql.Object) (*graphql.Object, error) {\n\tclassFields := graphql.Fields{}\n\n\tvar kindName string\n\tswitch k {\n\tcase kind.THING_KIND:\n\t\tkindName = \"Thing\"\n\tcase kind.ACTION_KIND:\n\t\tkindName = \"Action\"\n\t}\n\n\tfor _, class := range semanticSchema.Classes {\n\t\tclassField, err := buildGetClass(dbSchema, k, class, knownClasses)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Could not build class for %s\", class.Class)\n\t\t}\n\t\tclassFields[class.Class] = classField\n\t}\n\n\tclasses := graphql.NewObject(graphql.ObjectConfig{\n\t\tName: fmt.Sprintf(\"WeaviateLocalGet%ssObj\", kindName),\n\t\tFields: classFields,\n\t\tDescription: fmt.Sprintf(\"Type of %ss i.e. 
%ss classes to Get on the Local Weaviate\", kindName, kindName),\n\t})\n\n\treturn classes, nil\n}", "func NewService(c Config) *Service {\n \n s := &Service{}\n s.instance = c.Instance\n s.hostname = c.Hostname\n s.userAgent = c.UserAgent\n s.port = c.Endpoint\n s.router = mux.NewRouter()\n s.entityHandler = c.EntityHandler\n s.readTimeout = c.ReadTimeout\n s.writeTimeout = c.WriteTimeout\n s.idleTimeout = c.IdleTimeout\n \n if c.Name == \"\" {\n s.name = \"service\"\n }else{\n s.name = c.Name\n }\n \n if c.Debug || os.Getenv(\"GOREST_DEBUG\") == \"true\" {\n s.debug = true\n }\n \n if c.TraceRegexps != nil {\n if s.traceRequests == nil {\n s.traceRequests = make(map[string]*regexp.Regexp)\n }\n for _, e := range c.TraceRegexps {\n s.traceRequests[e.String()] = e\n }\n }\n if t := os.Getenv(\"GOREST_TRACE\"); t != \"\" {\n if s.traceRequests == nil {\n s.traceRequests = make(map[string]*regexp.Regexp)\n }\n for _, e := range strings.Split(t, \";\") {\n s.traceRequests[e] = regexp.MustCompile(e)\n }\n }\n if s.debug {\n for k, _ := range s.traceRequests {\n fmt.Println(\"rest: trace:\", k)\n }\n }\n \n s.suppress = make(map[string]struct{})\n if v := os.Getenv(\"GOREST_TRACE_SUPPRESS_HEADERS\"); v != \"\" {\n if !strings.EqualFold(v, \"none\") {\n for _, e := range strings.Split(v, \",\") {\n s.suppress[strings.ToLower(e)] = struct{}{}\n }\n }\n }else{\n s.suppress[\"authorization\"] = struct{}{}\n }\n \n return s\n}", "func GetDetailClass(c *gin.Context) {\n\t// Initialize database connection\n\tdb := config.DatabaseConn()\n\n\t// Declare UUID parameters\n\tUuidClassroom := c.Param(\"UuidClassroom\")\n\n\t// Declare models dto\n\tvar detailClassroom dto.Classrooms\n\n\t// Custom select columns\n\tdb.Select(\"classrooms.uuid_classroom, classrooms.classroom_name, classrooms.classroom_time, classrooms.room, participants.participants_name, participants.participants_address, participants.participants_gender, participants.participants_phone\").\n\t\tJoins(\"INNER JOIN participants ON participants.uuid_participants=classrooms.uuid_participants\").\n\t\tWhere(\"classrooms.uuid_classroom = ?\", UuidClassroom).\n\t\tFind(&detailClassroom)\n\n\t// Find the record in database\n\tdb.Where(\"classrooms.uuid_classroom = ?\", UuidClassroom).Find(&detailClassroom)\n\n\t// Checking in database\n\tif db.Where(\"classrooms.uuid_classroom = ?\", UuidClassroom).Find(&detailClassroom).RecordNotFound() {\n\t\tc.JSON(http.StatusNotFound, gin.H{\"error\": \"Record not found\"})\n\t} else {\n\t\tc.JSON(http.StatusOK, gin.H{\"data\": detailClassroom})\n\t}\n}", "func (system *System) gen_class_list_endpoint(req web.RequestInterface) *web.ResponseStatus {\n\n\tclass := system.DB.ClassNameIndex(req.Param(\"$class\").(string))\n\n\tvar limit int64 = -1\n\tvar page int64 = -1\n\n\tif req.Param(\"limit\") != nil { limit = req.Param(\"limit\").(int64) }\n\tif req.Param(\"page\") != nil { page = req.Param(\"page\").(int64) }\n\n\tswitch req.Param(\"mode\").(string) {\n\n\t\tcase \"list\":\n\n\t\t\tok, results := system.DBClient.QueryClassList(class, limit, page); if !ok { return req.Fail() }\n\t\t\treturn req.Respond(results)\n\n\t\tcase \"export\":\n\n\t\t\tif limit > EXPORT_HARD_LIMIT { limit = EXPORT_HARD_LIMIT }\n\n\t\t\tok, results := system.DBClient.QueryClassList(class, limit, page); if !ok { return req.Fail() }\n\n\t\t\tsystem.DB.ExportVertexList(results)\n\n\t\t\treturn req.Respond(results)\n\n\t}\n\n\treturn req.Fail()\n}", "func GetClass() core.RecordRef {\n\treturn ClassReference\n}", "func (c *configuration) 
Service(clientSet ClientSet) *Service {\n\tif clientSet != nil {\n\t\treturn NewService(clientSet)\n\t}\n\treturn nil\n\n}", "func buildGetClass(dbSchema *schema.Schema, k kind.Kind, class *models.SemanticSchemaClass, knownClasses *map[string]*graphql.Object) (*graphql.Field, error) {\n\tclassObject := graphql.NewObject(graphql.ObjectConfig{\n\t\tName: class.Class,\n\t\tFields: (graphql.FieldsThunk)(func() graphql.Fields {\n\t\t\tclassProperties := graphql.Fields{}\n\n\t\t\tclassProperties[\"uuid\"] = &graphql.Field{\n\t\t\t\tDescription: \"UUID of the thing or action given by the local Weaviate instance\",\n\t\t\t\tType: graphql.String,\n\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\tfmt.Printf(\"WHOOPTYDOO uuid\\n\")\n\t\t\t\t\treturn \"uuid\", nil\n\t\t\t\t},\n\t\t\t}\n\n\t\t\tfor _, property := range class.Properties {\n\t\t\t\tpropertyType, err := dbSchema.FindPropertyDataType(property.AtDataType)\n\t\t\t\tif err != nil {\n\t\t\t\t\t// We can't return an error in this FieldsThunk function, so we need to panic\n\t\t\t\t\tpanic(fmt.Sprintf(\"buildGetClass: wrong propertyType for %s.%s.%s; %s\", k.Name(), class.Class, property.Name, err.Error()))\n\t\t\t\t}\n\n\t\t\t\tvar propertyField *graphql.Field\n\n\t\t\t\tif propertyType.IsPrimitive() {\n\t\t\t\t\tswitch propertyType.AsPrimitive() {\n\n\t\t\t\t\tcase schema.DataTypeString:\n\t\t\t\t\t\tpropertyField = &graphql.Field{\n\t\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\t\tType: graphql.String,\n\t\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\t\tfmt.Printf(\"GET PRIMITIVE PROP: string\\n\")\n\t\t\t\t\t\t\t\treturn \"primitive string\", nil\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tcase schema.DataTypeInt:\n\t\t\t\t\t\tpropertyField = &graphql.Field{\n\t\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\t\tType: graphql.Int,\n\t\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\t\tfmt.Printf(\"GET PRIMITIVE PROP: int\\n\")\n\t\t\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tcase schema.DataTypeNumber:\n\t\t\t\t\t\tpropertyField = &graphql.Field{\n\t\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\t\tType: graphql.Float,\n\t\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\t\tfmt.Printf(\"GET PRIMITIVE PROP: float\\n\")\n\t\t\t\t\t\t\t\treturn 4.2, nil\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tcase schema.DataTypeBoolean:\n\t\t\t\t\t\tpropertyField = &graphql.Field{\n\t\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\t\tType: graphql.Boolean,\n\t\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\t\tfmt.Printf(\"GET PRIMITIVE PROP: bool\\n\")\n\t\t\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tcase schema.DataTypeDate:\n\t\t\t\t\t\tpropertyField = &graphql.Field{\n\t\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\t\tType: graphql.String, // String since no graphql date datatype exists\n\t\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\t\tfmt.Printf(\"GET PRIMITIVE PROP: date\\n\")\n\t\t\t\t\t\t\t\treturn \"somedate\", nil\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t}\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tpanic(fmt.Sprintf(\"buildGetClass: unknown primitive type for %s.%s.%s; %s\", k.Name(), class.Class, property.Name, propertyType.AsPrimitive()))\n\t\t\t\t\t}\n\n\t\t\t\t\tpropertyField.Name = 
property.Name\n\t\t\t\t\tclassProperties[property.Name] = propertyField\n\t\t\t\t} else {\n\t\t\t\t\t// This is a reference\n\t\t\t\t\trefClasses := propertyType.Classes()\n\t\t\t\t\tpropertyName := strings.Title(property.Name)\n\t\t\t\t\tdataTypeClasses := make([]*graphql.Object, len(refClasses))\n\n\t\t\t\t\tfor index, refClassName := range refClasses {\n\t\t\t\t\t\trefClass, ok := (*knownClasses)[string(refClassName)]\n\n\t\t\t\t\t\tif !ok {\n\t\t\t\t\t\t\tpanic(fmt.Sprintf(\"buildGetClass: unknown referenced class type for %s.%s.%s; %s\", k.Name(), class.Class, property.Name, refClassName))\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tdataTypeClasses[index] = refClass\n\t\t\t\t\t}\n\n\t\t\t\t\tclassUnion := graphql.NewUnion(graphql.UnionConfig{\n\t\t\t\t\t\tName: fmt.Sprintf(\"%s%s%s\", class.Class, propertyName, \"Obj\"),\n\t\t\t\t\t\tTypes: dataTypeClasses,\n\t\t\t\t\t\tResolveType: func(p graphql.ResolveTypeParams) *graphql.Object {\n\t\t\t\t\t\t\t// TODO: inspect type of result.\n\t\t\t\t\t\t\treturn (*knownClasses)[\"City\"]\n\t\t\t\t\t\t\tfmt.Printf(\"Resolver: WHOOPTYDOO\\n\")\n\t\t\t\t\t\t\treturn nil\n\t\t\t\t\t\t},\n\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t})\n\n\t\t\t\t\t// TODO: Check cardinality\n\n\t\t\t\t\tclassProperties[propertyName] = &graphql.Field{\n\t\t\t\t\t\tType: classUnion,\n\t\t\t\t\t\tDescription: property.Description,\n\t\t\t\t\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\t\t\t\t\tfmt.Printf(\"- Resolve action property field (ref?)\\n\")\n\t\t\t\t\t\t\tfmt.Printf(\"WHOOPTYDOO2\\n\")\n\t\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t\t},\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn classProperties\n\t\t}),\n\t\tDescription: class.Description,\n\t})\n\n\t(*knownClasses)[class.Class] = classObject\n\n\tclassField := graphql.Field{\n\t\tType: graphql.NewList(classObject),\n\t\tDescription: class.Description,\n\t\tArgs: graphql.FieldConfigArgument{\n\t\t\t\"first\": &graphql.ArgumentConfig{\n\t\t\t\tDescription: \"Pagination option, show the first x results\",\n\t\t\t\tType: graphql.Int,\n\t\t\t},\n\t\t\t\"after\": &graphql.ArgumentConfig{\n\t\t\t\tDescription: \"Pagination option, show the results after the first x results\",\n\t\t\t\tType: graphql.Int,\n\t\t\t},\n\t\t},\n\t\tResolve: func(p graphql.ResolveParams) (interface{}, error) {\n\t\t\tfmt.Printf(\"- thing class (supposed to extract pagination, now return nil)\\n\")\n\t\t\tfiltersAndResolver := p.Source.(*filtersAndResolver)\n\n\t\t\tpagination, err := common.ExtractPaginationFromArgs(p.Args)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// There can only be exactly one graphql_ast.Field; it is the class name.\n\t\t\tif len(p.Info.FieldASTs) != 1 {\n\t\t\t\tpanic(\"Only one Field expected here\")\n\t\t\t}\n\n\t\t\tselectionsOfClass := p.Info.FieldASTs[0].SelectionSet\n\t\t\tproperties, err := extractProperties(selectionsOfClass)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tparams := LocalGetClassParams{\n\t\t\t\tFilters: filtersAndResolver.filters,\n\t\t\t\tKind: k,\n\t\t\t\tClassName: class.Class,\n\t\t\t\tPagination: pagination,\n\t\t\t\tProperties: properties,\n\t\t\t}\n\n\t\t\tpromise, err := filtersAndResolver.resolver.LocalGetClass(&params)\n\t\t\treturn promise, err\n\t\t},\n\t}\n\n\treturn &classField, nil\n}", "func (c *ApiService) FetchService(Sid string) (*VerifyV2Service, error) {\n\tpath := \"/v2/Services/{Sid}\"\n\tpath = strings.Replace(path, \"{\"+\"Sid\"+\"}\", Sid, -1)\n\n\tdata := url.Values{}\n\theaders := 
make(map[string]interface{})\n\n\tresp, err := c.requestHandler.Get(c.baseURL+path, data, headers)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tps := &VerifyV2Service{}\n\tif err := json.NewDecoder(resp.Body).Decode(ps); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ps, err\n}", "func CreateServiceStatusRequest() (request *ServiceStatusRequest) {\nrequest = &ServiceStatusRequest{\nRpcRequest: &requests.RpcRequest{},\n}\nrequest.InitWithApiInfo(\"Yundun\", \"2015-04-16\", \"ServiceStatus\", \"yundun\", \"openAPI\")\nreturn\n}", "func toService(syncService *pb.SyncService) (service *scpb.MicroService) {\n\tservice = &scpb.MicroService{}\n\tvar err error\n\tif syncService.PluginName == PluginName && len(syncService.Expansions) > 0 {\n\t\tmatches := pb.Expansions(syncService.Expansions).Find(expansionDatasource, map[string]string{})\n\t\tif len(matches) > 0 {\n\t\t\terr = proto.Unmarshal(matches[0].Bytes, service)\n\t\t\tif err == nil {\n\t\t\t\tservice.ServiceId = syncService.ServiceId\n\t\t\t\treturn\n\t\t\t}\n\t\t\tlog.Errorf(err, \"proto unmarshal %s service, serviceID = %s, kind = %v, content = %v failed\",\n\t\t\t\tPluginName, service.ServiceId, matches[0].Kind, matches[0].Bytes)\n\t\t}\n\t}\n\tservice.AppId = syncService.App\n\tservice.ServiceId = syncService.ServiceId\n\tservice.ServiceName = syncService.Name\n\tservice.Version = syncService.Version\n\tservice.Status = pb.SyncService_Status_name[int32(syncService.Status)]\n\tservice.Environment = syncService.Environment\n\treturn\n}", "func (s *Structured) GetClass() string {\n\treturn \"unsupported\"\n}", "func (s source) Get(serviceID Service) (ServiceDef, error) {\n\tswitch serviceID {\n\tcase PlanetLab:\n\t\tport := environment.PLControllerPortProduction\n\t\taddr := \"plcontroller.revtr.ccs.neu.edu\"\n\t\tif environment.IsDebugPLController{\n\t\t\tport = environment.PLControllerPortDebug\n\t\t}\n\t\treturn ServiceDef{\n\t\t\tAddr: addr,\n\t\t\tPort: strconv.Itoa(port),\n\t\t\tService: PlanetLab,\n\t\t}, nil\n\tcase RIPEAtlas:\n\t\tport := environment.RIPEAtlasControllerPortProduction\n\t\taddr := \"ripeatlascontroller.revtr.ccs.neu.edu\"\n\t\tif environment.IsDebugRIPEAtlasController{\n\t\t\tport = environment.RIPEAtlasControllerPortDebug\n\t\t\taddr = \"localhost\"\n\t\t}\n\t\treturn ServiceDef{\n\t\t\tAddr: addr,\n\t\t\tPort: strconv.Itoa(port),\n\t\t\tService: RIPEAtlas,\n\t\t} , nil\n\t}\n\tpanic(errors.New(\"Service not known\"))\n}", "func (c *Controller) getService(obj *meta.ObjectMeta) (*core.Service, error) {\n\t// Check whether object with such name already exists in k8s\n\tres, err := c.serviceLister.Services(obj.Namespace).Get(obj.Name)\n\n\tif res != nil {\n\t\t// Object found by name\n\t\treturn res, nil\n\t}\n\n\tif apierrors.IsNotFound(err) {\n\t\t// Object with such name not found\n\t\t// Try to find by labels\n\t\tif set, err := chopmodel.GetSelectorHostFromObjectMeta(obj); err == nil {\n\t\t\tselector := labels.SelectorFromSet(set)\n\n\t\t\tobjects, err := c.serviceLister.Services(obj.Namespace).List(selector)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif len(objects) == 1 {\n\t\t\t\t// Object found by labels\n\t\t\t\treturn objects[0], nil\n\t\t\t}\n\t\t}\n\t}\n\n\t// Object not found\n\treturn nil, err\n}", "func (m *SecureScoreControlProfile) GetService()(*string) {\n return m.service\n}", "func (c PGClient) GetService(id int64) (res *Service, err error) {\n\terr = c.DB.QueryRow(\"select type,name,runstr from tServices where id=$1\", 
id).Scan(&res.Type, &res.Name, &res.RunSTR)\n\treturn res, err\n}", "func (c *ClusterConf) GetService(req *acomm.Request) (interface{}, *url.URL, error) {\n\tvar args IDArgs\n\tif err := req.UnmarshalArgs(&args); err != nil {\n\t\treturn nil, nil, err\n\t}\n\tif args.ID == \"\" {\n\t\treturn nil, nil, errors.Newv(\"missing arg: id\", map[string]interface{}{\"args\": args})\n\t}\n\n\tservice, err := c.getService(args.ID)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\treturn &ServicePayload{service}, nil, nil\n}", "func (s *ServiceBuilder) Result() *corev1api.Service {\n\treturn s.object\n}", "func NewClassService(classRepo repository.ClassRepository) service.ClassService {\n\treturn &classService{\n\t\tclassRepo: classRepo,\n\t}\n}", "func GetService(request *Request, serviceName string) (service Almanac, err error) {\n\tqueryList := make([]Query, 0)\n\tattachments := make(map[string]string)\n\tattachments[\"properties\"] = \"1\"\n\tattachments[\"projects\"] = \"1\"\n\tattachments[\"bindings\"] = \"1\"\n\tconstraints := make(map[string][]string)\n\tnameConstraint := []string{serviceName}\n\tconstraints[\"names\"] = nameConstraint\n\tqueryList = append(queryList, Query{\"map\", \"attachments\", attachments})\n\tqueryList = append(queryList, Query{\"mapArray\", \"constraints\", constraints})\n\tqueryList = append(queryList, Query{\"string\", \"limit\", \"100\"})\n\trequest.SetMethod(\"almanac.service.search\")\n\trequest.AddValues(queryList)\n\tresp, err := SendRequest(request)\n\terr = json.Unmarshal(resp, &service)\n\treturn service, err\n}", "func (d *device) GetClass() Class {\n\treturn d.class\n}", "func GetServices(nbmaster string, httpClient *http.Client, jwt string, host string, hostUuid string) {\r\n fmt.Printf(\"\\nGet NetBackup services available on %s...\\n\\n\", host)\r\n\r\n uri := \"https://\" + nbmaster + \":\" + port + \"/netbackup/admin/hosts/\" + hostUuid + \"/services\"\r\n\r\n request, _ := http.NewRequest(http.MethodGet, uri, nil)\r\n request.Header.Add(\"Authorization\", jwt);\r\n request.Header.Add(\"Content-Type\", contentTypeV3);\r\n\r\n response, err := httpClient.Do(request)\r\n\r\n if err != nil {\r\n fmt.Printf(\"The HTTP request failed with error: %s\\n\", err)\r\n panic(\"Unable to get services\")\r\n } else {\r\n if response.StatusCode == 200 {\r\n resp, _ := ioutil.ReadAll(response.Body)\r\n var obj interface{}\r\n json.Unmarshal(resp, &obj)\r\n data := obj.(map[string]interface{})\r\n var services []interface{} = data[\"data\"].([]interface{})\r\n\r\n fmt.Printf(\"id status\\n\");\r\n fmt.Printf(\"============.=========\\n\");\r\n for _, service := range services {\r\n id := (service.(map[string]interface{}))[\"id\"]\r\n status := (((service.(map[string]interface{}))[\"attributes\"]).(map[string]interface{}))[\"status\"]\r\n\r\n fmt.Printf(\"%-12s %s\\n\", id, status);\r\n }\r\n } else {\r\n printErrorResponse(response)\r\n }\r\n }\r\n}", "func (o V2ApiTargetOutput) Service() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v V2ApiTarget) *string { return v.Service }).(pulumi.StringPtrOutput)\n}", "func ListReferencedClassOfServices(c common.Client, link string) ([]*ClassOfService, error) {\n\tvar result []*ClassOfService\n\tif link == \"\" {\n\t\treturn result, nil\n\t}\n\n\tlinks, err := common.GetCollection(c, link)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\n\tfor _, classofserviceLink := range links.ItemLinks {\n\t\tclassofservice, err := GetClassOfService(c, classofserviceLink)\n\t\tif err != nil {\n\t\t\treturn result, 
err\n\t\t}\n\t\tresult = append(result, classofservice)\n\t}\n\n\treturn result, nil\n}", "func RetrieveService(settings *models.Settings) *models.Service {\n\tresp := httpclient.Get(fmt.Sprintf(\"%s/v1/environments/%s/services/%s\", settings.PaasHost, settings.EnvironmentID, settings.ServiceID), true, settings)\n\tvar service models.Service\n\tjson.Unmarshal(resp, &service)\n\treturn &service\n}", "func (r *Registry) FindService(pdb *db.PostgresDB, req rentities.ServiceRequest) (*rentities.ServiceInfo, error) {\n\t/*sList, okName := r.ServicesMap[req.TName]\n\tif okName != true {\n\t\treturn nil, fmt.Errorf(\"FindService error: service not exist\")\n\t}\n\n\t//get min load instance\n\tminLoad := sList[0].Quality.Load\n\tminIndex := 0\n\tfor i, ri := range sList {\n\t\tif ri.Version == req.Version {\n\t\t\tif minLoad >= ri.Quality.Load {\n\t\t\t\tminIndex = i\n\t\t\t}\n\t\t}\n\t}\n\t*/\n\tsrv, err := pdb.FindMinLoadSrv(req.TName, req.Version)\n\tri, err := rentities.NewServiceInfo(srv.TName, srv.IID, srv.IP, srv.Version, config.DefaultTTL)\n\t/*srv, err := rentities.NewServiceInfo(sList[minIndex].TName, sList[minIndex].IID,\n\tsList[minIndex].IP, sList[minIndex].Version, config.DefaultTTL)*/\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ri, nil\n}", "func getClasses(w http.ResponseWriter, r *http.Request) {\n\terr := json.NewEncoder(w).Encode(DBClasses)\n\tif err != nil {\n\t\terr = errorResponse(w, InternalError, http.StatusInternalServerError)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t}\n}", "func (c *Client) CreateClass(request *CreateClassRequest) (response *CreateClassResponse, err error) {\n if request == nil {\n request = NewCreateClassRequest()\n }\n response = NewCreateClassResponse()\n err = c.Send(request, response)\n return\n}", "func New(client *http.Client) (*Service, error) {\n\tif client == nil {\n\t\treturn nil, errors.New(\"client is nil\")\n\t}\n\ts := &Service{client: client, BasePath: basePath}\n\ts.AccountActiveAdSummaries = NewAccountActiveAdSummariesService(s)\n\ts.AccountPermissionGroups = NewAccountPermissionGroupsService(s)\n\ts.AccountPermissions = NewAccountPermissionsService(s)\n\ts.AccountUserProfiles = NewAccountUserProfilesService(s)\n\ts.Accounts = NewAccountsService(s)\n\ts.Ads = NewAdsService(s)\n\ts.AdvertiserGroups = NewAdvertiserGroupsService(s)\n\ts.AdvertiserLandingPages = NewAdvertiserLandingPagesService(s)\n\ts.Advertisers = NewAdvertisersService(s)\n\ts.Browsers = NewBrowsersService(s)\n\ts.CampaignCreativeAssociations = NewCampaignCreativeAssociationsService(s)\n\ts.Campaigns = NewCampaignsService(s)\n\ts.ChangeLogs = NewChangeLogsService(s)\n\ts.Cities = NewCitiesService(s)\n\ts.ConnectionTypes = NewConnectionTypesService(s)\n\ts.ContentCategories = NewContentCategoriesService(s)\n\ts.Conversions = NewConversionsService(s)\n\ts.Countries = NewCountriesService(s)\n\ts.CreativeAssets = NewCreativeAssetsService(s)\n\ts.CreativeFieldValues = NewCreativeFieldValuesService(s)\n\ts.CreativeFields = NewCreativeFieldsService(s)\n\ts.CreativeGroups = NewCreativeGroupsService(s)\n\ts.Creatives = NewCreativesService(s)\n\ts.DimensionValues = NewDimensionValuesService(s)\n\ts.DirectorySites = NewDirectorySitesService(s)\n\ts.DynamicTargetingKeys = NewDynamicTargetingKeysService(s)\n\ts.EventTags = NewEventTagsService(s)\n\ts.Files = NewFilesService(s)\n\ts.FloodlightActivities = NewFloodlightActivitiesService(s)\n\ts.FloodlightActivityGroups = NewFloodlightActivityGroupsService(s)\n\ts.FloodlightConfigurations = 
NewFloodlightConfigurationsService(s)\n\ts.InventoryItems = NewInventoryItemsService(s)\n\ts.Languages = NewLanguagesService(s)\n\ts.Metros = NewMetrosService(s)\n\ts.MobileApps = NewMobileAppsService(s)\n\ts.MobileCarriers = NewMobileCarriersService(s)\n\ts.OperatingSystemVersions = NewOperatingSystemVersionsService(s)\n\ts.OperatingSystems = NewOperatingSystemsService(s)\n\ts.OrderDocuments = NewOrderDocumentsService(s)\n\ts.Orders = NewOrdersService(s)\n\ts.PlacementGroups = NewPlacementGroupsService(s)\n\ts.PlacementStrategies = NewPlacementStrategiesService(s)\n\ts.Placements = NewPlacementsService(s)\n\ts.PlatformTypes = NewPlatformTypesService(s)\n\ts.PostalCodes = NewPostalCodesService(s)\n\ts.Projects = NewProjectsService(s)\n\ts.Regions = NewRegionsService(s)\n\ts.RemarketingListShares = NewRemarketingListSharesService(s)\n\ts.RemarketingLists = NewRemarketingListsService(s)\n\ts.Reports = NewReportsService(s)\n\ts.Sites = NewSitesService(s)\n\ts.Sizes = NewSizesService(s)\n\ts.Subaccounts = NewSubaccountsService(s)\n\ts.TargetableRemarketingLists = NewTargetableRemarketingListsService(s)\n\ts.TargetingTemplates = NewTargetingTemplatesService(s)\n\ts.UserProfiles = NewUserProfilesService(s)\n\ts.UserRolePermissionGroups = NewUserRolePermissionGroupsService(s)\n\ts.UserRolePermissions = NewUserRolePermissionsService(s)\n\ts.UserRoles = NewUserRolesService(s)\n\ts.VideoFormats = NewVideoFormatsService(s)\n\treturn s, nil\n}", "func GetClass(name string) Class {\n\treturn objc_getClass(name)\n}", "func Service() typhon.Service {\n\treturn Proxy\n}", "func (c BasicController) GetService(key string) (model.Service, error) {\n\treturn c.serviceRepo.GetService(key)\n}", "func (m *GroupPolicyDefinition) GetClassType()(*GroupPolicyDefinitionClassType) {\n val, err := m.GetBackingStore().Get(\"classType\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*GroupPolicyDefinitionClassType)\n }\n return nil\n}", "func Get() Service {\n\treturn &srv\n}", "func (service *Service) GetServiceDef(path string) error {\n\tfilePath := service.ServiceFilePath(path)\n\traw, err := ioutil.ReadFile(filePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tjson.Unmarshal(raw, &service)\n\treturn nil\n}", "func getServiceInstanceFromObj(logt logr.Logger, service *ibmcloudv1.Service) (models.ServiceInstance, error) {\n\texternalName := getExternalName(service)\n\n\tibmCloudInfo, err := ibmcloud.GetInfo(logt, k8sClient, service)\n\tif err != nil {\n\t\treturn models.ServiceInstance{}, err\n\t}\n\n\t// Service instance is not CF\n\tcontrollerClient := ibmCloudInfo.ResourceClient\n\tresServiceInstanceAPI := controllerClient.ResourceServiceInstance()\n\tserviceInstanceQuery := bxcontroller.ServiceInstanceQuery{\n\t\tResourceGroupID: ibmCloudInfo.ResourceGroupID,\n\t\tServicePlanID: ibmCloudInfo.ServicePlanID,\n\t\tName: externalName,\n\t}\n\n\tinstances, err := resServiceInstanceAPI.ListInstances(serviceInstanceQuery)\n\tif err != nil {\n\t\treturn models.ServiceInstance{}, err\n\t}\n\tfor _, instance := range instances {\n\t\tif instance.ID == service.Status.InstanceID {\n\t\t\treturn instance, nil\n\t\t}\n\t}\n\treturn models.ServiceInstance{}, errNotFoundTest\n}", "func (r *apiRegister) GetService(svc string) (*register.Service, error) {\n\tvar (\n\t\tstarted = time.Now()\n\t\terr error\n\t)\n\tdefer reportRegisterAPISixMetrics(\"GetService\", err, started)\n\n\tvar service *admin.Service\n\tservice, err = r.apisixClient.GetService(svc)\n\tif err != nil {\n\t\tblog.Errorf(\"apisix register get 
service %s failed, %s\", svc, err.Error())\n\t\treturn nil, err\n\t}\n\tif service == nil {\n\t\tblog.Warnf(\"apisix register get no Service named %s\", svc)\n\t\treturn nil, nil\n\t}\n\n\tvar upstream *admin.Upstream\n\tupstream, err = r.apisixClient.GetUpstream(svc)\n\tif err != nil {\n\t\tblog.Errorf(\"apisix register get service %s relative upstream failed, %s\", svc, err.Error())\n\t\treturn nil, err\n\t}\n\tif upstream == nil {\n\t\tblog.Errorf(\"apisix register get service %s err, Upsteram Not Found\", svc)\n\t\treturn nil, fmt.Errorf(\"Upstream Not Found\")\n\t}\n\n\tvar route *admin.Route\n\troute, err = r.apisixClient.GetRoute(svc)\n\tif err != nil {\n\t\tblog.Errorf(\"apisix register get service %s relative route failed, %s\", svc, err.Error())\n\t\treturn nil, err\n\t}\n\tif route == nil {\n\t\tblog.Errorf(\"apisix register get service %s err, Route Not Found\", svc)\n\t\treturn nil, fmt.Errorf(\"Route Not Found\")\n\t}\n\t//convert data structure\n\treturn innerServiceConvert(service, route, upstream), nil\n}", "func CreateServiceStatusResponse() (response *ServiceStatusResponse) {\nresponse = &ServiceStatusResponse{\nBaseResponse: &responses.BaseResponse{},\n}\nreturn\n}", "func (o *IamServiceProviderAllOf) GetClassId() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.ClassId\n}", "func (e *Ex) Class() string {\n\treturn e.class\n}", "func (e *Ex) Class() string {\n\treturn e.class\n}", "func (r *kRegister) GetService(svc string) (*register.Service, error) {\n\tvar (\n\t\terr error\n\t\tstarted = time.Now()\n\t\tkSvc *gokong.Service\n\t)\n\tdefer reportRegisterKongMetrics(\"GetService\", err, started)\n\n\tkSvc, err = r.kClient.Services().GetServiceByName(svc)\n\tif err != nil {\n\t\treportKongAPIMetrics(\"GetServices\", http.MethodGet, utils.ErrStatus, started)\n\t\tblog.Errorf(\"kong register get service %s failed, %s\", svc, err.Error())\n\t\treturn nil, err\n\t}\n\tif kSvc == nil {\n\t\treportKongAPIMetrics(\"GetServices\", http.MethodGet, utils.SucStatus, started)\n\t\tblog.Warnf(\"kong register get no Service named %s\", svc)\n\t\treturn nil, nil\n\t}\n\t//convert data structure\n\tregistryService := innerServiceConvert(kSvc)\n\treportKongAPIMetrics(\"GetServices\", http.MethodGet, utils.SucStatus, started)\n\treturn registryService, nil\n}", "func GetService(srvType string) (types.ServiceTypeSupport, bool) {\n\tsrv, ok := serviceTypeMap[srvType]\n\treturn srv, ok\n}", "func (r Runtime) Service() *service.Service {\n\treturn r.svc\n}", "func (p *Provisioner) newClassReflector(kubeClient *kubernetes.Clientset) (cache.Store, *cache.Reflector) {\n\tclassStore := cache.NewStore(cache.DeletionHandlingMetaNamespaceKeyFunc)\n\tvar classReflector *cache.Reflector\n\t// In 1.6 and above classes are out of beta\n\tclassListWatch := &cache.ListWatch{\n\t\tListFunc: p.listAllClasses,\n\t\tWatchFunc: p.watchAllClasses,\n\t}\n\tclassReflector = cache.NewReflector(classListWatch, &storage_v1.StorageClass{}, classStore, 0)\n\n\t// if we're dealing with 1.5, classes are still in beta\n\tif p.serverVersion.Major == \"1\" && p.serverVersion.Minor == \"5\" {\n\t\tclassListWatch = &cache.ListWatch{\n\t\t\tListFunc: p.listBetaAllClasses,\n\t\t\tWatchFunc: p.watchBetaAllClasses,\n\t\t}\n\t\tclassReflector = cache.NewReflector(classListWatch, &storage_v1beta1.StorageClass{}, classStore, 0)\n\t}\n\n\treturn classStore, classReflector\n}", "func (sc *ServiceController) Show() (*service.Service, error) {\n\turl := urlService(sc.ID)\n\n\tresponse, e := 
sc.c.ClientREST.HTTPMethod(\"GET\", url)\n\n\tif e != nil {\n\t\treturn &service.Service{}, e\n\t}\n\n\treturn NewService(documentJSON(response)), nil\n}", "func getServiceInstanceFromObjCF(logt logr.Logger, service *ibmcloudv1.Service) (*mccpv2.ServiceInstance, error) {\n\texternalName := getExternalName(service)\n\n\tibmCloudInfo, err := ibmcloud.GetInfo(logt, k8sClient, service)\n\tif err != nil {\n\t\treturn &mccpv2.ServiceInstance{}, err\n\t}\n\n\tserviceInstanceAPI := ibmCloudInfo.BXClient.ServiceInstances()\n\treturn serviceInstanceAPI.FindByName(externalName)\n}", "func (o V2ApiTargetResponseOutput) Service() pulumi.StringOutput {\n\treturn o.ApplyT(func(v V2ApiTargetResponse) string { return v.Service }).(pulumi.StringOutput)\n}", "func (o *ControllerServiceAPI) GetType() string {\n\tif o == nil || o.Type == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Type\n}", "func GetTagWithClass(ctx *gin.Context) {\n\tdb := getDatabase(ctx)\n\tdefer db.Close()\n\tcls := ctx.Param(\"class\")\n\n\tresp := ListTagsWithClass(db, cls)\n\tctx.JSON(http.StatusOK, resp)\n}", "func mapClassFromDomainToAPI(c domain.Class) *model.Class {\n\n\tmpc := model.ProficiencyChoices{}\n\tfor _, dpc := range c.ProficiencyChoices {\n\t\tmf := []*model.Proficiency{}\n\t\tfor _, f := range dpc.From {\n\t\t\tmf = append(mf, &model.Proficiency{\n\t\t\t\tName: &f.Name,\n\t\t\t})\n\t\t}\n\t\tmpc = model.ProficiencyChoices{\n\t\t\tChoose: &dpc.Choose,\n\t\t\tFrom: mf,\n\t\t\tType: &dpc.Type,\n\t\t}\n\t}\n\tmp := []*model.Proficiency{}\n\tfor _, dp := range c.Proficiencies {\n\t\tmp = append(mp, &model.Proficiency{\n\t\t\tName: &dp.Name,\n\t\t})\n\t}\n\tmsc := []*model.SubClass{}\n\tfor _, sc := range c.SubClasses {\n\t\tmsc = append(msc, &model.SubClass{\n\t\t\tName: &sc.Name,\n\t\t})\n\t}\n\treturn &model.Class{\n\t\tID: &c.ID,\n\t\tName: &c.Name,\n\t\tHitDie: &c.HitDie,\n\t\tProficiencyChoices: &mpc,\n\t\tProficiencies: mp,\n\t\tSubClasses: msc,\n\t\t// map SavingThrows abilities\n\t\t// StartingEquipment: c.StartingEquipment,\n\t\t// ClassLevels: c.ClassLevels,\n\t\t// SavingThrows []*Ability `json:\"savingThrows\"`\n\t\t// StartingEquipment *StartingEquipment `json:\"startingEquipment\"`\n\t\t// ClassLevels []*ClassLevel `json:\"classLevels\"`\n\t}\n}", "func NewGetServiceClassesDefault(code int) *GetServiceClassesDefault {\n\treturn &GetServiceClassesDefault{\n\t\t_statusCode: code,\n\t}\n}", "func newService(cr *argoprojv1a1.ArgoCD) *corev1.Service {\n\treturn &corev1.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: cr.Name,\n\t\t\tNamespace: cr.Namespace,\n\t\t\tLabels: argoutil.LabelsForCluster(cr),\n\t\t},\n\t}\n}", "func (r *ClusterServiceResource) obj() (k8sclient.Object, error) {\n\tports := make([]corev1.ServicePort, 0, len(r.svcPorts))\n\tfor _, svcPort := range r.svcPorts {\n\t\tports = append(ports, corev1.ServicePort{\n\t\t\tName: svcPort.Name,\n\t\t\tProtocol: corev1.ProtocolTCP,\n\t\t\tPort: int32(svcPort.Port),\n\t\t\tTargetPort: intstr.FromInt(svcPort.Port),\n\t\t})\n\t}\n\n\tobjLabels := labels.ForCluster(r.pandaCluster)\n\tsvc := &corev1.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tNamespace: r.Key().Namespace,\n\t\t\tName: r.Key().Name,\n\t\t\tLabels: objLabels,\n\t\t},\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"Service\",\n\t\t\tAPIVersion: \"v1\",\n\t\t},\n\t\tSpec: corev1.ServiceSpec{\n\t\t\tPublishNotReadyAddresses: true,\n\t\t\tType: corev1.ServiceTypeClusterIP,\n\t\t\tPorts: ports,\n\t\t\tSelector: 
objLabels.AsAPISelector().MatchLabels,\n\t\t},\n\t}\n\n\terr := controllerutil.SetControllerReference(r.pandaCluster, svc, r.scheme)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn svc, nil\n}", "func (c *ProjectService) Get(id string) (*Project, *http.Response, error) {\n\tproject := new(Project)\n\tapiError := new(APIError)\n\tpath := fmt.Sprintf(\"%s\", id)\n\tresp, err := c.sling.New().Get(path).Receive(project, apiError)\n\treturn project, resp, relevantError(err, *apiError)\n}", "func (v *IADs) Class() (class string, err error) {\n\tvar bstr *int16\n\thr, _, _ := syscall.Syscall(\n\t\tuintptr(v.VTable().Class),\n\t\t2,\n\t\tuintptr(unsafe.Pointer(v)),\n\t\tuintptr(unsafe.Pointer(&bstr)),\n\t\t0)\n\tif bstr != nil {\n\t\tdefer ole.SysFreeString(bstr)\n\t}\n\tif hr == 0 {\n\t\tclass = ole.BstrToString((*uint16)(unsafe.Pointer(bstr)))\n\t} else {\n\t\treturn \"\", convertHresultToError(hr)\n\t}\n\treturn\n}", "func newService(rcvr interface{}, guard Guard) *service {\n\ts := new(service)\n\ts.typ = reflect.TypeOf(rcvr)\n\ts.rcvr = reflect.ValueOf(rcvr)\n\ts.name = reflect.Indirect(s.rcvr).Type().Name()\n\ts.guard = guard\n\n\t// install the methods\n\ts.method = suitableMethods(s.typ, true)\n\n\treturn s\n}", "func (c client) GetService(objectKey k8sClient.ObjectKey) (corev1.Service, error) {\n\ts := corev1.Service{}\n\tif err := c.Get(context.TODO(), objectKey, &s); err != nil {\n\t\treturn corev1.Service{}, err\n\t}\n\treturn s, nil\n}", "func (f *lazyCallReq) Service() []byte {\n\tl := f.Payload[_serviceLenIndex]\n\treturn f.Payload[_serviceNameIndex : _serviceNameIndex+int(l)]\n}", "func (pr PlanReference) GetSpecifiedClusterServiceClass() string {\n\tif pr.ClusterServiceClassExternalName != \"\" {\n\t\treturn pr.ClusterServiceClassExternalName\n\t}\n\n\tif pr.ClusterServiceClassExternalID != \"\" {\n\t\treturn pr.ClusterServiceClassExternalID\n\t}\n\n\tif pr.ClusterServiceClassName != \"\" {\n\t\treturn pr.ClusterServiceClassName\n\t}\n\n\treturn \"\"\n}", "func (kb *KubeAPIServer) Service() (s string, err error) {\n\ttpl := template.Must(template.New(\"kubeAPIServerTemplate\").Parse(kubeAPIServerTemplate))\n\tbuf := bytes.NewBuffer(nil)\n\tkv := kubeAPIServerTemplateInfo{KubeAPIServerPath: kb.Path}\n\tif err := tpl.Execute(buf, kv); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn buf.String(), nil\n}", "func (c *Call) Service() string {\n\tif c == nil {\n\t\treturn \"\"\n\t}\n\treturn c.md.Service()\n}", "func (s serviceimpl) BuildService(client *http.Client) *calendar.Service {\n\tsrv, err := calendar.New(client)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn srv\n}", "func (c *Client) Service() *Service {\n\tctx := context.TODO()\n\n\tif c.service == nil {\n\t\t// Load configuration for the V2 SDK\n\t\tcfg, err := NewAWSConfig(ctx)\n\t\tif err != nil {\n\t\t\tc.logger.WithError(err).Error(\"Can't load AWS Configuration\")\n\t\t}\n\n\t\tc.mux.Lock()\n\t\tc.service = NewService(cfg)\n\t\tc.mux.Unlock()\n\t}\n\n\treturn c.service\n}", "func (kubeutil *Kube) GetSecretProviderClass(namespace string, className string) (*secretsstorev1.SecretProviderClass, error) {\n\treturn kubeutil.secretProviderClient.SecretsstoreV1().SecretProviderClasses(namespace).Get(context.Background(), className, metav1.GetOptions{})\n}", "func GetServiceName() (*result.Result, error) {\n\n\t// connect database\n\tdb, err := pqx.Open()\n\tif err != nil {\n\t\treturn nil, errors.NewEvent(eventcode.EventNetworkCriticalUnableConDB, err)\n\t}\n\n\t// sql for get Service name from 
api.service\n\tq := sqlGetServiceName\n\tp := []interface{}{}\n\t// process query access table\n\trows, err := db.Query(q, p...)\n\t// check error\n\tif err != nil {\n\t\treturn nil, pqx.GetRESTError(err)\n\t}\n\tdefer rows.Close()\n\n\tvar data = selectOption.NewSelect()\n\t// loop for get data from table to struct for return\n\tfor rows.Next() {\n\t\t//declare value for get data from table\n\t\tvar (\n\t\t\tid sql.NullInt64\n\t\t\ttext sql.NullString\n\t\t\tmethod sql.NullString\n\t\t\tmodule sql.NullString\n\t\t)\n\t\trows.Scan(&id, &text, &method, &module)\n\t\t// add data one row to array data\n\t\tdata.Add(id.Int64, module.String+\"/\"+text.String+\"(\"+method.String+\")\")\n\t}\n\t//declare value for result\n\ttype ServiceName struct {\n\t\tDateRange string `json:\"date_range\"`\n\t\tServiceOption []*selectOption.Option `json:\"service_name\"`\n\t}\n\n\tdataOption := &ServiceName{}\n\tdataOption.ServiceOption = data.Option\n\t// get date range\n\tdataOption.DateRange = setting.GetSystemSetting(\"setting.Default.DateRange\")\n\t// return service\n\treturn result.Result1(dataOption), nil\n}", "func (t *Targets) GetService(context context.Context, instance *iter8v1alpha2.Experiment) error {\n\treturn t.client.Get(context, types.NamespacedName{\n\t\tName: instance.Spec.Service.Name,\n\t\tNamespace: t.namespace},\n\t\tt.Service)\n}", "func (SearchRes) Service() ServiceID {\n\treturn SearchResService\n}", "func ServiceGetOne(id int) (service entity.Service, err error) {\n\tdb := util.DbConnect()\n\n\tresult := db.QueryRow(`\n\t\tSELECT service.id as id, service.user_id as user_id, service.service_type_id as service_type_id,\n\t\tCONCAT(user.first_name, ' ', user.last_name) as user_name, \n\t\tservice_type.service_name as service_name,\n\t\tservice.contact_number, service.price,\n\t\tservice.image as image,\n\t\tservice.description,\n\t\tservice.total_rating,\n\t\tservice.total_reviewer\n\t\tFROM service\n\t\tJOIN user ON user.ID = service.user_id\n\t\tJOIN service_type ON service_type.id = service.service_type_id\n\t\tWHERE service.is_archived = false AND service.id=?\n\t`, id)\n\terr = result.Scan(&service.ID, &service.UserID, &service.ServiceTypeID, &service.UserName, &service.ServiceName, &service.ContactNumber,\n\t\t&service.Price, &service.Image, &service.Description, &service.TotalRating, &service.TotalReviewer)\n\n\tif err != nil {\n\t\treturn\n\t}\n\n\tdefer db.Close()\n\treturn\n}", "func (c PGClient) GetServicesByProject(projects []string) (*[]Service, error) {\n\tvar projectsID []int64\n\trows, err := c.DB.Query(\"select id from tProjects where name = any($1)\", pg.Array(projects))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor rows.Next() {\n\t\tvar tempID int64\n\t\terr := rows.Scan(&tempID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tprojectsID = append(projectsID, tempID)\n\t}\n\n\trows, err = c.DB.Query(\"select service_id from tServiceProjects where project_id=any($1)\", pg.Array(projectsID))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tserviceIDs := make([]int64, 0, 10)\n\tfor rows.Next() {\n\t\tvar tempID int64\n\t\terr := rows.Scan(&tempID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tserviceIDs = append(serviceIDs, tempID)\n\t}\n\trows, err = c.DB.Query(\"select id,name,host,port,type from tServices where id =any($1)\", pg.Array(serviceIDs))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tres := make([]Service, 0, 200)\n\tfor rows.Next() {\n\t\tt := Service{}\n\t\tif err = rows.Scan(&t.ID, &t.Name, &t.Host, &t.Port, &t.Type); err != nil 
{\n\t\t\treturn &res, err\n\t\t}\n\t\tres = append(res, t)\n\t}\n\treturn &res, err\n}", "func (m *SecureScoreControlProfile) GetService()(*string) {\n val, err := m.GetBackingStore().Get(\"service\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func getBuilderService(kubeClient *client.Client) (*api.Service, error) {\n\tserviceClient := kubeClient.Services(namespace)\n\tservice, err := serviceClient.Get(\"deis-builder\")\n\tif err != nil {\n\t\tstatusErr, ok := err.(*errors.StatusError)\n\t\t// If the issue is just that no deis-builder was found, that's ok.\n\t\tif ok && statusErr.Status().Code == 404 {\n\t\t\t// We'll just return nil instead of a found *api.Service.\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, err\n\t}\n\treturn service, nil\n}", "func (i *Item) GetService() iaas.Service {\n\treturn i.folder.GetService()\n}", "func TestGetAppClass(t *testing.T) {\n\tfor _, ac := range appClassCases {\n\t\tt.Logf(\"start case: %s\", ac.describe)\n\t\tresult := GetAppClass(ac.pod)\n\t\tif result != ac.expect {\n\t\t\tt.Fatalf(\"get app class error, expect %s, but get %s\", ac.expect, result)\n\t\t}\n\t}\n}", "func (pa *PodAutoscaler) Class() string {\n\tif c, ok := pa.Annotations[autoscaling.ClassAnnotationKey]; ok {\n\t\treturn c\n\t}\n\t// Default to \"kpa\" class for backward compatibility.\n\treturn autoscaling.KPA\n}", "func (o JavaSettingsResponseOutput) ServiceClassNames() pulumi.StringMapOutput {\n\treturn o.ApplyT(func(v JavaSettingsResponse) map[string]string { return v.ServiceClassNames }).(pulumi.StringMapOutput)\n}", "func (c *controller) getClusterServiceClassAndClusterServiceBroker(instance *v1beta1.ServiceInstance) (*v1beta1.ClusterServiceClass, string, osb.Client, error) {\n\tpcb := pretty.NewContextBuilder(pretty.ServiceInstance, instance.Namespace, instance.Name)\n\tserviceClass, err := c.clusterServiceClassLister.Get(instance.Spec.ClusterServiceClassRef.Name)\n\tif err != nil {\n\t\treturn nil, \"\", nil, &operationError{\n\t\t\treason: errorNonexistentClusterServiceClassReason,\n\t\t\tmessage: fmt.Sprintf(\n\t\t\t\t\"The instance references a non-existent ClusterServiceClass (K8S: %q ExternalName: %q)\",\n\t\t\t\tinstance.Spec.ClusterServiceClassRef.Name, instance.Spec.ClusterServiceClassExternalName,\n\t\t\t),\n\t\t}\n\t}\n\n\tbroker, err := c.clusterServiceBrokerLister.Get(serviceClass.Spec.ClusterServiceBrokerName)\n\tif err != nil {\n\t\treturn nil, \"\", nil, &operationError{\n\t\t\treason: errorNonexistentClusterServiceBrokerReason,\n\t\t\tmessage: fmt.Sprintf(\n\t\t\t\t\"The instance references a non-existent broker %q\",\n\t\t\t\tserviceClass.Spec.ClusterServiceBrokerName,\n\t\t\t),\n\t\t}\n\n\t}\n\n\tauthConfig, err := getAuthCredentialsFromClusterServiceBroker(c.kubeClient, broker)\n\tif err != nil {\n\t\treturn nil, \"\", nil, &operationError{\n\t\t\treason: errorAuthCredentialsReason,\n\t\t\tmessage: fmt.Sprintf(\n\t\t\t\t\"Error getting broker auth credentials for broker %q: %s\",\n\t\t\t\tbroker.Name, err,\n\t\t\t),\n\t\t}\n\t}\n\n\tclientConfig := NewClientConfigurationForBroker(broker.ObjectMeta, &broker.Spec.CommonServiceBrokerSpec, authConfig)\n\tglog.V(4).Info(pcb.Messagef(\"Creating client for ClusterServiceBroker %v, URL: %v\", broker.Name, broker.Spec.URL))\n\tbrokerClient, err := c.brokerClientCreateFunc(clientConfig)\n\tif err != nil {\n\t\treturn nil, \"\", nil, err\n\t}\n\n\treturn serviceClass, broker.Name, brokerClient, nil\n}", "func returnGoSubServiceSvcDefinition(name string, hasDB 
bool) (string, error) {\n\t// service types\n\tsrvcDefinitionString := fmt.Sprintf(\"\\n\\n // New%s loads related SQL statements and initializes the container struct\\n\", strings.Title(name)+\"Service\") +\n\t\tfmt.Sprintf(\"func New%s(s *Services) %s {\\n\", strings.Title(name)+\"Service\", strings.Title(name)+\"Service\")\n\n\tif hasDB {\n\t\tsrvcDefinitionString += fmt.Sprintf(\"\t// create initial interface \\n\") +\n\t\t\tfmt.Sprintf(\"\tctx := &db.%s{}\\n\", strings.Title(name)+\"StructDB\") +\n\t\t\tfmt.Sprintf(\"\tctx.DB = s.db \\n\")\n\t}\n\n\tsrvcDefinitionString += fmt.Sprintf(\"\tsrvc := &%s{}\\n\", strings.ToLower(name)+\"Service\")\n\n\tif hasDB {\n\t\tsrvcDefinitionString += fmt.Sprintf(\"\tsrvc.I%s = &validation.%s{I%s: ctx}\\n\", strings.Title(name)+\"DB\", strings.Title(name)+\"Validator\", strings.Title(name)+\"DB\")\n\t}\n\tsrvcDefinitionString += fmt.Sprintf(\"\treturn srvc\\n }\\n\\n\") +\n\t\t// interface type\n\t\tfmt.Sprintf(\"// %s is a wrapper for related components\\n\", strings.Title(name)+\"Services\") +\n\t\tfmt.Sprintf(\"type %s interface {\\n\", strings.Title(name)+\"Service\")\n\tif hasDB {\n\t\tsrvcDefinitionString += fmt.Sprintf(\"\tdb.I%s\\n\", strings.Title(name)+\"DB\")\n\t}\n\tsrvcDefinitionString += fmt.Sprintf(\"}\\n\\n\") +\n\t\t// struct type\n\t\tfmt.Sprintf(\"type %s struct {\\n\", strings.ToLower(name)+\"Service\")\n\tif hasDB {\n\t\tsrvcDefinitionString += fmt.Sprintf(\"\tdb.I%s\\n\", strings.Title(name)+\"DB\")\n\t}\n\tsrvcDefinitionString += fmt.Sprintf(\"}\\n\")\n\n\treturn srvcDefinitionString, nil\n}" ]
[ "0.6730124", "0.60911924", "0.6003059", "0.5878648", "0.5870544", "0.5807817", "0.57496333", "0.57308173", "0.57098544", "0.5695293", "0.5679978", "0.5668689", "0.5612896", "0.55966806", "0.5568805", "0.55262625", "0.5512509", "0.5452803", "0.5448624", "0.53934723", "0.53896487", "0.5349266", "0.5339706", "0.53252506", "0.53061193", "0.52383435", "0.5237565", "0.52347887", "0.5208759", "0.519053", "0.51703924", "0.51670814", "0.5131598", "0.5129195", "0.5125182", "0.51141095", "0.50993365", "0.5090171", "0.50806534", "0.50786376", "0.5073819", "0.50635624", "0.50507885", "0.5049383", "0.5042324", "0.50351506", "0.50336456", "0.50320345", "0.5029736", "0.5025012", "0.5006509", "0.5005212", "0.50004673", "0.4995771", "0.49879712", "0.4979922", "0.49725822", "0.497083", "0.49648336", "0.4963901", "0.49401802", "0.49292636", "0.49292636", "0.49270272", "0.492501", "0.4905259", "0.49030972", "0.48988456", "0.4896417", "0.48946476", "0.488312", "0.48823422", "0.48815167", "0.4878334", "0.48729318", "0.4868136", "0.48624372", "0.48609558", "0.48559546", "0.48468342", "0.48439744", "0.48433733", "0.4837488", "0.48344862", "0.48295376", "0.4829346", "0.48280683", "0.4824665", "0.4823", "0.48214462", "0.48203695", "0.48193258", "0.48142332", "0.48090893", "0.4805014", "0.47972476", "0.47915924", "0.4790487", "0.47889057", "0.4787367" ]
0.7838934
0
NewIndex will create an index
func (es *Repository) NewIndex(ctx context.Context, index string) (error){
	svc := es.client.CreateIndex("ethan")
	_, err := svc.Do(ctx)
	if err != nil {
		return err
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CreateNewIndex(url string, alias string) (string, error) {\n\t// create our day-specific name\n\tphysicalIndex := fmt.Sprintf(\"%s_%s\", alias, time.Now().Format(\"2006_01_02\"))\n\tidx := 0\n\n\t// check if it exists\n\tfor true {\n\t\tresp, err := http.Get(fmt.Sprintf(\"%s/%s\", url, physicalIndex))\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\t// not found, great, move on\n\t\tif resp.StatusCode == http.StatusNotFound {\n\t\t\tbreak\n\t\t}\n\n\t\t// was found, increase our index and try again\n\t\tidx++\n\t\tphysicalIndex = fmt.Sprintf(\"%s_%s_%d\", alias, time.Now().Format(\"2006_01_02\"), idx)\n\t}\n\n\t// initialize our index\n\tcreateURL := fmt.Sprintf(\"%s/%s\", url, physicalIndex)\n\t_, err := MakeJSONRequest(http.MethodPut, createURL, indexSettings, nil)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// all went well, return our physical index name\n\tlog.WithField(\"index\", physicalIndex).Info(\"created index\")\n\treturn physicalIndex, nil\n}", "func NewIndex(unique bool, columns []Column) *Index {\n\treturn &Index{\n\t\tbtree: btree.NewBTreeG[Doc](func(a, b Doc) bool {\n\t\t\treturn Order(a, b, columns, !unique) < 0\n\t\t}),\n\t}\n}", "func CreateNewIndex(rawItems PointArray, dim, nTree, k int, m Metric) (Index, error) {\n\t// verify that given items have same dimension\n\tl := rawItems.Len()\n\tif l < 2 {\n\t\treturn nil, errNotEnoughItems\n\t}\n\tits := make([]*item, l)\n\t//idToItem := make(map[itemId]*item, l)\n\tfor i:=0; i < l; i++{\n\t\tv := rawItems.At(i)\n\t\tif v.Dimension() != dim {\n\t\t\treturn nil, errDimensionMismatch\n\t\t}\n\t\tit := &item{\n\t\t\tid: itemId(i),\n\t\t\tvector: v,\n\t\t}\n\t\tits[i] = it\n\t\t//idToItem[it.id] = it\n\t}\n\tidx := &index{\n\t\tmetric: m,\n\t\tdim: dim,\n\t\tk: k,\n\t\titemIDToItem: rawItems,\n\t\troots: make([]*node, nTree),\n\t\tnodeIDToNode: map[nodeId]*node{},\n\t\tmux: &sync.Mutex{},\n\t}\n\n\t// build\n\tidx.build(its, nTree)\n\treturn idx, nil\n}", "func NewIndex(addr, name, typ string, md *index.Metadata) (*Index, error) {\n\n\tfmt.Println(\"Get a new index: \", addr, name)\n client := &http.Client{\n\t\tTransport: &http.Transport{\n\t\t\t//MaxIdleConnsPerHost: 200,\n\t\t\tMaxIdleConnsPerHost: 2000000,\n\t\t},\n\t\tTimeout: 2500000 * time.Millisecond,\n\t}\n\tconn, err := elastic.NewClient(elastic.SetURL(addr), elastic.SetHttpClient(client))\n\tif err != nil {\n fmt.Println(\"Get error here\");\n\t\treturn nil, err\n\t}\n\tret := &Index{\n\t\tconn: conn,\n\t\tmd: md,\n\t\tname: name,\n\t\ttyp: typ,\n\t}\n fmt.Println(\"get here ======\");\n\n\treturn ret, nil\n\n}", "func newIndex(name string) (index *ind) {\n\tindex = new(ind)\n\tindex.name = name\n\tindex.Storage = map[string][]string{}\n\tindex.Domains = map[string]bool{}\n\treturn\n}", "func CreateIndex(context *web.AppContext) *web.AppError {\n\n\tdb := context.MDB\n\tvar input model.Index\n\tjson.NewDecoder(context.Body).Decode(&input)\n\n\terr := db.Session.DB(\"\").C(input.Target).EnsureIndex(input.Index)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"Error creating index [%+v]\", input)\n\t\treturn &web.AppError{err, message, http.StatusInternalServerError}\n\t}\n\n\treturn nil\n}", "func MakeIndex() error {\n\n\treturn nil\n}", "func NewIndex(addrs []string, pass string, temporary int, name string, md *index.Metadata) *Index {\n\n\tret := &Index{\n\n\t\thosts: addrs,\n\n\t\tmd: md,\n\t\tpassword: pass,\n\t\ttemporary: temporary,\n\n\t\tname: name,\n\n\t\tcommandPrefix: \"FT\",\n\t}\n\tif md != nil && md.Options != nil {\n\t\tif opts, ok 
:= md.Options.(IndexingOptions); ok {\n\t\t\tif opts.Prefix != \"\" {\n\t\t\t\tret.commandPrefix = md.Options.(IndexingOptions).Prefix\n\t\t\t}\n\t\t}\n\t}\n\t//ret.pool.MaxActive = ret.pool.MaxIdle\n\n\treturn ret\n\n}", "func newDocumentIndex(opts *iface.CreateDocumentDBOptions) iface.StoreIndex {\n\treturn &documentIndex{\n\t\tindex: map[string][]byte{},\n\t\topts: opts,\n\t}\n}", "func NewIndex() Index {\n\tnewIndex := Index{}\n\tnewIndex.Setup()\n\treturn newIndex\n}", "func (c *esClientV7) createIndex(args ElasticsearchArgs) error {\n\tres, err := c.Indices.ResolveIndex([]string{args.Index})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer res.Body.Close()\n\n\tvar v map[string]interface{}\n\tfound := false\n\tif err := json.NewDecoder(res.Body).Decode(&v); err != nil {\n\t\treturn fmt.Errorf(\"Error parsing response body: %v\", err)\n\t}\n\n\tindices := v[\"indices\"].([]interface{})\n\tfor _, index := range indices {\n\t\tname := index.(map[string]interface{})[\"name\"]\n\t\tif name == args.Index {\n\t\t\tfound = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif !found {\n\t\tresp, err := c.Indices.Create(args.Index)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer resp.Body.Close()\n\t\tif resp.IsError() {\n\t\t\terr := fmt.Errorf(\"Create index err: %s\", res.String())\n\t\t\treturn err\n\t\t}\n\t\tio.Copy(ioutil.Discard, resp.Body)\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (c *Client) createIndex(mapping string) error {\n\tinfo, err := c.Client.NodesInfo().Do(context.Background())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcreateIndexService := c.Client.CreateIndex(c.IndexName).BodyString(mapping)\n\n\tfor _, node := range info.Nodes {\n\t\t// Grab the first character of the version string and turn it into an int\n\t\tversion, _ := strconv.Atoi(string(node.Version[0]))\n\t\tif version == 7 {\n\t\t\t// include_type_name defaults to false in ES7. This will ensure ES7\n\t\t\t// behaves like ES6 when creating mappings. See\n\t\t\t// https://www.elastic.co/blog/moving-from-types-to-typeless-apis-in-elasticsearch-7-0\n\t\t\t// for more information. We also can't set this for any versions before\n\t\t\t// 6.8 as this parameter was not supported. 
Since it defaults to true in\n\t\t\t// all 6.x it's safe to only set it for 7.x.\n\t\t\tcreateIndexService = createIndexService.IncludeTypeName(true)\n\t\t}\n\n\t\t// We only look at the first node and assume they're all the same version\n\t\tbreak\n\t}\n\n\tcreateIndex, err := createIndexService.Do(context.Background())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !createIndex.Acknowledged {\n\t\treturn timeoutError\n\t}\n\n\treturn nil\n}", "func NewIndex(kind IndexKind, table string) Index {\n\treturn &index{\n\t\tkind: kind,\n\t\ttable: table,\n\t}\n}", "func NewLogIndex() indices.Index { return &logIndex{} }", "func NewIndex(path, name string) (*Index, error) {\n\terr := validateName(name)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"validating name\")\n\t}\n\n\treturn &Index{\n\t\tpath: path,\n\t\tname: name,\n\t\tfields: make(map[string]*Field),\n\n\t\tnewAttrStore: newNopAttrStore,\n\t\tcolumnAttrs: nopStore,\n\n\t\tbroadcaster: NopBroadcaster,\n\t\tStats: stats.NopStatsClient,\n\t\tlogger: logger.NopLogger,\n\t\ttrackExistence: true,\n\t}, nil\n}", "func (dbclient *CouchDatabase) CreateNewIndexWithRetry(indexdefinition string, designDoc string) error {\n\t//get the number of retries\n\tmaxRetries := dbclient.CouchInstance.Conf.MaxRetries\n\n\t_, err := retry.Invoke(\n\t\tfunc() (interface{}, error) {\n\t\t\texists, err := dbclient.IndexDesignDocExists(designDoc)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif exists {\n\t\t\t\treturn nil, nil\n\t\t\t}\n\n\t\t\treturn dbclient.CreateIndex(indexdefinition)\n\t\t},\n\t\tretry.WithMaxAttempts(maxRetries),\n\t)\n\treturn err\n}", "func (cc *TicketsChaincode) createIndex(stub shim.ChaincodeStubInterface, indexName string, attributes []string) error {\n\tfmt.Println(\"- start create index\")\n\tvar err error\n\n\tindexKey, err := stub.CreateCompositeKey(indexName, attributes)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvalue := []byte{0x00}\n\tstub.PutState(indexKey, value)\n\n\tfmt.Println(\"created index\")\n\treturn nil\n}", "func NewIndex(name string, columns []string, indexType IndexType) Index {\n\treturn Index{\n\t\tName: name,\n\t\tColumns: columns,\n\t\tType: indexType,\n\t}\n}", "func NewIndex(physicalID int64, tblInfo *model.TableInfo, indexInfo *model.IndexInfo) table.Index {\n\tindex := &index{\n\t\tidxInfo: indexInfo,\n\t\ttblInfo: tblInfo,\n\t\t// The prefix can't encode from tblInfo.ID, because table partition may change the id to partition id.\n\t\tprefix: tablecodec.EncodeTableIndexPrefix(physicalID, indexInfo.ID),\n\t}\n\treturn index\n}", "func (msg MsgCreateIndex) Type() string { return \"create_index\" }", "func NewIndex(texts []string, name string) *Index {\n\treturn &Index{texts: texts, name: name}\n}", "func (s *BasePlSqlParserListener) EnterCreate_index(ctx *Create_indexContext) {}", "func createIndexes(ts *Schema, ti *Info, idxs []schema.Index, store *stor.Stor) {\n\tif len(idxs) == 0 {\n\t\treturn\n\t}\n\tts.Indexes = slices.Clip(ts.Indexes) // copy on write\n\tnold := len(ts.Indexes)\n\tfor i := range idxs {\n\t\tix := &idxs[i]\n\t\tif ts.FindIndex(ix.Columns) != nil {\n\t\t\tpanic(\"duplicate index: \" +\n\t\t\t\tstr.Join(\"(,)\", ix.Columns) + \" in \" + ts.Table)\n\t\t}\n\t\tts.Indexes = append(ts.Indexes, *ix)\n\t}\n\tidxs = ts.SetupNewIndexes(nold)\n\tn := len(ti.Indexes)\n\tti.Indexes = slices.Clip(ti.Indexes) // copy on write\n\tfor i := range idxs {\n\t\tbt := btree.CreateBtree(store, &ts.Indexes[n+i].Ixspec)\n\t\tti.Indexes = append(ti.Indexes, 
index.OverlayFor(bt))\n\t}\n}", "func NewIndex(storage storages.Storage) Index {\n\treturn &multiIndex{index: storage}\n}", "func (p *ThriftHiveMetastoreClient) AlterIndex(ctx context.Context, dbname string, base_tbl_name string, idx_name string, new_idx *Index) (err error) {\n var _args108 ThriftHiveMetastoreAlterIndexArgs\n _args108.Dbname = dbname\n _args108.BaseTblName = base_tbl_name\n _args108.IdxName = idx_name\n _args108.NewIdx_ = new_idx\n var _result109 ThriftHiveMetastoreAlterIndexResult\n if err = p.Client_().Call(ctx, \"alter_index\", &_args108, &_result109); err != nil {\n return\n }\n switch {\n case _result109.O1!= nil:\n return _result109.O1\n case _result109.O2!= nil:\n return _result109.O2\n }\n\n return nil\n}", "func CreateIndex(excludedPaths []string) (Index, error) {\n\tglog.V(1).Infof(\"CreateIndex(%v)\", excludedPaths)\n\n\tmapping := bleve.NewIndexMapping()\n\tif len(excludedPaths) > 0 {\n\t\tcustomMapping := bleve.NewDocumentMapping()\n\t\tfor _, path := range excludedPaths {\n\t\t\tpaths := strings.Split(path, \".\")\n\t\t\tpathToMapping(paths, customMapping)\n\t\t}\n\t\tmapping.DefaultMapping = customMapping\n\t}\n\tindex, err := bleve.NewMemOnly(mapping)\n\n\tif err != nil {\n\t\tglog.Error(err)\n\t\treturn nil, err\n\t}\n\n\tbatch := index.NewBatch()\n\n\treturn &bleveIndex{\n\t\tindex: index,\n\t\taddInc: 0,\n\t\tbatch: batch,\n\t}, nil\n}", "func (s *BasePlSqlParserListener) EnterNew_index_name(ctx *New_index_nameContext) {}", "func CreateIndex(fromKind string, fromKindFieldName string, toKind string,\n\textractor ForeignKeyExtractor) {\n\n\ti := Indexes[fromKind]\n\tif i == nil {\n\t\ti = make(map[string]string)\n\t\tIndexes[fromKind] = i\n\t}\n\ti[fromKindFieldName] = toKind\n\n\tfkv := ForeignKeyExtractors[fromKind]\n\tif fkv == nil {\n\t\tfkv = make(map[string]ForeignKeyExtractor)\n\t\tForeignKeyExtractors[fromKind] = fkv\n\t}\n\tfkv[fromKindFieldName] = extractor\n}", "func (c *esClientV56) createIndex(args ElasticsearchArgs) error {\n\texists, err := c.IndexExists(args.Index).Do(context.Background())\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\tvar createIndex *elastic.IndicesCreateResult\n\t\tif createIndex, err = c.CreateIndex(args.Index).Do(context.Background()); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif !createIndex.Acknowledged {\n\t\t\treturn fmt.Errorf(\"index %v not created\", args.Index)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Client) CreateIndex(name string, mapping interface{}) (*Response, error) {\n\tr := Request{\n\t\tQuery: mapping,\n\t\tIndexList: []string{name},\n\t\tMethod: \"PUT\",\n\t}\n\n\treturn c.Do(&r)\n}", "func addStoringIndex(ctx context.Context, w io.Writer, adminClient *database.DatabaseAdminClient, database string) error {\n\top, err := adminClient.UpdateDatabaseDdl(ctx, &adminpb.UpdateDatabaseDdlRequest{\n\t\tDatabase: database,\n\t\tStatements: []string{\n\t\t\t\"CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle) STORING (MarketingBudget)\",\n\t\t},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := op.Wait(ctx); err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(w, \"Added storing index\\n\")\n\treturn nil\n}", "func (b *mysql) Index(table *Table, index *Index) string {\n\tlog.Printf(\"create index:%+v\", index)\n\tvar obj = \"INDEX\"\n\tif index.Unique {\n\t\tobj = \"UNIQUE INDEX\"\n\t}\n\treturn fmt.Sprintf(\"CREATE %s %s ON %s (%s);\", obj, index.Name, table.Name, b.columns(nil, index.Fields, true, false, false))\n}", "func NewIndex() *Index {\n\treturn &Index{root: &node{}}\n}", 
"func createIndexMigration(cmd *cobra.Command, args []string) error {\n\t// Caller should supply a table name as the first argument and a column name\n\t// as the second argument.\n\tif len(args) < 2 {\n\t\treturn errors.New(\"requires a tablename argument followed by a columnname argument\")\n\t}\n\n\t// Set index data.\n\tidx := new(sqlt.Index)\n\tidx.SetTableName(args[0])\n\tidx.SetColumnName(args[1])\n\n\t// Process SQL template for \"up\" migration.\n\tupSQL, err := sqlt.ProcessTmpl(idx, sqlt.CreateDefaultIndexTmpl)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Process SQL template for \"down\" migration.\n\tdownSQL, err := sqlt.ProcessTmpl(idx, sqlt.DropIndexTmpl)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create migration file.\n\tmigrationName := fmt.Sprintf(\"CreateIndexOn_%s_%s\", idx.TableName(), idx.ColumnName())\n\terr = createMigration(migrationName, upSQL, downSQL)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn err\n}", "func createIndex(collection *mongo.Collection, field string, unique bool) bool {\n\tmod := mongo.IndexModel{\n\t\tKeys: bson.M{field: 1}, // index in ascending order or -1 for descending order\n\t\tOptions: options.Index().SetUnique(unique),\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\t_, err := collection.Indexes().CreateOne(ctx, mod)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (db *Database) CreateIndex(label, property string) (*Index, error) {\n\turi := join(db.Url, \"schema/index\", label)\n\tpayload := indexRequest{[]string{property}}\n\tresult := Index{db: db}\n\tne := NeoError{}\n\tresp, err := db.Session.Post(uri, payload, &result, &ne)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tswitch resp.Status() {\n\tcase 200:\n\t\treturn &result, nil // Success\n\tcase 405:\n\t\treturn nil, NotAllowed\n\t}\n\treturn nil, ne\n}", "func CreateIndexIfNotExists(e *elastic.Client, index string) error {\n\t// Use the IndexExists service to check if a specified index exists.\n\texists, err := e.IndexExists(index).Do(context.Background())\n\tif err != nil {\n\t\tlog.Printf(\"elastic: unable to check if Index exists - %s\\n\", err)\n\t\treturn err\n\t}\n\n\tif exists {\n\t\treturn nil\n\t}\n\n\t// Create a new index.\n\tv := reflect.TypeOf(Point{})\n\n\tmapping := MapStr{\n\t\t\"settings\": MapStr{\n\t\t\t\"number_of_shards\": 1,\n\t\t\t\"number_of_replicas\": 1,\n\t\t},\n\t\t\"mappings\": MapStr{\n\t\t\t\"doc\": MapStr{\n\t\t\t\t\"properties\": MapStr{},\n\t\t\t},\n\t\t},\n\t}\n\tfor i := 0; i < v.NumField(); i++ {\n\t\tfield := v.Field(i)\n\t\ttag := field.Tag.Get(\"elastic\")\n\t\tif len(tag) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\ttagfields := strings.Split(tag, \",\")\n\t\tmapping[\"mappings\"].(MapStr)[\"doc\"].(MapStr)[\"properties\"].(MapStr)[field.Name] = MapStr{}\n\t\tfor _, tagfield := range tagfields {\n\t\t\ttagfieldValues := strings.Split(tagfield, \":\")\n\t\t\tmapping[\"mappings\"].(MapStr)[\"doc\"].(MapStr)[\"properties\"].(MapStr)[field.Name].(MapStr)[tagfieldValues[0]] = tagfieldValues[1]\n\t\t}\n\t}\n\tmappingJSON, err := json.Marshal(mapping)\n\tif err != nil {\n\t\tlog.Printf(\"elastic: error on json marshal - %s\\n\", err)\n\t\treturn err\n\t}\n\n\t_, err = e.CreateIndex(index).BodyString(string(mappingJSON)).Do(context.Background())\n\tif err != nil {\n\t\tlog.Printf(\"elastic: error creating elastic index %s - %s\\n\", index, err)\n\t\treturn err\n\t}\n\tlog.Printf(\"elastic: index %s created\\n\", index)\n\treturn 
nil\n}", "func (c *index) Create(sctx sessionctx.Context, rm kv.RetrieverMutator, indexedValues []types.Datum, h int64, opts ...table.CreateIdxOptFunc) (int64, error) {\n\tvar opt table.CreateIdxOpt\n\tfor _, fn := range opts {\n\t\tfn(&opt)\n\t}\n\tss := opt.AssertionProto\n\twriteBufs := sctx.GetSessionVars().GetWriteStmtBufs()\n\tskipCheck := sctx.GetSessionVars().LightningMode || sctx.GetSessionVars().StmtCtx.BatchCheck\n\tkey, distinct, err := c.GenIndexKey(sctx.GetSessionVars().StmtCtx, indexedValues, h, writeBufs.IndexKeyBuf)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\tctx := opt.Ctx\n\tif opt.Untouched {\n\t\ttxn, err1 := sctx.Txn(true)\n\t\tif err1 != nil {\n\t\t\treturn 0, err1\n\t\t}\n\t\t// If the index kv was untouched(unchanged), and the key/value already exists in mem-buffer,\n\t\t// should not overwrite the key with un-commit flag.\n\t\t// So if the key exists, just do nothing and return.\n\t\t_, err = txn.GetMemBuffer().Get(ctx, key)\n\t\tif err == nil {\n\t\t\treturn 0, nil\n\t\t}\n\t}\n\n\t// save the key buffer to reuse.\n\twriteBufs.IndexKeyBuf = key\n\tif !distinct {\n\t\t// non-unique index doesn't need store value, write a '0' to reduce space\n\t\tvalue := []byte{'0'}\n\t\tif opt.Untouched {\n\t\t\tvalue[0] = kv.UnCommitIndexKVFlag\n\t\t}\n\t\terr = rm.Set(key, value)\n\t\tif ss != nil {\n\t\t\tss.SetAssertion(key, kv.None)\n\t\t}\n\t\treturn 0, err\n\t}\n\n\tif skipCheck {\n\t\tvalue := EncodeHandle(h)\n\t\tif opt.Untouched {\n\t\t\tvalue = append(value, kv.UnCommitIndexKVFlag)\n\t\t}\n\t\terr = rm.Set(key, value)\n\t\tif ss != nil {\n\t\t\tss.SetAssertion(key, kv.None)\n\t\t}\n\t\treturn 0, err\n\t}\n\n\tif ctx != nil {\n\t\tif span := opentracing.SpanFromContext(ctx); span != nil && span.Tracer() != nil {\n\t\t\tspan1 := span.Tracer().StartSpan(\"index.Create\", opentracing.ChildOf(span.Context()))\n\t\t\tdefer span1.Finish()\n\t\t\tctx = opentracing.ContextWithSpan(ctx, span1)\n\t\t}\n\t} else {\n\t\tctx = context.TODO()\n\t}\n\n\tvar value []byte\n\tvalue, err = rm.Get(ctx, key)\n\t// If (opt.Untouched && err == nil) is true, means the key is exists and exists in TiKV, not in txn mem-buffer,\n\t// then should also write the untouched index key/value to mem-buffer to make sure the data\n\t// is consistent with the index in txn mem-buffer.\n\tif kv.IsErrNotFound(err) || (opt.Untouched && err == nil) {\n\t\tv := EncodeHandle(h)\n\t\tif opt.Untouched {\n\t\t\tv = append(v, kv.UnCommitIndexKVFlag)\n\t\t}\n\t\terr = rm.Set(key, v)\n\t\tif ss != nil {\n\t\t\tss.SetAssertion(key, kv.NotExist)\n\t\t}\n\t\treturn 0, err\n\t}\n\n\thandle, err := DecodeHandle(value)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn handle, kv.ErrKeyExists\n}", "func updateIndex(indexName string, objects []algoliasearch.Object) error {\n\n\tindex := algoliaClient.InitIndex(indexName)\n\terr := populateIndex(index, objects)\n\tif err != nil {\n\t\treturn errors.New(\"Error updating index -\" + err.Error())\n\t}\n\n\treturn nil\n}", "func (c *Collection) indexAdd(tx ds.Txn, key ds.Key, data []byte) error {\n\tfor path, index := range c.indexes {\n\t\terr := c.indexUpdate(path, index, tx, key, data, false)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (st *Schema) addCreateIndex(ci sql.CreateIndexStmt) {\n\tst.Indexes = append(st.Indexes, SchemaIndex{\n\t\tIndex: ci.Index,\n\t\tColumns: st.toIndexColumns(ci.IndexedColumns),\n\t})\n}", "func (db *Database) createTimestampIndex() {\n\tindexView := 
db.database.Collection(TRACKS.String()).Indexes()\n\n\tindexModel := mongo.IndexModel{\n\t\tKeys: bson.NewDocument(bson.EC.Int32(\"ts\", -1))}\n\n\t_, err := indexView.CreateOne(context.Background(), indexModel, nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (s *searcher) CreateIndex() error {\n\tcolor.Cyan(\"[start] initialize index.\")\n\t// get user\n\tuser, reload, err := s.getUser()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[err] createIndex %w\", err)\n\t}\n\n\t// check to whether exist starred items or not.\n\tvar isNewIndex bool\n\tif err := s.db.Update(func(tx *bolt.Tx) error {\n\t\tvar err error\n\t\tbucket := tx.Bucket([]byte(starredBucketName(s.gitToken)))\n\t\tif bucket == nil {\n\t\t\tbucket, err = tx.CreateBucket([]byte(starredBucketName(s.gitToken)))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tisNewIndex = true\n\t\t} else {\n\t\t\tisNewIndex = false\n\t\t}\n\t\treturn nil\n\t}); err != nil {\n\t\tClearAll()\n\t\tcolor.Yellow(\"[err] collapse db file, so delete db file\")\n\t\treturn fmt.Errorf(\"[err] createIndex %w\", err)\n\t}\n\n\t// read old database.\n\tvar oldStarredList []*git.Starred\n\toldStarredMap := map[string]*git.Starred{}\n\tif !isNewIndex {\n\t\t// read old starred from db\n\t\ts.db.View(func(tx *bolt.Tx) error {\n\t\t\tbucket := tx.Bucket([]byte(starredBucketName(s.gitToken)))\n\t\t\tbucket.ForEach(func(k, v []byte) error {\n\t\t\t\tvar starred *git.Starred\n\t\t\t\tif err := json.Unmarshal(v, &starred); err != nil {\n\t\t\t\t\tcolor.Yellow(\"[err] parsing %s\", string(k))\n\t\t\t\t} else {\n\t\t\t\t\toldStarredList = append(oldStarredList, starred)\n\t\t\t\t\toldStarredMap[starred.FullName] = starred\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t})\n\t\t\treturn nil\n\t\t})\n\n\t\t// write old starred to index\n\t\tfor _, starred := range oldStarredList {\n\t\t\tif err := s.index.Index(starred.FullName, starred); err != nil {\n\t\t\t\tcolor.Yellow(\"[err] indexing %s\", starred.FullName)\n\t\t\t}\n\t\t}\n\t}\n\n\t// are you all ready?\n\tif !reload && !isNewIndex {\n\t\tcount, _ := s.index.DocCount()\n\t\tcolor.Green(\"[success][using cache] %d items\", count)\n\t\treturn nil\n\t}\n\n\t// reload new starred list.\n\tnewStarredList, err := s.git.ListStarredAll()\n\tif err != nil {\n\t\tcolor.Yellow(\"[err] don't getting starred list %s\", err.Error())\n\t\tif !isNewIndex {\n\t\t\tcount, _ := s.index.DocCount()\n\t\t\tcolor.Yellow(\"[fail][using cache] %d items\", count)\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"[err] CreateIndex %w\", err)\n\t}\n\tnewStarredMap := map[string]*git.Starred{}\n\tfor _, starred := range newStarredList {\n\t\tnewStarredMap[starred.FullName] = starred\n\t}\n\n\t// update and insert\n\tif isNewIndex {\n\t\tcolor.White(\"[refresh] all repositories\")\n\t\ts.git.SetReadme(newStarredList)\n\t\ts.writeDBAndIndex(newStarredList)\n\t} else {\n\t\t// insert or update starred\n\t\tvar insertList []*git.Starred\n\t\tvar updateList []*git.Starred\n\t\tfor _, newStarred := range newStarredList {\n\t\t\tif oldStarred, ok := oldStarredMap[newStarred.FullName]; !ok {\n\t\t\t\tinsertList = append(insertList, newStarred)\n\t\t\t\tcolor.White(\"[insert] %s repository pushed_at %s\",\n\t\t\t\t\tnewStarred.FullName, newStarred.PushedAt.Format(time.RFC3339))\n\t\t\t} else {\n\t\t\t\tif oldStarred.PushedAt.Unix() != newStarred.PushedAt.Unix() &&\n\t\t\t\t\toldStarred.CachedAt.Unix() < time.Now().Add(-24*7*time.Hour).Unix() { // after 7 days.\n\t\t\t\t\tupdateList = append(updateList, 
newStarred)\n\t\t\t\t\tcolor.White(\"[update] %s repository pushed_at %s\",\n\t\t\t\t\t\tnewStarred.FullName, newStarred.PushedAt.Format(time.RFC3339))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// insert\n\t\ts.git.SetReadme(insertList)\n\t\ts.writeDBAndIndex(insertList)\n\n\t\t// update\n\t\ts.git.SetReadme(updateList)\n\t\ts.writeDBAndIndex(updateList)\n\n\t\t// delete starred\n\t\tvar deleteList []*git.Starred\n\t\tfor _, oldStarred := range oldStarredList {\n\t\t\tif _, ok := newStarredMap[oldStarred.FullName]; !ok {\n\t\t\t\tdeleteList = append(deleteList, oldStarred)\n\t\t\t\tcolor.White(\"[delete] %s repository pushed_at %s\",\n\t\t\t\t\toldStarred.FullName, oldStarred.PushedAt.Format(time.RFC3339))\n\t\t\t}\n\t\t}\n\t\t// delete\n\t\ts.deleteDBAndIndex(deleteList)\n\t}\n\n\t// rewrite a user to db\n\tuserData, err := json.Marshal(user)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"[err] createIndex %w\", err)\n\t}\n\ts.db.Update(func(tx *bolt.Tx) error {\n\t\tbucket := tx.Bucket([]byte(userBucketName))\n\t\tbucket.Put([]byte(s.gitToken), userData)\n\t\treturn nil\n\t})\n\n\tcount, _ := s.index.DocCount()\n\tcolor.Green(\"[success][new reload] %d items\", count)\n\treturn nil\n}", "func (ac *AdminClient) CreateIndex(ctx context.Context, db, table string, indexMeta *tspb.IndexMeta) error {\n\tin := &tspb.CreateIndexRequest{\n\t\tDatabase: db,\n\t\tTable: table,\n\t\tIndexes: indexMeta,\n\t}\n\treturn retry.Invoke(ctx, func(ctx context.Context, settings retry.CallSettings) error {\n\t\t_, err := ac.pbCli.CreateIndex(ctx, in)\n\t\treturn err\n\t})\n}", "func (es *Connection) CreateIndex(index string, body interface{}) (int, *QueryResult, error) {\n\treturn withQueryResult(es.apiCall(\"PUT\", index, \"\", \"\", \"\", nil, body))\n}", "func CreateIndex(i string) {\n\tcreateIndex, err := client.CreateIndex(indexName).\n\t\tBody(indexMapping).\n\t\tDo(context.Background())\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\tif !createIndex.Acknowledged {\n\t\tlog.Println(\"CreateIndex was not acknowledged. 
Check that timeout value is correct.\")\n\t}\n}", "func NewIndex(f *os.File, c Config) (*Index, error) {\n\tidx := &Index{\n\t\tfile: f,\n\t}\n\n\tfi, err := os.Stat(f.Name())\n\tif err != nil {\n\t\treturn nil, lib.Wrap(err, \"Unable to get file stats\")\n\t}\n\n\tidx.size = uint64(fi.Size())\n\tif err = os.Truncate(\n\t\tf.Name(), int64(c.Segment.MaxIndexBytes),\n\t); err != nil {\n\t\treturn nil, lib.Wrap(err, \"Unable to truncate file\")\n\t}\n\n\tif idx.mmap, err = gommap.Map(\n\t\tidx.file.Fd(),\n\t\tgommap.PROT_READ|gommap.PROT_WRITE,\n\t\tgommap.MAP_SHARED,\n\t); err != nil {\n\t\treturn nil, lib.Wrap(err, \"Unable to create gommap map\")\n\t}\n\n\treturn idx, nil\n}", "func New(indexRegistry *registry.IndexRegistry, options ...func(*Index)) (I *Index, err error) {\n\tI = &Index{\n\t\tindexRegistry: indexRegistry,\n\t}\n\n\tfor _, option := range options {\n\t\toption(I)\n\t}\n\n\treturn\n}", "func (api *ElasticAPI) CreateSearchIndex(ctx context.Context, instanceID, dimension string) (int, error) {\n\t*api.NumberOfCalls++\n\n\tif api.InternalServerError {\n\t\treturn 0, errorInternalServer\n\t}\n\n\treturn 201, nil\n}", "func createIndexes(db *sql.DB, table string) error {\n\tindexes := []string{}\n\n\tswitch table {\n\tcase \"dfp\":\n\t\tindexes = []string{\n\t\t\t\"CREATE INDEX IF NOT EXISTS dfp_metrics ON dfp (CODE, ID_CIA, YEAR, VL_CONTA);\",\n\t\t\t\"CREATE INDEX IF NOT EXISTS dfp_year_ver ON dfp (ID_CIA, YEAR, VERSAO);\",\n\t\t}\n\tcase \"itr\":\n\t\tindexes = []string{\n\t\t\t\"CREATE INDEX IF NOT EXISTS itr_metrics ON itr (CODE, ID_CIA, YEAR, VL_CONTA);\",\n\t\t\t\"CREATE INDEX IF NOT EXISTS itr_quarter_ver ON itr (ID_CIA, DT_FIM_EXERC, VERSAO);\",\n\t\t}\n\tcase \"stock_quotes\":\n\t\tindexes = []string{\n\t\t\t\"CREATE UNIQUE INDEX IF NOT EXISTS stock_quotes_stockdate ON stock_quotes (stock, date);\",\n\t\t}\n\tcase \"fii_dividends\":\n\t\tindexes = []string{\n\t\t\t\"CREATE UNIQUE INDEX IF NOT EXISTS fii_dividends_pk ON fii_dividends (trading_code, base_date);\",\n\t\t}\n\t}\n\n\tfor _, idx := range indexes {\n\t\t_, err := db.Exec(idx)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"erro ao criar índice\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func newQueueIndex(dataDir string) (*queueIndex, error) {\n\tindexFile := path.Join(dataDir, cIndexFileName)\n\tindexArena, err := newArena(indexFile, cIndexFileSize)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &queueIndex{\n\t\tindexFile: indexFile,\n\t\tindexArena: indexArena,\n\t}, nil\n}", "func New() *Index {\n\treturn &Index{Version: Version}\n}", "func addIndexes() {\n\tvar err error\n\n\tufIndex1 := mgo.Index{\n\t\tKey: []string{\"codigo\"},\n\t\tUnique: true,\n\t\tBackground: true,\n\t\tSparse: true,\n\t}\n\tmunicipioIndex1 := mgo.Index{\n\t\tKey: []string{\"codigo\"},\n\t\tUnique: true,\n\t\tBackground: true,\n\t\tSparse: true,\n\t}\n\t// Add indexes into MongoDB\n\tsession := Session.Copy()\n\tdefer session.Close()\n\tufCol := session.DB(commons.AppConfig.Database).C(\"ufs\")\n\tmunicipioCol := session.DB(commons.AppConfig.Database).C(\"municipios\")\n\n\t// cria indice codigo para UF\n\terr = ufCol.EnsureIndex(ufIndex1)\n\tif err != nil {\n\t\tlog.Fatalf(\"[addIndexes]: %s\\n\", err)\n\t}\n\tlog.Println(\"Indice para UF criado com sucesso\")\n\n\t// cria indice codigo para Municipio\n\terr = municipioCol.EnsureIndex(municipioIndex1)\n\tif err != nil {\n\t\tlog.Fatalf(\"[addIndexes]: %s\\n\", err)\n\t}\n\tlog.Println(\"Indice para Municipio criado com sucesso\")\n\n}", "func EnsureIndex(cd 
*mongo.Collection, indexQuery []string) error {\n\n\t// options for index\n\topts := options.CreateIndexes().SetMaxTime(5 * time.Second)\n\n\t// index model\n\tindex := []mongo.IndexModel{}\n\n\t// creating multiple index query\n\tfor _, val := range indexQuery {\n\t\ttemp := mongo.IndexModel{}\n\t\ttemp.Keys = bsonx.Doc{{Key: val, Value: bsonx.Int32(1)}}\n\t\tindex = append(index, temp)\n\t}\n\n\t// executng index query\n\t_, err := cd.Indexes().CreateMany(context.Background(), index, opts)\n\tif err != nil {\n\t\tfmt.Errorf(\"Error while executing index Query\", err.Error())\n\t\treturn err\n\t}\n\n\t// if executed successfully then return nil\n\treturn nil\n}", "func (a *Adapter) ensureIndex(ctx context.Context) error {\n\t_, err := a.c.IndexPutTemplate(activeIndexAlias).BodyString(activeIndexTemplate).Do(ctx)\n\tif err != nil {\n\t\tlog.Fatal(\"Failed to create index template\", zap.Error(err))\n\t}\n\n\texists, err := a.c.IndexExists(activeIndexAlias).Do(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\ta.c.CreateIndex(activeIndexAlias + \"-000001\").Do(ctx)\n\t\tif err != nil {\n\t\t\tlog.Fatal(\"Failed to create initial index\", zap.Error(err))\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func TestEnsureHashIndex(t *testing.T) {\n\tc := createClient(t, nil)\n\tdb := ensureDatabase(nil, c, \"index_test\", nil, t)\n\n\ttestOptions := []*driver.EnsureHashIndexOptions{\n\t\tnil,\n\t\t{Unique: true, Sparse: false},\n\t\t{Unique: true, Sparse: true},\n\t\t{Unique: false, Sparse: false},\n\t\t{Unique: false, Sparse: true},\n\t}\n\n\tfor i, options := range testOptions {\n\t\tcol := ensureCollection(nil, db, fmt.Sprintf(\"hash_index_test_%d\", i), nil, t)\n\n\t\tidx, created, err := col.EnsureHashIndex(nil, []string{\"name\"}, options)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to create new index: %s\", describe(err))\n\t\t}\n\t\tif !created {\n\t\t\tt.Error(\"Expected created to be true, got false\")\n\t\t}\n\t\tif idxType := idx.Type(); idxType != driver.HashIndex {\n\t\t\tt.Errorf(\"Expected HashIndex, found `%s`\", idxType)\n\t\t}\n\t\tif options != nil && idx.Unique() != options.Unique {\n\t\t\tt.Errorf(\"Expected Unique to be %t, found `%t`\", options.Unique, idx.Unique())\n\t\t}\n\t\tif options != nil && idx.Sparse() != options.Sparse {\n\t\t\tt.Errorf(\"Expected Sparse to be %t, found `%t`\", options.Sparse, idx.Sparse())\n\t\t}\n\n\t\t// Index must exists now\n\t\tif found, err := col.IndexExists(nil, idx.Name()); err != nil {\n\t\t\tt.Fatalf(\"Failed to check index '%s' exists: %s\", idx.Name(), describe(err))\n\t\t} else if !found {\n\t\t\tt.Errorf(\"Index '%s' does not exist, expected it to exist\", idx.Name())\n\t\t}\n\n\t\t// Ensure again, created must be false now\n\t\t_, created, err = col.EnsureHashIndex(nil, []string{\"name\"}, options)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to re-create index: %s\", describe(err))\n\t\t}\n\t\tif created {\n\t\t\tt.Error(\"Expected created to be false, got true\")\n\t\t}\n\n\t\t// Remove index\n\t\tif err := idx.Remove(nil); err != nil {\n\t\t\tt.Fatalf(\"Failed to remove index '%s': %s\", idx.Name(), describe(err))\n\t\t}\n\n\t\t// Index must not exists now\n\t\tif found, err := col.IndexExists(nil, idx.Name()); err != nil {\n\t\t\tt.Fatalf(\"Failed to check index '%s' exists: %s\", idx.Name(), describe(err))\n\t\t} else if found {\n\t\t\tt.Errorf(\"Index '%s' does exist, expected it not to exist\", idx.Name())\n\t\t}\n\t}\n}", "func newIndexWithTempPath(name string) *Index {\n\tpath, err := 
ioutil.TempDir(\"\", \"pilosa-index-\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tindex, err := NewIndex(path, name)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn index\n}", "func (m *MongoDB) CreateIndex(name, key string, order int) (string, error) {\n\tcoll, ok := m.coll[name]\n\tif !ok {\n\t\treturn \"\", fmt.Errorf(\"not defined collection %s\", name)\n\t}\n\n\tasscending := 1\n\tif order == -1 {\n\t\tasscending = -1\n\t}\n\n\tmodel := mongo.IndexModel{\n\t\tKeys: bson.D{{Key: key, Value: asscending}},\n\t\t//Options: options.Index().SetBackground(true),\n\t}\n\n\topts := options.CreateIndexes().SetMaxTime(2 * time.Second)\n\n\treturn coll.Indexes().CreateOne(m.ctx, model, opts)\n}", "func (c *Collection) addIndex(schema *jsonschema.Schema, index Index, opts ...Option) error {\n\targs := &Options{}\n\tfor _, opt := range opts {\n\t\topt(args)\n\t}\n\n\t// Don't allow the default index to be overwritten\n\tif index.Path == idFieldName {\n\t\tif _, ok := c.indexes[idFieldName]; ok {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t// Validate path and type.\n\tjt, err := getSchemaTypeAtPath(schema, index.Path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar valid bool\n\tfor _, t := range indexTypes {\n\t\tif jt.Type == t {\n\t\t\tvalid = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !valid {\n\t\treturn ErrNotIndexable\n\t}\n\n\t// Skip if nothing to do\n\tif x, ok := c.indexes[index.Path]; ok && index.Unique == x.Unique {\n\t\treturn nil\n\t}\n\n\t// Ensure collection does not contain multiple instances with the same value at path\n\tif index.Unique && index.Path != idFieldName {\n\t\tvals := make(map[interface{}]struct{})\n\t\tall, err := c.Find(&Query{}, WithTxnToken(args.Token))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, i := range all {\n\t\t\tres := gjson.GetBytes(i, index.Path)\n\t\t\tif !res.Exists() {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif _, ok := vals[res.Value()]; ok {\n\t\t\t\treturn ErrCantCreateUniqueIndex\n\t\t\t} else {\n\t\t\t\tvals[res.Value()] = struct{}{}\n\t\t\t}\n\t\t}\n\t}\n\n\tc.indexes[index.Path] = index\n\treturn c.saveIndexes()\n}", "func (s *Store) CreateUserIndex(token string) error {\n\n\tj := indexSettingsAndMapping()\n\n\tif _, err := s.ES.CreateIndex(token).BodyJson(j).Do(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (i *SGIndex) createIfNeeded(bucket *base.CouchbaseBucketGoCB, useXattrs bool, numReplica uint) error {\n\n\tif i.isXattrOnly() && !useXattrs {\n\t\treturn nil\n\t}\n\n\tindexName := i.fullIndexName(useXattrs)\n\n\texists, _, metaErr := bucket.GetIndexMeta(indexName)\n\tif metaErr != nil {\n\t\treturn metaErr\n\t}\n\tif exists {\n\t\treturn nil\n\t}\n\n\t// Create index\n\tindexExpression := replaceSyncTokensIndex(i.expression, useXattrs)\n\tfilterExpression := replaceSyncTokensIndex(i.filterExpression, useXattrs)\n\n\tvar options *base.N1qlIndexOptions\n\t// We want to pass nil options unless one or more of the WITH elements are required\n\tif numReplica > 0 || i.shouldIndexTombstones(useXattrs) {\n\t\toptions = &base.N1qlIndexOptions{\n\t\t\tNumReplica: numReplica,\n\t\t\tIndexTombstones: i.shouldIndexTombstones(useXattrs),\n\t\t}\n\t}\n\n\tsleeper := base.CreateDoublingSleeperFunc(\n\t\t11, //MaxNumRetries approx 10 seconds total retry duration\n\t\t5, //InitialRetrySleepTimeMS\n\t)\n\n\t//start a retry loop to create index,\n\tworker := func() (shouldRetry bool, err error, value interface{}) {\n\t\terr = bucket.CreateIndex(indexName, indexExpression, filterExpression, options)\n\t\tif err != nil {\n\t\t\tbase.Warn(\"Error 
creating index %s: %v - will retry.\", indexName, err)\n\t\t}\n\t\treturn err != nil, err, nil\n\t}\n\n\tdescription := fmt.Sprintf(\"Attempt to create index %s\", indexName)\n\terr, _ := base.RetryLoop(description, worker, sleeper)\n\n\tif err != nil {\n\t\treturn pkgerrors.Wrapf(err, \"Error installing Couchbase index: %v\", indexName)\n\t}\n\n\t// Wait for created index to come online\n\treturn bucket.WaitForIndexOnline(indexName)\n}", "func (_e *MockDataCoord_Expecter) CreateIndex(ctx interface{}, req interface{}) *MockDataCoord_CreateIndex_Call {\n\treturn &MockDataCoord_CreateIndex_Call{Call: _e.mock.On(\"CreateIndex\", ctx, req)}\n}", "func NewIndex(mapping IndexMapping, opts ...IndexOption) *Index {\n\tindex := &Index{\n\t\tIndexMapping: mapping,\n\t\tpopulateBatchSize: defaultPopulateBatchSize,\n\t}\n\n\tfor _, opt := range opts {\n\t\topt(index)\n\t}\n\n\treturn index\n}", "func (db *MongoDbBridge) createIndexIfNotExists(col *mongo.Collection, view *mongo.IndexView, ix mongo.IndexModel, known []*mongo.IndexSpecification) error {\n\t// throw if index is not explicitly named\n\tif ix.Options.Name == nil {\n\t\treturn fmt.Errorf(\"index name not defined on %s\", col.Name())\n\t}\n\n\t// do we know the index?\n\tfor _, spec := range known {\n\t\tif spec.Name == *ix.Options.Name {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\tcreatedName, err := view.CreateOne(context.Background(), ix)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create index %s on %s\", *ix.Options.Name, col.Name())\n\t}\n\tdb.log.Noticef(\"created index %s on %s\", createdName, col.Name())\n\treturn nil\n}", "func TestEnsurePersistentIndex(t *testing.T) {\n\tc := createClient(t, nil)\n\tdb := ensureDatabase(nil, c, \"index_test\", nil, t)\n\n\ttestOptions := []*driver.EnsurePersistentIndexOptions{\n\t\tnil,\n\t\t{Unique: true, Sparse: false},\n\t\t{Unique: true, Sparse: true},\n\t\t{Unique: false, Sparse: false},\n\t\t{Unique: false, Sparse: true},\n\t}\n\n\tfor i, options := range testOptions {\n\t\tcol := ensureCollection(nil, db, fmt.Sprintf(\"persistent_index_test_%d\", i), nil, t)\n\n\t\tidx, created, err := col.EnsurePersistentIndex(nil, []string{\"age\", \"name\"}, options)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to create new index: %s\", describe(err))\n\t\t}\n\t\tif !created {\n\t\t\tt.Error(\"Expected created to be true, got false\")\n\t\t}\n\t\tif idxType := idx.Type(); idxType != driver.PersistentIndex {\n\t\t\tt.Errorf(\"Expected PersistentIndex, found `%s`\", idxType)\n\t\t}\n\t\tif options != nil && idx.Unique() != options.Unique {\n\t\t\tt.Errorf(\"Expected Unique to be %t, found `%t`\", options.Unique, idx.Unique())\n\t\t}\n\t\tif options != nil && idx.Sparse() != options.Sparse {\n\t\t\tt.Errorf(\"Expected Sparse to be %t, found `%t`\", options.Sparse, idx.Sparse())\n\t\t}\n\n\t\t// Index must exist now\n\t\tif found, err := col.IndexExists(nil, idx.Name()); err != nil {\n\t\t\tt.Fatalf(\"Failed to check index '%s' exists: %s\", idx.Name(), describe(err))\n\t\t} else if !found {\n\t\t\tt.Errorf(\"Index '%s' does not exist, expected it to exist\", idx.Name())\n\t\t}\n\n\t\t// Ensure again, created must be false now\n\t\t_, created, err = col.EnsurePersistentIndex(nil, []string{\"age\", \"name\"}, options)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Failed to re-create index: %s\", describe(err))\n\t\t}\n\t\tif created {\n\t\t\tt.Error(\"Expected created to be false, got true\")\n\t\t}\n\n\t\t// Remove index\n\t\tif err := idx.Remove(nil); err != nil {\n\t\t\tt.Fatalf(\"Failed to remove index '%s': 
%s\", idx.Name(), describe(err))\n\t\t}\n\n\t\t// Index must not exist now\n\t\tif found, err := col.IndexExists(nil, idx.Name()); err != nil {\n\t\t\tt.Fatalf(\"Failed to check index '%s' exists: %s\", idx.Name(), describe(err))\n\t\t} else if found {\n\t\t\tt.Errorf(\"Index '%s' does exist, expected it not to exist\", idx.Name())\n\t\t}\n\t}\n}", "func NewAutoincrementIndex(o ...option.Option) index.Index {\n\topts := &option.Options{}\n\tfor _, opt := range o {\n\t\topt(opts)\n\t}\n\n\tu := &Autoincrement{\n\t\tindexBy: opts.IndexBy,\n\t\ttypeName: opts.TypeName,\n\t\tfilesDir: opts.FilesDir,\n\t\tbound: opts.Bound,\n\t\tindexBaseDir: path.Join(opts.DataDir, \"index.cs3\"),\n\t\tindexRootDir: path.Join(path.Join(opts.DataDir, \"index.cs3\"), strings.Join([]string{\"autoincrement\", opts.TypeName, opts.IndexBy}, \".\")),\n\t\tcs3conf: &Config{\n\t\t\tProviderAddr: opts.ProviderAddr,\n\t\t\tDataURL: opts.DataURL,\n\t\t\tDataPrefix: opts.DataPrefix,\n\t\t\tJWTSecret: opts.JWTSecret,\n\t\t\tServiceUser: opts.ServiceUser,\n\t\t},\n\t\tdataProvider: dataProviderClient{\n\t\t\tbaseURL: singleJoiningSlash(opts.DataURL, opts.DataPrefix),\n\t\t\tclient: http.Client{\n\t\t\t\tTransport: http.DefaultTransport,\n\t\t\t},\n\t\t},\n\t}\n\n\treturn u\n}", "func NewIndexed() *Indexed {\n\trand.Seed(time.Now().UTC().UnixNano())\n\treturn &Indexed{\n\t\tsize: 0,\n\t}\n}", "func idIndex() mgo.Index {\n\treturn mgo.Index{\n\t\tKey: []string{\"id\"},\n\t\tUnique: true,\n\t\tDropDups: true,\n\t\tBackground: true,\n\t\tSparse: true,\n\t}\n}", "func AddIndex(db MongoDB, m metrics.Metrics, col string, indexes ...mgo.Index) error {\n\tdefer m.CollectMetrics(\"DB.AddIndex\")\n\n\tif len(indexes) == 0 {\n\t\treturn nil\n\t}\n\n\tdatabase, session, err := db.New(false)\n\tif err != nil {\n\t\tm.Emit(metrics.Errorf(\"Failed to create session for index\"), metrics.With(\"collection\", col), metrics.With(\"error\", err.Error()))\n\t\treturn err\n\t}\n\n\tdefer session.Close()\n\n\tcollection := database.C(col)\n\n\tfor _, index := range indexes {\n\t\tif err := collection.EnsureIndex(index); err != nil {\n\t\t\tm.Emit(metrics.Errorf(\"Failed to ensure session index\"), metrics.With(\"collection\", col), metrics.With(\"index\", index), metrics.With(\"error\", err.Error()))\n\t\t\treturn err\n\t\t}\n\n\t\tm.Emit(metrics.Info(\"Succeeded in ensuring collection index\"), metrics.With(\"collection\", col), metrics.With(\"index\", index))\n\t}\n\n\tm.Emit(metrics.Info(\"Finished adding index\"), metrics.With(\"collection\", col))\n\treturn nil\n}", "func refreshIndex(c *Client) error {\n\tidx, err := c.GitDir.ReadIndex()\n\tif err != nil {\n\t\treturn err\n\t}\n\tnidx, err := UpdateIndex(c, idx, UpdateIndexOptions{Refresh: true}, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tf, err := c.GitDir.Create(\"index\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := nidx.WriteIndex(f); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func newIndexCache(size uint64) *indexCache {\n\treturn &indexCache{sizecache.New(size), sync.NewCond(&sync.Mutex{}), map[addr]struct{}{}}\n}", "func New(data []byte) *Index {}", "func generateIndex(textsNumber int, wordsNumber int) *Index {\n\ttitles := make([]string, textsNumber)\n\tentries := make(map[string]Set)\n\tfor i := 0; i < textsNumber; i++ {\n\t\ttitles[i] = fmt.Sprintf(\"title-with-number-%d\", i)\n\t}\n\tfor i := 0; i < wordsNumber; i++ {\n\t\tset := Set{}\n\t\tfor j := 0; j < textsNumber; j++ {\n\t\t\tset.Put(j)\n\t\t}\n\t\tentries[fmt.Sprintf(\"w%d\", i)] = set\n\t}\n\treturn 
&Index{\n\t\tTitles: titles,\n\t\tData: entries,\n\t}\n}", "func (m *Mongo) Index(gid string, background bool) error {\n\tm.Session.ResetIndexCache()\n\n\tsessionCopy := m.Session.Copy()\n\tdefer sessionCopy.Close()\n\tcol := sessionCopy.DB(m.DBName).C(gid)\n\n\tcInfo := &mgo.CollectionInfo{DisableIdIndex: true}\n\terr := col.Create(cInfo)\n\tif err != nil {\n\t\tlog.Error(err)\n\t}\n\n\t/*\n\t\t// TODO figure out the magic of mongo indexes\n\t\tindex := mgo.Index{\n\t\t\tKey: []string{\"g\", \"s\", \"p\", \"o\"},\n\t\t\tBackground: false,\n\t\t\tSparse: true,\n\t\t\tUnique: true,\n\t\t\tDropDups: true,\n\t\t}\n\t\terr := col.EnsureIndex(index)\n\t\treturn err\n\t*/\n\n\tindex := mgo.Index{\n\t\tKey: []string{\"g\", \"s\"},\n\t\tBackground: background,\n\t\tSparse: true,\n\t}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"o\"}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"p\"}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"s\", \"p\"}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"s\", \"o\"}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"p\", \"o\"}\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\tindex.Key = []string{\"g\", \"s\", \"p\", \"o\"}\n\tindex.Unique = true\n\tindex.DropDups = true\n\terr = col.EnsureIndex(index)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\t//return err\n\t}\n\tlog.V(2).Infof(\"%+v\", index)\n\n\treturn nil\n}", "func EnsureIndex(db *mongo.Database, collectionName string, keys bson.M, opt *options.IndexOptions) {\n\tvar keyIndex []string\n\tfor k := range keys {\n\t\tkeyIndex = append(keyIndex, k)\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tcollection := db.Collection(collectionName)\n\n\tindexes := collection.Indexes()\n\tcursor, err := indexes.List(ctx)\n\tif err != nil {\n\t\tlog.Panicf(\"index list error %v\", err)\n\t}\n\n\tif cursor != nil {\n\t\tfor cursor.Next(ctx) {\n\t\t\tvar index []primitive.E\n\t\t\terrCursor := cursor.Decode(&index)\n\t\t\tif errCursor != nil {\n\t\t\t\tlog.Panicf(\"index list error %v\", errCursor)\n\t\t\t}\n\n\t\t\t// skip creating index if key field already exist\n\t\t\tkeyIsExist := keyFieldIndexIsExist(index, keyIndex)\n\t\t\tif keyIsExist {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tmod := mongo.IndexModel{\n\t\t\tKeys: keys,\n\t\t\tOptions: opt,\n\t\t}\n\n\t\topts := options.CreateIndexes().SetMaxTime(5 * time.Second)\n\t\t_, err = collection.Indexes().CreateOne(ctx, mod, opts)\n\t\tif err != nil {\n\t\t\tlog.Panicf(\"ensure index error %v\", err)\n\t\t}\n\t}\n}", "func New(data []byte) *Index", "func NewIndex(data []byte) (*Index, error) {\n\tvar i Index\n\tdec := gob.NewDecoder(bytes.NewBuffer(data))\n\tif err := dec.Decode(&i); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &i, nil\n}", "func addToIndex(repo *git.Repository, path string) error {\n\n\tindex, err := repo.Index()\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\terr = index.AddByPath(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, err = index.WriteTree()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = index.Write()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn err\n}", "func (m *DataRepositoryMongo) CreateIndex(collectionName string, indexes map[string]interface{}) <-chan error {\n\tresult := make(chan error)\n\tgo func() {\n\n\t\tvar (\n\t\t\terr error\n\t\t\tcollection *mongo.Collection\n\t\t\tctx context.Context\n\t\t)\n\n\t\tcollection, err = m.Client.GetCollection(collectionName)\n\t\tif err != nil {\n\t\t\tlog.Error(\"Get collection %s err (%s)! \\n\", collectionName, err.Error())\n\t\t\tresult <- err\n\t\t}\n\n\t\tctx, err = m.Client.GetContext()\n\t\tif err != nil {\n\t\t\tlog.Error(\"Get context err (%s)! \\n\", err.Error())\n\t\t\tresult <- err\n\t\t}\n\n\t\tvar indexList []mongo.IndexModel\n\n\t\tfor key, value := range indexes {\n\t\t\tindexOption := &options.IndexOptions{}\n\t\t\tindexOption = indexOption.SetBackground(true)\n\t\t\tindex := mongo.IndexModel{Keys: bson.M{key: value}, Options: indexOption}\n\t\t\tindexList = append(indexList, index)\n\t\t}\n\n\t\t_, err = collection.Indexes().CreateMany(ctx, indexList)\n\t\tresult <- err\n\t\tclose(result)\n\t}()\n\n\treturn result\n}", "func createIndex(name string, paths []interface{}, wildcards []string) {\r\n\tf, err := os.Create(name)\r\n\tcheck(err)\r\n\tdefer f.Close()\r\n\tw := bufio.NewWriter(f)\r\n\tindexContents := []string{}\r\n\tfor _, path := range paths {\r\n\t\tp := path.(string)\r\n\t\tfilepath.Walk(p, walker(&indexContents, wildcards))\r\n\t}\r\n\tfor i := range indexContents {\r\n\t\ts := fmt.Sprintln(indexContents[i])\r\n\t\tbc, err := w.WriteString(s)\r\n\t\tcheck(err)\r\n\t\tif bc < len(s) {\r\n\t\t\tpanic(fmt.Sprintf(\"Couldn't write to %s\", name))\r\n\t\t}\r\n\t}\r\n\tw.Flush()\r\n\treturn\r\n}", "func createColumnIndex(tableName, colName string) {\n\tadapter := adapters[db.DriverName()]\n\tquery := fmt.Sprintf(`\n\t\tCREATE INDEX %s ON %s (%s)\n\t`, fmt.Sprintf(\"%s_%s_index\", tableName, colName), adapter.quoteTableName(tableName), colName)\n\tdbExecuteNoTx(query)\n}", "func (ec *ElasticClient) Create(indexname string, indextype string, jsondata interface{}) (string, error) {\n\tctx := ec.ctx\n\tid := genHashedID(jsondata)\n\n\tdebuges(\"Debug:Printing body %s\\n\", jsondata)\n\tresult, err := ec.client.Index().\n\t\tIndex(string(indexname)).\n\t\tType(string(indextype)).\n\t\tId(id).\n\t\tBodyJson(jsondata).\n\t\tDo(ctx)\n\tif err != nil {\n\t\t// Handle error\n\t\tdebuges(\"Create document Error %#v\", err)\n\t\treturn id, err\n\t}\n\tdebuges(\"Debug:Indexed %s to index %s, type %s\\n\", result.Id, result.Index, result.Type)\n\t// Flush to make sure the documents got written.\n\t// Flush asks Elasticsearch to free memory from the index and\n\t// flush data to disk.\n\t_, err = ec.client.Flush().Index(string(indexname)).Do(ctx)\n\treturn id, err\n\n}", "func (e *Engine) setIndex(index int64) {\n\te.Index = index\n\te.Name = naming.Name(index)\n}", "func New(ds datastore.TxnDatastore, api *apistruct.FullNodeStruct) (*Index, error) {\n\tcs := chainsync.New(api)\n\tstore, err := chainstore.New(txndstr.Wrap(ds, \"chainstore\"), cs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tinitMetrics()\n\tctx, cancel := context.WithCancel(context.Background())\n\ts := &Index{\n\t\tapi: api,\n\t\tstore: store,\n\t\tsignaler: signaler.New(),\n\t\tindex: IndexSnapshot{\n\t\t\tMiners: make(map[string]Slashes),\n\t\t},\n\t\tctx: ctx,\n\t\tcancel: 
cancel,\n\t\tfinished: make(chan struct{}),\n\t}\n\tif err := s.loadFromDS(); err != nil {\n\t\treturn nil, err\n\t}\n\tgo s.start()\n\treturn s, nil\n}", "func addIndexOp(bucket *bolt.Bucket, k []byte) error {\n\treturn bucket.Put(k, []byte{})\n}", "func init() {\n\tindexFields := []string{\"name\"}\n\tconfig.CreateHashIndexedCollection(CollectionName, indexFields)\n}", "func (api *Api) createIndexes() {\n\t// username and email will be unique.\n\tkeys := bsonx.Doc{\n\t\t{Key: \"username\", Value: bsonx.Int32(1)},\n\t\t{Key: \"email\", Value: bsonx.Int32(1)},\n\t}\n\tpeople := api.DB.Collection(\"people\")\n\tdb.SetIndexes(people, keys)\n}", "func TestEngine_WriteIndex_NoKeys(t *testing.T) {\n\te := OpenDefaultEngine()\n\tdefer e.Close()\n\tif err := e.WriteIndex(nil, nil, nil); err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "func (st *Store) CreateIndex(index string) error {\n\tif index == \"\" {\n\t\treturn errNoIndexName\n\t}\n\t_, ok := st.indexes[index]\n\tif ok {\n\t\treturn fmt.Errorf(\"index with name %s already exist\", index)\n\t}\n\treturn nil\n}", "func NewMsgCreateIndex(owner sdk.AccAddress, tableName string, field string) MsgCreateIndex {\n return MsgCreateIndex {\n Owner: owner,\n TableName: tableName,\n Field: field,\n }\n}", "func CreateIndex(collectionName, indexName string, indexType IndexType, fd *feed.API, user utils.Address, client blockstore.Client, mutable bool) error {\n\tif fd.IsReadOnlyFeed() {\n\t\treturn ErrReadOnlyIndex\n\t}\n\tactualIndexName := collectionName + indexName\n\ttopic := utils.HashString(actualIndexName)\n\t_, oldData, err := fd.GetFeedData(topic, user)\n\tif err == nil && len(oldData) != 0 {\n\t\t// if the feed is present and it has some data means there index is still valid\n\t\treturn ErrIndexAlreadyPresent\n\t}\n\n\tmanifest := NewManifest(actualIndexName, time.Now().Unix(), indexType, mutable)\n\n\t// marshall and store the Manifest as new feed\n\tdata, err := json.Marshal(manifest)\n\tif err != nil {\n\t\treturn ErrManifestUnmarshall\n\t}\n\n\tref, err := client.UploadBlob(data, true, true)\n\tif err != nil {\n\t\treturn ErrManifestUnmarshall\n\t}\n\n\t_, err = fd.CreateFeed(topic, user, ref)\n\tif err != nil {\n\t\treturn ErrManifestCreate\n\t}\n\treturn nil\n}", "func NewIndex(index Constant) *Index {\n\treturn &Index{Constant: index}\n}", "func (st *Schema) addIndex(pk bool, name string, cols []IndexColumn) bool {\n\tif reflect.DeepEqual(st.PK, cols) {\n\t\treturn false\n\t}\n\tfor _, ind := range st.Indexes {\n\t\tif reflect.DeepEqual(ind.Columns, cols) {\n\t\t\tif pk {\n\t\t\t\tst.PrimaryKey = ind.Index\n\t\t\t}\n\t\t\treturn false\n\t\t}\n\t}\n\tst.Indexes = append(st.Indexes, SchemaIndex{\n\t\tIndex: name,\n\t\tColumns: cols,\n\t})\n\tif pk {\n\t\tst.PrimaryKey = name\n\t}\n\treturn true\n}", "func createNumericalIndexIfNotExists(ctx context.Context, iv mongo.IndexView, model mongo.IndexModel) error {\n\tc, err := iv.List(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\t_ = c.Close(ctx)\n\t}()\n\n\tmodelKeysBytes, err := bson.Marshal(model.Keys)\n\tif err != nil {\n\t\treturn err\n\t}\n\tmodelKeysDoc := bsoncore.Document(modelKeysBytes)\n\n\tfor c.Next(ctx) {\n\t\tkeyElem, err := c.Current.LookupErr(\"key\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tkeyElemDoc := keyElem.Document()\n\n\t\tfound, err := numericalIndexDocsEqual(modelKeysDoc, bsoncore.Document(keyElemDoc))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif found {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\t_, err = iv.CreateOne(ctx, model)\n\treturn 
err\n}", "func NewObjectIndex(index uint16, subIndex uint8) ObjectIndex {\n\treturn ObjectIndex{\n\t\tIndex: Index{\n\t\t\tB0: byte(index & 0xFF),\n\t\t\tB1: byte(index >> 8),\n\t\t},\n\t\tSubIndex: subIndex,\n\t}\n}", "func NewObjectIndex(index uint16, subIndex uint8) ObjectIndex {\n\treturn ObjectIndex{\n\t\tIndex: Index{\n\t\t\tB0: byte(index & 0xFF),\n\t\t\tB1: byte(index >> 8),\n\t\t},\n\t\tSubIndex: subIndex,\n\t}\n}", "func (b *Bucket) createIndexes(ctx context.Context) error {\n\t// must use primary read pref mode to check if files coll empty\n\tcloned, err := b.filesColl.Clone(options.Collection().SetReadPreference(readpref.Primary()))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdocRes := cloned.FindOne(ctx, bson.D{}, options.FindOne().SetProjection(bson.D{{\"_id\", 1}}))\n\n\t_, err = docRes.DecodeBytes()\n\tif err != mongo.ErrNoDocuments {\n\t\t// nil, or error that occurred during the FindOne operation\n\t\treturn err\n\t}\n\n\tfilesIv := b.filesColl.Indexes()\n\tchunksIv := b.chunksColl.Indexes()\n\n\tfilesModel := mongo.IndexModel{\n\t\tKeys: bson.D{\n\t\t\t{\"filename\", int32(1)},\n\t\t\t{\"uploadDate\", int32(1)},\n\t\t},\n\t}\n\n\tchunksModel := mongo.IndexModel{\n\t\tKeys: bson.D{\n\t\t\t{\"files_id\", int32(1)},\n\t\t\t{\"n\", int32(1)},\n\t\t},\n\t\tOptions: options.Index().SetUnique(true),\n\t}\n\n\tif err = createNumericalIndexIfNotExists(ctx, filesIv, filesModel); err != nil {\n\t\treturn err\n\t}\n\treturn createNumericalIndexIfNotExists(ctx, chunksIv, chunksModel)\n}", "func newDefinitionsIndex(definitions []clusterv1.ClusterClassStatusVariable) definitionsIndex {\n\ti := definitionsIndex{}\n\tfor _, def := range definitions {\n\t\ti.store(def)\n\t}\n\treturn i\n}", "func SaveIndex(target string, source QueryList, verbose bool) {\n\tlogm(\"INFO\", fmt.Sprintf(\"saving index to %s...\", target), verbose)\n\tfile, err := os.Create(target)\n\tcheckResult(err)\n\tdefer file.Close()\n\n\tgr := gzip.NewWriter(file)\n\tdefer gr.Close()\n\n\tencoder := gob.NewEncoder(gr)\n\n\terr = encoder.Encode(source.Names)\n\tcheckResult(err)\n\tlogm(\"INFO\", fmt.Sprintf(\"%v sequence names saved\", len(source.Names)), verbose)\n\n\terr = encoder.Encode(source.SeedSize)\n\tcheckResult(err)\n\n\terr = encoder.Encode(source.Cgst)\n\tcheckResult(err)\n\n\t// save the index, but go has a size limit\n\tindexSize := len(source.Index)\n\terr = encoder.Encode(indexSize)\n\tcheckResult(err)\n\tlogm(\"INFO\", fmt.Sprintf(\"%v queries to save...\", indexSize), verbose)\n\n\tcount := 0\n\tfor key, value := range source.Index {\n\t\terr = encoder.Encode(key)\n\t\tcheckResult(err)\n\t\terr = encoder.Encode(value)\n\t\tcheckResult(err)\n\t\tcount++\n\t\tif count%10000 == 0 {\n\t\t\tlogm(\"INFO\", fmt.Sprintf(\"processing: saved %v items\", count), false)\n\t\t}\n\t}\n\n\tlogm(\"INFO\", fmt.Sprintf(\"saving index to %s: done\", target), verbose)\n}", "func (i *Index) Create() error {\n\n\tdoc := mapping{Properties: map[string]mappingProperty{}}\n\tfor _, f := range i.md.Fields {\n\t\tdoc.Properties[f.Name] = mappingProperty{}\n\t\tfs, err := fieldTypeString(f.Type)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdoc.Properties[f.Name][\"type\"] = fs\n\t}\n\n // Added for apple to apple benchmark\n doc.Properties[\"body\"][\"type\"] = \"text\"\n doc.Properties[\"body\"][\"analyzer\"] = \"my_english_analyzer\"\n doc.Properties[\"body\"][\"search_analyzer\"] = \"whitespace\"\n doc.Properties[\"body\"][\"index_options\"] = \"offsets\"\n //doc.Properties[\"body\"][\"test\"] = \"test\"\n index_map := 
map[string]int{\n \"number_of_shards\" : 1,\n \"number_of_replicas\" : 0,\n }\n analyzer_map := map[string]interface{}{\n \"my_english_analyzer\": map[string]interface{}{\n \"tokenizer\": \"standard\",\n \"char_filter\": []string{ \"html_strip\" } ,\n \"filter\" : []string{\"english_possessive_stemmer\", \n \"lowercase\", \"english_stop\", \n \"english_stemmer\", \n \"asciifolding\", \"icu_folding\"},\n },\n }\n filter_map := map[string]interface{}{\n \"english_stop\": map[string]interface{}{\n \"type\": \"stop\",\n \"stopwords\": \"_english_\",\n },\n \"english_possessive_stemmer\": map[string]interface{}{\n \"type\": \"stemmer\",\n \"language\": \"possessive_english\",\n },\n \"english_stemmer\" : map[string]interface{}{\n \"type\" : \"stemmer\",\n \"name\" : \"english\",\n },\n \"my_folding\": map[string]interface{}{\n \"type\": \"asciifolding\",\n \"preserve_original\": \"false\",\n },\n }\n analysis_map := map[string]interface{}{\n \"analyzer\": analyzer_map,\n \"filter\" : filter_map,\n }\n settings := map[string]interface{}{\n \"index\": index_map,\n \"analysis\": analysis_map,\n }\n\n // TODO delete?\n\t// we currently manually create the autocomplete mapping\n\t/*ac := mapping{\n\t\tProperties: map[string]mappingProperty{\n\t\t\t\"sugg\": mappingProperty{\n\t\t\t\t\"type\": \"completion\",\n\t\t\t\t\"payloads\": true,\n\t\t\t},\n\t\t},\n\t}*/\n\n\tmappings := map[string]mapping{\n\t\ti.typ: doc,\n //\t\"autocomplete\": ac,\n\t}\n\n fmt.Println(mappings)\n\n\t//_, err := i.conn.CreateIndex(i.name).BodyJson(map[string]interface{}{\"mappings\": mappings}).Do()\n\t_, err := i.conn.CreateIndex(i.name).BodyJson(map[string]interface{}{\"mappings\": mappings, \"settings\": settings}).Do()\n\n if err != nil {\n fmt.Println(\"Error \", err)\n\t\tfmt.Println(\"!!!!Get Error when using client to create index\")\n\t}\n\n\treturn err\n}" ]
[ "0.7086214", "0.6896672", "0.68961805", "0.68870723", "0.6807751", "0.67654306", "0.6765415", "0.67118025", "0.667807", "0.65912825", "0.6587095", "0.6584252", "0.6555517", "0.6552417", "0.6544909", "0.65409094", "0.6511039", "0.6507578", "0.64975005", "0.64857626", "0.64816004", "0.6474859", "0.6474097", "0.6470685", "0.64653486", "0.64325637", "0.6427594", "0.64128923", "0.6412728", "0.64117765", "0.6400674", "0.6399807", "0.6389094", "0.63856757", "0.6373929", "0.63717425", "0.6367363", "0.63258195", "0.6322167", "0.6319627", "0.63165975", "0.631277", "0.63043314", "0.6287991", "0.625808", "0.62579787", "0.6212997", "0.6207124", "0.62050474", "0.61711323", "0.6169212", "0.6168046", "0.6158852", "0.61550754", "0.6142448", "0.61384636", "0.61356616", "0.61343586", "0.6117708", "0.6114412", "0.6105895", "0.6095973", "0.60781425", "0.6043715", "0.6043308", "0.6037008", "0.60356426", "0.6028422", "0.6028205", "0.6009925", "0.5999363", "0.5996106", "0.5990957", "0.5988528", "0.5986658", "0.59829676", "0.5982108", "0.59755987", "0.5973254", "0.5971015", "0.5969424", "0.5960744", "0.595247", "0.593913", "0.5938808", "0.5932039", "0.5931475", "0.59297955", "0.5918969", "0.59049326", "0.59032613", "0.590118", "0.5898389", "0.5869273", "0.5864199", "0.5864199", "0.5860765", "0.5850266", "0.5849542", "0.58406144" ]
0.67859226
5
TestAllRest combines all the rest package tests into a table to avoid odd state things like database files.
func TestAllRest(t *testing.T) {
	Configure()
	var table = [][]RequestTestPair{
		testAddEvent(t),
		testDuplicateEvent(t),
		testGetAllConfig(t),
		testAddTracer(t),
		testSwitchProject(t),
		testDeleteProject(t),
	}
	serverTestHelperBulk(table, t)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (Test) All() error {\n\tprintHeader(\"PKG TESTS (ALL)\")\n\terr := sh.RunV(\"gotestsum\", \"./pkg/...\")\n\treturn err\n}", "func testResourceAll(t *testing.T, s *Service) {\n\tres, err := s.ResourceAll(context.TODO())\n\tif err != nil {\n\t\tt.Logf(\"testResourceAll error(%v) \\n\", err)\n\t\treturn\n\t}\n\tt.Logf(\"testResourceAll res: %+v \\n\", res)\n}", "func TestAll(t *testing.T) {\n\tcfg := initializeTests(t)\n\ttestUser(t, cfg)\n\ttestHomeMessage(t, cfg)\n\ttestDepartment(t, cfg)\n\ttestCommunity(t, cfg)\n\ttestCity(t, cfg)\n\ttestCopro(t, cfg)\n\ttestBudgetAction(t, cfg)\n\ttestRenewProject(t, cfg)\n\ttestHousingType(t, cfg)\n\ttestHousing(t, cfg)\n\ttestCommitment(t, cfg)\n\ttestBeneficiary(t, cfg)\n\ttestPayment(t, cfg)\n\ttestBudgetSector(t, cfg)\n\ttestCommitmentLink(t, cfg)\n\ttestCommission(t, cfg)\n\ttestRenewProjectForecast(t, cfg)\n\ttestHousingForecast(t, cfg)\n\ttestCoproForecast(t, cfg)\n\ttestSettings(t, cfg)\n\ttestHome(t, cfg)\n\ttestBeneficiaryDatas(t, cfg)\n\ttestBeneficiaryPayments(t, cfg)\n\ttestPmtRatio(t, cfg)\n\ttestPmtForecasts(t, cfg)\n\ttestCmtForecasts(t, cfg)\n\ttestLinkCommitmentsHousings(t, cfg)\n\ttestCoproCommitmentLink(t, cfg)\n\ttestRPEventType(t, cfg)\n\ttestRPEvent(t, cfg)\n\ttestRenewProjectReport(t, cfg)\n\ttestRPPerCommunityReport(t, cfg)\n\ttestRPCmtCityJoin(t, cfg)\n\ttestDepartmentReport(t, cfg)\n\ttestCityReport(t, cfg)\n\ttestPreProg(t, cfg)\n\ttestProg(t, cfg)\n\ttestRPLS(t, cfg)\n\ttestSummaries(t, cfg)\n\ttestHousingSummary(t, cfg)\n\ttestCoproEventType(t, cfg)\n\ttestCoproEvent(t, cfg)\n\ttestCoproDoc(t, cfg)\n\ttestCoproReport(t, cfg)\n\ttestRPMultiAnnualReport(t, cfg)\n\ttestPaymentCredits(t, cfg)\n\ttestPaymentCreditJournals(t, cfg)\n\ttestPlacement(t, cfg)\n\ttestBeneficiaryGroup(t, cfg)\n\ttestBeneficiaryGroupDatas(t, cfg)\n\ttestHousingTypology(t, cfg)\n\ttestHousingConvention(t, cfg)\n\ttestHousingComment(t, cfg)\n\ttestHousingTransfer(t, cfg)\n\ttestConventionType(t, cfg)\n\ttestReservationFee(t, cfg)\n\ttestGetDifActionPaymentPrevisions(t, cfg)\n\ttestReservationReport(t, cfg)\n\ttestSoldCommitment(t, cfg)\n\ttestAvgPmtTime(t, cfg)\n\ttestPaymentDemands(t, cfg)\n\ttestPaymentDelays(t, cfg)\n}", "func (rest *RestController) All(w http.ResponseWriter, r *http.Request) (Response, error) {\n\tresources := reflect.New(reflect.SliceOf(reflect.TypeOf(rest.Table).Elem()))\n\tcount, err := rest.Table.List(resources.Interface(), models.NewDBQuery(r.URL.Query(), nil))\n\tif err != nil {\n\t\treturn nil, &httpError{err, \"\", 500}\n\t}\n\tw.Header().Set(\"Total\", strconv.FormatInt(count, 10))\n\treturn &JSONResponse{resources.Interface(), 200}, nil\n}", "func TestTestdataAll(t *testing.T) {\n\tout := new(bytes.Buffer)\n\terr := listPath(out, \"testdata\", true, false, true)\n\tif err != nil {\n\t\tt.Errorf(\"unexpected error:%s\", err)\n\t}\n\tresult := out.String()\n\tif result != resultTestdataAll {\n\t\tt.Errorf(\"bad result\\nexpected:\\n%v\\ngot:\\n%v\\n\", result, resultTestdataAll)\n\t}\n}", "func TestAll(t *testing.T) {\n\n\t// remove anything that might have been leftover from failed tests\n\tDeleteApi(TEST_API_AXLE_SERVER, TEST_API_NAME)\n\tDeleteKey(TEST_API_AXLE_SERVER, TEST_KEY_NAME)\n\tDeleteKeyRing(TEST_API_AXLE_SERVER, TEST_KEYRING_NAME)\n\n\ttestPing(t)\n\ttestInfo(t)\n\ttestGetNonExistentApi(t)\n\ttestCreateApi(t)\n\tapi := testGetApi(t)\n\ttestUpdateApi(t, api)\n\ttestApiStats(t, api)\n\tk := testNewKey(t)\n\ttestGetKey(t)\n\ttestUpdateKey(t, k)\n\ttestLinkKey(t, api)\n\ttestApiKeyCharts(t, 
api)\n\ttestApiKeys(t)\n\ttestApis(t)\n\ttestApisCharts(t)\n\ttestKeyApiCharts(t)\n\ttestKeyApis(t, k)\n\ttestKeyStats(t, k)\n\ttestKeysCharts(t)\n\ttestKeys(t)\n\tkr := testNewKeyRing(t)\n\ttestGetKeyRing(t)\n\ttestUpdateKeyRing(t, kr)\n\ttestKeyRingLinkKey(t)\n\ttestKeyRingKeys(t, kr)\n\ttestKeyRingStats(t, kr)\n\ttestKeyRings(t)\n\ttestKeyRingUnlinkKey(t)\n\ttestKeyRingsEmpty(t)\n\ttestUnlinkKey(t)\n\ttestDeleteKey(t)\n\ttestDeleteKeyRing(t)\n\ttestDeleteApi(t, api)\n}", "func (Test) All() error {\n\tmg.Deps(Test.Unit, Test.Lint, Test.Style)\n\treturn nil\n}", "func TestREST(t *testing.T) {\n\n\tif err := amc.Authenticate(); err != nil {\n\t\tt.Fatalf(\"Could not authenticate\")\n\t}\n\n\trt, err := amc.ListResourceTypes()\n\n\tif err != nil {\n\t\tt.Fatalf(\"Could not List resource types\")\n\t}\n\n\tfor _, v := range rt {\n\t\tt.Logf(\"%v\", v)\n\t}\n}", "func TestAllTask() {\n\tif noTestsFlag {\n\t\tlog.Println(\"*** Skipping: notests\")\n\t\treturn\n\t}\n\trunCmd(\"./pkg/pac/make.sh\")\n\trunCmd(\"go\", \"test\", \"-tags=\\\"net databases\\\"\",\n\t\t\"./cmd/...\", \"./pkg/...\")\n}", "func TestAll() error {\n\tout, err := sh.Output(\"go\", \"test\", \"./...\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tlog.Print(out)\n\treturn nil\n}", "func listTests(ctx context.Context, cfg *config.Config,\n\tdrv *driver.Driver,\n\tdutInfos map[string]*protocol.DUTInfo) ([]*resultsjson.Result, error) {\n\tCompanionFeatures := make(map[string]*frameworkprotocol.DUTFeatures)\n\tfor role, dutInfo := range dutInfos {\n\t\tif role != \"\" {\n\t\t\tCompanionFeatures[role] = dutInfo.GetFeatures()\n\t\t}\n\t}\n\n\tvar dutFeature *frameworkprotocol.DUTFeatures\n\tif _, ok := dutInfos[\"\"]; ok {\n\t\tdutFeature = dutInfos[\"\"].GetFeatures()\n\t}\n\n\ttests, err := drv.ListMatchedTests(ctx, cfg.Features(dutFeature, CompanionFeatures))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar shard *sharding.Shard\n\tif cfg.ShardMethod() == \"hash\" {\n\t\tshard = sharding.ComputeHash(tests, cfg.ShardIndex(), cfg.TotalShards())\n\t} else {\n\t\tshard = sharding.ComputeAlpha(tests, cfg.ShardIndex(), cfg.TotalShards())\n\t}\n\n\tvar testsToPrint []*driver.BundleEntity\n\tif cfg.ExcludeSkipped() {\n\t\ttestsToPrint, _ = removeSkippedTestsFromBundle(shard.Included)\n\t} else {\n\t\ttestsToPrint = shard.Included\n\t}\n\n\t// Convert driver.BundleEntity to resultsjson.Result.\n\tresults := make([]*resultsjson.Result, len(testsToPrint))\n\tfor i, re := range testsToPrint {\n\t\ttest, err := resultsjson.NewTest(re.Resolved.GetEntity())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresults[i] = &resultsjson.Result{\n\t\t\tTest: *test,\n\t\t\tSkipReason: strings.Join(re.Resolved.GetSkip().GetReasons(), \", \"),\n\t\t}\n\t}\n\treturn results, nil\n}", "func TestAllOps(t *testing.T) {\n\tdb, stop := testserver.NewDBForTestWithDatabase(t, \"photos\")\n\tdefer stop()\n\n\tctx := context.Background()\n\n\tif err := initSchema(ctx, db); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tcfg := Config{\n\t\tDB: db,\n\t\tNumUsers: 1,\n\t}\n\n\tfor _, op := range ops {\n\t\tt.Logf(\"running %s\", op.name)\n\t\tif err := runUserOp(ctx, cfg, 1, op.typ); err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\t}\n}", "func runTests(t *testing.T, tests []test) {\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tresp := executeRequest(tt.method, tt.url, serialize(tt.req), tt.asAdmin)\n\t\t\tif resp.StatusCode != tt.want {\n\t\t\t\tt.Errorf(\"Unexpected status code %d\", resp.StatusCode)\n\t\t\t}\n\n\t\t\tif tt.body != 
\"\" {\n\t\t\t\tbodyBytes, err := ioutil.ReadAll(resp.Body)\n\t\t\t\tdefer resp.Body.Close()\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Errorf(\"Error loading body\")\n\t\t\t\t}\n\t\t\t\tif tt.body != string(bodyBytes) {\n\t\t\t\t\tt.Errorf(\"Unexpected body '%s', expected '%s'\", bodyBytes, tt.body)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func SubtestAll(t *testing.T, tr mux.Multiplexer) {\n\ttmux.SubtestAll(t, tr)\n\n}", "func (db *Database) GetAllTestClasses() (pgx.Rows, error) {\n\treturn db.conn.Query(context.Background(), \"SELECT * FROM test_classes\")\n}", "func TestAPIGetAll() error {\n\ttestRead := testCase{\n\t\tinput: \"\",\n\t\texpected: `[{\"FirstName\":\"Alec\", \"LastName\":\"Perro\", \"Age\":5}]`,\n\t}\n\n query, err := dm.Read(1)\n if err != nil {\n log.Fatal(err)\n }\n\n\tjsonify, err := json.Marshal(query)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif testRead.expected != string(jsonify) {\n\t\treturn errors.New(\"testDB failed\")\n\t}\n\n\tfmt.Println(\"Tests passed\")\n\treturn nil\n}", "func newRestTester(tb testing.TB, restConfig *RestTesterConfig, collectionConfig collectionConfiguration, numCollections int) *RestTester {\n\tvar rt RestTester\n\tif tb == nil {\n\t\tpanic(\"tester parameter cannot be nil\")\n\t}\n\trt.TB = tb\n\tif restConfig != nil {\n\t\trt.RestTesterConfig = restConfig\n\t} else {\n\t\trt.RestTesterConfig = &RestTesterConfig{}\n\t}\n\trt.RestTesterConfig.collectionConfig = collectionConfig\n\trt.RestTesterConfig.numCollections = numCollections\n\trt.RestTesterConfig.useTLSServer = base.ServerIsTLS(base.UnitTestUrl())\n\treturn &rt\n}", "func (sfs *SuiteFS) TestAll(t *testing.T) {\n\tsfs.RunTests(t, UsrTest,\n\t\t// VFS tests\n\t\tsfs.TestClone,\n\t\tsfs.TestChdir,\n\t\tsfs.TestChtimes,\n\t\tsfs.TestCreate,\n\t\tsfs.TestEvalSymlink,\n\t\tsfs.TestGetTempDir,\n\t\tsfs.TestLink,\n\t\tsfs.TestLstat,\n\t\tsfs.TestMkdir,\n\t\tsfs.TestMkdirAll,\n\t\tsfs.TestOpen,\n\t\tsfs.TestOpenFileWrite,\n\t\tsfs.TestReadDir,\n\t\tsfs.TestReadFile,\n\t\tsfs.TestReadlink,\n\t\tsfs.TestRemove,\n\t\tsfs.TestRemoveAll,\n\t\tsfs.TestRename,\n\t\tsfs.TestSameFile,\n\t\tsfs.TestStat,\n\t\tsfs.TestSymlink,\n\t\tsfs.TestTempDir,\n\t\tsfs.TestTempFile,\n\t\tsfs.TestTruncate,\n\t\tsfs.TestWriteFile,\n\t\tsfs.TestWriteString,\n\t\tsfs.TestToSysStat,\n\t\tsfs.TestUmask,\n\n\t\t// File tests\n\t\tsfs.TestFileChdir,\n\t\tsfs.TestFileCloseWrite,\n\t\tsfs.TestFileCloseRead,\n\t\tsfs.TestFileFd,\n\t\tsfs.TestFileName,\n\t\tsfs.TestFileRead,\n\t\tsfs.TestFileReadDir,\n\t\tsfs.TestFileReaddirnames,\n\t\tsfs.TestFileSeek,\n\t\tsfs.TestFileStat,\n\t\tsfs.TestFileSync,\n\t\tsfs.TestFileTruncate,\n\t\tsfs.TestFileWrite,\n\t\tsfs.TestFileWriteString,\n\t\tsfs.TestFileWriteTime,\n\n\t\t// Path tests\n\t\tsfs.TestAbs,\n\t\tsfs.TestBase,\n\t\tsfs.TestClean,\n\t\tsfs.TestDir,\n\t\tsfs.TestFromToSlash,\n\t\tsfs.TestGlob,\n\t\tsfs.TestIsAbs,\n\t\tsfs.TestJoin,\n\t\tsfs.TestRel,\n\t\tsfs.TestSplit,\n\t\tsfs.TestWalk)\n\n\t// Tests to be run as root\n\tsfs.RunTests(t, avfs.UsrRoot,\n\t\tsfs.TestChmod,\n\t\tsfs.TestChown,\n\t\tsfs.TestChroot,\n\t\tsfs.TestLchown,\n\t\tsfs.TestFileChmod,\n\t\tsfs.TestFileChown)\n}", "func CasesGetAllStudentsCourse() mymodels.AllTest {\n\treturn mymodels.AllTest{\n\t\t{\n\t\t\tMethod: \"GET\",\n\t\t\tURL: \"/Students/GetAllStudentsCourse\",\n\t\t\tFunction: courses.GetAllStudentsCourse,\n\t\t\tBody: `{\"CourseIDDD\": 1}`,\n\t\t\tExpectedBody: `CourseID is empty or not valid`,\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t},\n\t\t{\n\t\t\tMethod: \"GET\",\n\t\t\tURL: 
\"/Students/GetAllStudentsCourse\",\n\t\t\tFunction: courses.GetAllStudentsCourse,\n\t\t\tBody: `{\"CourseID\": 1}`,\n\t\t\tExpectedBody: `[{\"ID\":10001,\"Name\":\"Daniel Gómez Sermeño\",\"Email\":\"[email protected]\"},{\"ID\":10002,\"Name\":\"Xavier Garzón López\",\"Email\":\"[email protected]\"},{\"ID\":10003,\"Name\":\"Juan F. Gil\",\"Email\":\"[email protected]\"},{\"ID\":10004,\"Name\":\"Edgar Silva\",\"Email\":\"[email protected]\"}]`,\n\t\t\tStatusCode: http.StatusOK,\n\t\t},\n\t\t{\n\t\t\tMethod: \"GET\",\n\t\t\tURL: \"/Students/GetAllStudentsCourse\",\n\t\t\tFunction: courses.GetAllStudentsCourse,\n\t\t\tBody: `{\"CourseID\": 2}`,\n\t\t\tExpectedBody: `[{\"ID\":10005,\"Name\":\"Juanita María Parra Villamíl\",\"Email\":\"[email protected]\"},{\"ID\":10006,\"Name\":\"Sebastián Rodríguez Osorio Silva\",\"Email\":\"[email protected]\"},{\"ID\":10007,\"Name\":\"Andrés Felipe Garcés\",\"Email\":\"[email protected]\"}]`,\n\t\t\tStatusCode: http.StatusOK,\n\t\t},\n\t\t{\n\t\t\tMethod: \"GET\",\n\t\t\tURL: \"/Students/GetAllStudentsCourse\",\n\t\t\tFunction: courses.GetAllStudentsCourse,\n\t\t\tBody: ``,\n\t\t\tExpectedBody: `CourseID is empty or not valid`,\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t},\n\t\t{\n\t\t\tMethod: \"GET\",\n\t\t\tURL: \"/Students/GetAllStudentsCourse\",\n\t\t\tFunction: courses.GetAllStudentsCourse,\n\t\t\tBody: `{\"CourseID\": 56}`,\n\t\t\tExpectedBody: \"(db 2) element does not exist in db\",\n\t\t\tStatusCode: http.StatusNotFound,\n\t\t},\n\t}\n}", "func (t *TestBehaviour) RunAllTests() {\n\tt.RunGetICLATemplateWatermark()\n\tt.RunGetCCLATemplateWatermark()\n}", "func (sIdm *SuiteIdm) TestAll(t *testing.T) {\n\tsIdm.TestCurrentUser(t)\n\tsIdm.TestGroupAddDel(t)\n\tsIdm.TestUserAddDel(t)\n\tsIdm.TestLookup(t)\n\tsIdm.TestUser(t)\n\tsIdm.TestUserDenied(t)\n\tsIdm.TestPermDenied(t)\n}", "func TestGetAllOrdersForTableID(t *testing.T) {\n\n // ...\n\n}", "func BaseTestAll3(engine *xorm.Engine, t *testing.T) {\r\n\tfmt.Println(\"-------------- processors TX --------------\")\r\n\ttestProcessorsTx(engine, t)\r\n\tfmt.Println(\"-------------- insert pointer data --------------\")\r\n\ttestPointerData(engine, t)\r\n\tfmt.Println(\"-------------- insert null data --------------\")\r\n\ttestNullValue(engine, t)\r\n\tfmt.Println(\"-------------- testCompositeKey --------------\")\r\n\ttestCompositeKey(engine, t)\r\n\tfmt.Println(\"-------------- testCompositeKey2 --------------\")\r\n\ttestCompositeKey2(engine, t)\r\n\tfmt.Println(\"-------------- testStringPK --------------\")\r\n\ttestStringPK(engine, t)\r\n}", "func TestGetAllOrders(t *testing.T) {\n\n // ...\n\n}", "func runTests(c *C, overrider configOverrider, tests ...func(dbt *DBTest)) {\n\tdb, err := sql.Open(\"mysql\", getDSN(overrider))\n\tc.Assert(err, IsNil, Commentf(\"Error connecting\"))\n\tdefer db.Close()\n\n\tdb.Exec(\"DROP TABLE IF EXISTS test\")\n\n\tdbt := &DBTest{c, db}\n\tfor _, test := range tests {\n\t\ttest(dbt)\n\t\tdbt.db.Exec(\"DROP TABLE IF EXISTS test\")\n\t}\n}", "func (c *Configuration) GetAllTests(tests, suites []string) <-chan JobWithError {\n\toutput := make(chan JobWithError)\n\tgo func() {\n\t\tfor check := range c.TestsByName(tests...) {\n\t\t\toutput <- check\n\t\t}\n\n\t\tfor check := range c.TestsForSuites(suites...) 
{\n\t\t\toutput <- check\n\t\t}\n\t\tclose(output)\n\t}()\n\n\treturn output\n}", "func TestVtctldListAllTablets(t *testing.T) {\n\tdefer cluster.PanicHandler(t)\n\turl := fmt.Sprintf(\"http://%s:%d/api/keyspaces/\", clusterInstance.Hostname, clusterInstance.VtctldHTTPPort)\n\ttestURL(t, url, \"keyspace url\")\n\n\thealthCheckURL := fmt.Sprintf(\"http://%s:%d/debug/health\", clusterInstance.Hostname, clusterInstance.VtctldHTTPPort)\n\ttestURL(t, healthCheckURL, \"vtctld health check url\")\n\n\ttestListAllTablets(t)\n\tdeleteCell(t)\n\taddCellback(t)\n}", "func testGetHome(t *testing.T, c *TestContext) {\n\ttcc := []TestCase{\n\t\t*c.UserCheckTestCase, // 0 : token empty\n\t\t{\n\t\t\tToken: c.Config.Users.User.Token,\n\t\t\tRespContains: []string{`\"Commitment\":`, `\"Payment\":`, `\"ImportLog\":[`,\n\t\t\t\t`\"Programmation\":[`, `\"PaymentCreditSum\":`,\n\t\t\t\t`\"HomeMessage\":{\"Title\":\"Message du jour\",\"Body\":\"Corps du message\"}`,\n\t\t\t\t`\"AveragePayment\":[`, `\"CsfWeekTrend\":`,\n\t\t\t\t`\"FlowStockDelays\":`, `\"PaymentRate\":`},\n\t\t\tCount: 4,\n\t\t\tCountItemName: `\"Month\"`,\n\t\t\tStatusCode: http.StatusOK}, // 1 : ok\n\t}\n\tf := func(tc TestCase) *httpexpect.Response {\n\t\treturn c.E.GET(\"/api/home\").\n\t\t\tWithHeader(\"Authorization\", \"Bearer \"+tc.Token).Expect()\n\t}\n\tfor _, r := range chkFactory(tcc, f, \"GetHomes\") {\n\t\tt.Error(r)\n\t}\n}", "func TestViewAllItem(t *testing.T) {\n\n\trequest, _ := http.NewRequest(\"GET\", \"/viewAllItem\", nil)\n\tresponse := httptest.NewRecorder()\n\tRouter().ServeHTTP(response, request)\n\tassert.Equal(t, 200, response.Code, \"Ok response expected\")\n}", "func (client *Client) WaitForAllTestResourcesReady() error {\n\tif err := client.WaitForChannelsReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := client.WaitForSubscriptionsReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := client.WaitForBrokersReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := client.WaitForTriggersReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := client.WaitForCronJobSourcesReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := client.WaitForContainerSourcesReady(); err != nil {\n\t\treturn err\n\t}\n\tif err := pkgTest.WaitForAllPodsRunning(client.Kube, client.Namespace); err != nil {\n\t\treturn err\n\t}\n\t// FIXME(Fredy-Z): This hacky sleep is added to try mitigating the test flakiness.\n\t// Will delete it after we find the root cause and fix.\n\ttime.Sleep(10 * time.Second)\n\treturn nil\n}", "func TestAll(t *testing.T) {\n\tfor i := 0; i <= 0xff; i += 7 {\n\t\tassertU16(t, uint16(i))\n\t}\n\tfor i := 0; i <= 0xffff; i += 211 {\n\t\tassertU32(t, uint32(i))\n\t}\n\tfor i := 0; i <= 0xffffffff; i += 1000000007 {\n\t\tassertU64(t, uint64(i))\n\t}\n}", "func (suite *SuiteTester) SetupTest() {\n r.Table(\"users\").Delete().RunWrite(session)\n user_fixtures := make([]User, 4)\n user_fixtures[0] = User{\n FirstName: \"Tyrion\",\n LastName: \"Lannister\",\n Email: \"[email protected]\",\n Bio: \"Younger brother to Cersei and Jaime.\",\n FacebookId: \"0b8a2b98-f2c5-457a-adc0-34d10a6f3b5c\",\n CreatedAt: time.Date(2008, time.June, 13, 18, 30, 10, 0, time.UTC),\n UpdatedAt: time.Date(2014, time.October, 5, 18, 30, 10, 0, time.UTC),\n }\n user_fixtures[1] = User{\n FirstName: \"Tywin\",\n LastName: \"Lannister\",\n Email: \"[email protected]\",\n Bio: \"Lord of Casterly Rock, Shield of Lannisport and Warden of the West.\",\n FacebookId: \"bb2d8a7b-92e6-4baf-b4f7-b664bdeee25b\",\n CreatedAt: time.Date(1980, time.July, 14, 
18, 30, 10, 0, time.UTC),\n UpdatedAt: time.Date(2014, time.October, 6, 18, 30, 10, 0, time.UTC),\n }\n user_fixtures[2] = User{\n FirstName: \"Jaime\",\n LastName: \"Lannister\",\n Email: \"[email protected]\",\n Bio: \"Nicknamed 'Kingslayer' for killing the previous King, Aerys II.\",\n FacebookId: \"d4c19866-eaff-4417-a1c1-93882162606d\",\n CreatedAt: time.Date(2000, time.September, 15, 18, 30, 10, 0, time.UTC),\n UpdatedAt: time.Date(2014, time.October, 7, 18, 30, 10, 0, time.UTC),\n }\n user_fixtures[3] = User{\n FirstName: \"Cersei\",\n LastName: \"Lannister\",\n Email: \"[email protected]\",\n Bio: \"Queen of the Seven Kingdoms of Westeros, is the wife of King Robert Baratheon.\",\n FacebookId: \"251d74d8-7462-4f2a-b132-6f7e429507e5\",\n CreatedAt: time.Date(2002, time.May, 12, 18, 30, 10, 0, time.UTC),\n UpdatedAt: time.Date(2014, time.October, 8, 18, 30, 10, 0, time.UTC),\n }\n\n r.Table(\"users\").Insert(user_fixtures).RunWrite(session)\n}", "func NewRestAPI[T any](db database.ReadWriter) *RestAPI[T] {\n\treturn &RestAPI[T]{\n\t\tdb: db,\n\t}\n}", "func convertTestsToTestTasks(tests []string) (tTasks []testBatch, have_show, have_is bool) {\n\tbatchSize := 30\n\ttotal := (len(tests) / batchSize) + 2\n\t// the extra 1 is for sub_query_more test\n\ttTasks = make([]testBatch, total+1)\n\ttestIdx := 0\n\thave_subqmore, have_role := false, false\n\tfor i := 0; i < total; i++ {\n\t\ttTasks[i] = make(testBatch, 0, batchSize)\n\t\tfor j := 0; j <= batchSize && testIdx < len(tests); j++ {\n\t\t\t// skip sub_query_more test, since it consumes the most time\n\t\t\t// we better use a separate goroutine to run it\n\t\t\t// role test has many connection/disconnection operation.\n\t\t\t// we better use a separate goroutine to run it\n\t\t\tswitch tests[testIdx] {\n\t\t\tcase \"sub_query_more\":\n\t\t\t\thave_subqmore = true\n\t\t\tcase \"show\":\n\t\t\t\thave_show = true\n\t\t\tcase \"infoschema\":\n\t\t\t\thave_is = true\n\t\t\tcase \"role\":\n\t\t\t\thave_role = true\n\t\t\tcase \"role2\":\n\t\t\t\thave_role = true\n\t\t\tdefault:\n\t\t\t\ttTasks[i] = append(tTasks[i], tests[testIdx])\n\t\t\t}\n\t\t\ttestIdx++\n\t\t}\n\t}\n\n\tif have_subqmore {\n\t\ttTasks[total-1] = testBatch{\"sub_query_more\"}\n\t}\n\n\tif have_role {\n\t\ttTasks[total] = testBatch{\"role\", \"role2\"}\n\t}\n\treturn\n}", "func TestGetAllTodos(t *testing.T) {\n\t_client := setupDatabase(t)\n\t_handler := setupTodoHandler(_client)\n\tdefer _client.Close()\n\n\tapp := fiber.New()\n\n\tapp.Get(\"/todos\", _handler.GetAllTodos)\n\n\tr := httptest.NewRequest(\"GET\", \"/todos\", nil)\n\n\tresp, err := app.Test(r)\n\tif err != nil {\n\t\tassert.Fail(t, err.Error())\n\t}\n\n\tassert.Equal(t, fiber.StatusOK, resp.StatusCode)\n\n\tvar data []*entity.Todo\n\tif err := json.NewDecoder(resp.Body).Decode(&data); err != nil {\n\t\tassert.Fail(t, err.Error())\n\t}\n\n\tfmt.Println(\"All Todos:\", data)\n\tfmt.Println(\"Status Code:\", resp.StatusCode)\n}", "func (tt *requestTest) Test(t *testing.T) {\n\tt.Parallel()\n\tvars := tt.Init()\n\th, err := rest.NewHandler(vars.Index)\n\tif err != nil {\n\t\tt.Errorf(\"rest.NewHandler failed: %s\", err)\n\t\treturn\n\t}\n\tr, err := tt.NewRequest()\n\tif err != nil || r == nil {\n\t\tt.Errorf(\"tt.NewRequest failed: %s\", err)\n\t\treturn\n\t}\n\tw := httptest.NewRecorder()\n\n\th.ServeHTTP(w, r)\n\tif tt.ResponseCode != w.Code {\n\t\tt.Errorf(\"Expected HTTP response code %d, got %d\", tt.ResponseCode, w.Code)\n\t}\n\theader := w.Header()\n\tfor k, evs := range tt.ResponseHeader {\n\t\tif eCnt, 
aCnt := len(evs), len(header[k]); eCnt != aCnt {\n\t\t\tt.Errorf(\"expected HTTP Header %q to have %d items, got %d items\", k, eCnt, aCnt)\n\t\t\tcontinue\n\t\t}\n\t\tfor i, ev := range evs {\n\t\t\tif av := header[k][i]; ev != av {\n\t\t\t\tt.Errorf(\"Expected HTTP header[%q][%d] to equal %q, got %q\", k, i, ev, av)\n\t\t\t}\n\t\t}\n\n\t}\n\tb, _ := ioutil.ReadAll(w.Body)\n\tif len(tt.ResponseBody) > 0 {\n\t\ttestutil.JSONEq(t, []byte(tt.ResponseBody), b)\n\t} else if len(b) > 0 {\n\t\tt.Errorf(\"Expected empty response body, got:\\n%s\", b)\n\t}\n\n\tif tt.ExtraTest != nil {\n\t\ttt.ExtraTest(t, vars)\n\t}\n}", "func RegisterRestContext(s *godog.Suite, upstream *feature.RestContext) *RestContext {\n\tc := RestContext{\n\t\tRestContext: upstream,\n\t}\n\n\t// AfterStep is used to sleep the test before check into the database,\n\t// otherwise the corresponding message hasn't been dispatched when the test run check against the table\n\t// therefore the projection is not found.\n\ts.AfterStep(func(step *gherkin.Step, e error) {\n\t\tif e != nil {\n\t\t\treturn\n\t\t}\n\n\t\tre := regexp.MustCompile(`^I request REST endpoint with method \"([^\"]*)\" and path \"([^\"]*)\".*$`)\n\t\tif re.Match([]byte(step.Text)) {\n\t\t\ttime.Sleep(1 * time.Second)\n\t\t}\n\t})\n\n\treturn &c\n}", "func NewRestTester(tb testing.TB, restConfig *RestTesterConfig) *RestTester {\n\treturn newRestTester(tb, restConfig, useSingleCollection, 1)\n}", "func (i *InspecRunner) RunAllTests(path string) (verifiers.TestSuite, error) {\n\tv := inspec.InspecVerifier{}\n\terr := v.Setup(path)\n\n\tif err != nil {\n\t\tfmt.Println(\"error during inspec verifier setup\")\n\t\treturn verifiers.TestSuite{}, err\n\t}\n\n\tresult, err := v.Check(path)\n\n\tif err != nil {\n\t\tfmt.Println(\"error during inspec test execution\")\n\t\treturn result, err\n\t}\n\treturn result, nil\n\n}", "func TestAllSpecs(t *testing.T) {\n\tr := gospec.NewRunner()\n\n\tr.Parallel = false\n\n\tr.BeforeEach = func() {\n\t\tConfigure(map[string]string{\n\t\t\t\"server\": \"localhost:7000\",\n\t\t\t\"process\": \"1\",\n\t\t\t\"database\": \"15\",\n\t\t\t\"pool\": \"1\",\n\t\t})\n\n\t\tfor port := 7000; port <= 7002; port++ {\n\t\t\tc, _ := redis.Dial(\"tcp\", \"localhost:\"+strconv.Itoa(port))\n\t\t\tc.Cmd(\"flushdb\")\n\t\t\tc.Close()\n }\n\t}\n\n\t// List all specs here\n\tr.AddSpec(WorkersSpec)\n\tr.AddSpec(ConfigSpec)\n\tr.AddSpec(MsgSpec)\n\tr.AddSpec(FetchSpec)\n\tr.AddSpec(WorkerSpec)\n\tr.AddSpec(ManagerSpec)\n\tr.AddSpec(ScheduledSpec)\n\tr.AddSpec(EnqueueSpec)\n\tr.AddSpec(MiddlewareSpec)\n\tr.AddSpec(MiddlewareRetrySpec)\n\tr.AddSpec(MiddlewareStatsSpec)\n\n\t// Run GoSpec and report any errors to gotest's `testing.T` instance\n\tgospec.MainGoTest(r, t)\n}", "func (client IotHubResourceClient) TestAllRoutesSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}", "func TestGetVideosFullList(t *testing.T) {\r\n\tlog.Printf(\"--------------------------------------------------------\")\r\n\tlog.Printf(\"RUNNING: ===>tablecrud/videoCrud.TestGetVideosFullList()\")\r\n\tlog.Printf(\"--------------------------------------------------------\")\r\n\tmyDB := db.Connect()\r\n\tmyVideo := &Video{}\r\n\tmyVideo.GetVideosFullList(myDB)\r\n}", "func TestRunTestAllReal(t *testing.T) {\n\tif testing.Short() {\n\t\tt.Skip(\"Skipping test in short mode.\")\n\t}\n\n\ttaskData := agent.TaskData{\n\t\tStringValues: map[string]string{\n\t\t\tCFG_TEST_TYPE: CFG_TYPE_ALL,\n\t\t\tCFG_SERVER_HOST: 
\"speedtest.nyc.rr.com:8080\",\n\t\t\tCFG_SERVER_ID: \"16976\",\n\t\t},\n\t\tIntValues: map[string]int{\n\t\t\tCFG_SERVER_ID: 16976,\n\t\t\tCFG_TIME_OUT: 5,\n\t\t},\n\t\tFloatValues: map[string]float64{CFG_MAX_SECONDS: 6},\n\t\tIntSlices: map[string][]int{\n\t\t\tCFG_DOWNLOAD_SIZES: {245388, 505544},\n\t\t\tCFG_UPLOAD_SIZES: {32768, 65536},\n\t\t},\n\t}\n\n\tspdTestRunner := SpeedTestRunner{}\n\n\tspTestResults, err := spdTestRunner.Run(taskData)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected Error: \\n%s\", err.Error())\n\t}\n\n\tresults := spTestResults.Latency.Seconds()\n\n\tif results <= 0 {\n\t\tt.Errorf(\"Error: Expected a positive Latency result, but got: %f\", results)\n\t} else {\n\t\tfmt.Printf(\"\\nLatency test results for server %d ... %f\\n\", taskData.IntValues[CFG_SERVER_ID], results)\n\t}\n\n\tresults = spTestResults.Download\n\tif results <= 0 {\n\t\tt.Errorf(\"Error: Expected a positive Download result, but got: %f\", results)\n\t} else {\n\t\tfmt.Printf(\"\\nDownload test results for server %d ... %f\\n\", taskData.IntValues[CFG_SERVER_ID], results)\n\t}\n\n\tresults = spTestResults.Upload\n\tif results <= 0 {\n\t\tt.Errorf(\"Error: Expected a positive Upload result, but got: %f\", results)\n\t} else {\n\t\tfmt.Printf(\"\\nUpload test results for server %d ... %f\\n\", taskData.IntValues[CFG_SERVER_ID], results)\n\t}\n}", "func ConvertTablesToTestCases(tables []Result) []TestCase {\n\ttestcases := []TestCase{}\n\tfor i, t := range tables {\n\t\ttestcases = append(testcases, TestCase{\n\t\t\tIndex: strconv.Itoa(i + 1),\n\t\t\tContent: t.Content,\n\t\t\tQuestion: \"\",\n\t\t})\n\t}\n\n\treturn testcases\n}", "func VerifyRestTestByDefault(data interface{}, context *TestContext) bool {\n\trestTD, ok := data.(*RestTestData)\n\tif !ok {\n\t\tTestLog.Fatalf(\"Fail to convert data to RestTestData\")\n\t\treturn false\n\t}\n\n\t//verify the status code in response\n\tif restTD.actualStatus != restTD.ExpectedStatus {\n\t\tTestLog.Fatalf(\"Expected status %d, but returned status %d for uri %s.\", restTD.ExpectedStatus, restTD.actualStatus, restTD.URI)\n\t\treturn false\n\t}\n\n\tremoveMetaData(reflect.ValueOf(restTD.OutputBody))\n\t//verify the response body\n\tequal := reflect.DeepEqual(restTD.ExpectedBody, restTD.OutputBody)\n\tif !equal {\n\t\tepc, _ := json.Marshal(restTD.ExpectedBody)\n\t\tact, _ := json.Marshal(restTD.OutputBody)\n\n\t\t//workaroud for issue 117.\n\t\tif string(act) == \"null\" && string(epc) == \"[]\" {\n\t\t\tTestLog.Log(\"===Nothing returned\")\n\t\t\tequal = true\n\t\t} else {\n\t\t\tTestLog.Fatalf(\"Expected response body %s,\\r\\n but returned response body %s for uri %s.\", epc, act, restTD.URI)\n\t\t}\n\t}\n\n\treturn equal\n}", "func (client IotHubResourceClient) TestAllRoutesPreparer(ctx context.Context, input TestAllRoutesInput, iotHubName string, resourceGroupName string) (*http.Request, error) {\n\tpathParameters := map[string]interface{}{\n\t\t\"iotHubName\": autorest.Encode(\"path\", iotHubName),\n\t\t\"resourceGroupName\": autorest.Encode(\"path\", resourceGroupName),\n\t\t\"subscriptionId\": autorest.Encode(\"path\", client.SubscriptionID),\n\t}\n\n\tconst APIVersion = \"2022-04-30-preview\"\n\tqueryParameters := map[string]interface{}{\n\t\t\"api-version\": APIVersion,\n\t}\n\n\tpreparer := autorest.CreatePreparer(\n\t\tautorest.AsContentType(\"application/json; 
charset=utf-8\"),\n\t\tautorest.AsPost(),\n\t\tautorest.WithBaseURL(client.BaseURI),\n\t\tautorest.WithPathParameters(\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/IotHubs/{iotHubName}/routing/routes/$testall\", pathParameters),\n\t\tautorest.WithJSON(input),\n\t\tautorest.WithQueryParameters(queryParameters))\n\treturn preparer.Prepare((&http.Request{}).WithContext(ctx))\n}", "func batchProgrammingsTest(e *httpexpect.Expect, t *testing.T) {\n\ttestCases := []testCase{\n\t\tnotLoggedTestCase,\n\t\t{\n\t\t\tToken: testCtx.User.Token,\n\t\t\tStatus: http.StatusUnauthorized,\n\t\t\tSent: []byte(`{Pend}`),\n\t\t\tBodyContains: []string{\"Droits administrateur requis\"}},\n\t\t{\n\t\t\tToken: testCtx.Admin.Token,\n\t\t\tStatus: http.StatusInternalServerError,\n\t\t\tSent: []byte(`{Pend}`),\n\t\t\tBodyContains: []string{\"Batch programmation, décodage : \"}},\n\t\t//cSpell:disable\n\t\t{\n\t\t\tToken: testCtx.Admin.Token,\n\t\t\tStatus: http.StatusOK,\n\t\t\tSent: []byte(`{\"Programmings\": [\n\t\t\t{\"physical_op_id\":9,\"year\":2018,\"value\":100000000,\n\t\t\t\"commission_id\":7,\"total_value\":null,\"state_ratio\":null},\n\t\t\t{\"physical_op_id\":10,\"year\":2018,\"value\":200000000,\n\t\t\t\"commission_id\":8,\"total_value\":400000000,\"state_ratio\":null},\n\t\t\t{\"physical_op_id\":14 ,\"year\":2018,\"value\":300000000,\n\t\t\t\"commission_id\":3,\"total_value\":600000000,\"state_ratio\":0.35}],\n\t\t\t\"year\":2018}`),\n\t\t\tBodyContains: []string{\"Programmings\", `\"physical_op_id\":9`, `\"physical_op_id\":10`,\n\t\t\t\t`\"physical_op_id\":14`, `\"value\":200000000`, `\"commission_id\":8`, `\"total_value\":400000000`,\n\t\t\t\t`\"total_value\":null`, `\"state_ratio\":null`, `\"state_ratio\":0.35`}},\n\t}\n\t//cSpell:enable\n\tf := func(tc testCase) *httpexpect.Response {\n\t\treturn e.POST(\"/api/programmings/array\").\n\t\t\tWithHeader(\"Authorization\", \"Bearer \"+tc.Token).WithBytes(tc.Sent).Expect()\n\t}\n\tfor _, r := range chkTestCases(testCases, f, \"BatchProgrammings\") {\n\t\tt.Error(r)\n\t}\n}", "func TestDashboardHandler_Deployments(t *testing.T) {\n\tfor _, test := range tests {\n\t\tt.Logf(\"Running test: %s\", test.Name)\n\t\tsubTest(t, test)\n\t}\n}", "func initTests() int {\n\trows, err := db.Query(\"SELECT idTEST, categories, conditions, params, period, scoreMap, custORacct FROM TXTEST\")\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// defer rows.Close()\n\ti := 0\n\tfor rows.Next() {\n\t\ttest := new(TxTest)\n\t\terr := rows.Scan(&test.TName, &test.CategoryStr, &test.Conditions, &test.ParamStr, &test.PeriodStr, &test.ScoreMapStr, &test.CustOrAcct)\n\t\tif err != nil {\n\t\t\trows.Close()\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\ttest.Params, test.QueryType = parseParams(strings.Split(test.ParamStr, \",\"), test.CustOrAcct)\n\t\ttest.Period = *parsePeriod(test.PeriodStr)\n\t\ttest.ScoreMap = parseScoreMap(test.ScoreMapStr)\n\n\t\ttxTestCache[test.TName] = test\n\t\ti++\n\t\tfmt.Printf(\"\\ntest %s: %+v\", txTestCache[test.TName].TName, txTestCache[test.TName])\n\t}\n\trows.Close()\n\t//\treturn custs, err\n\treturn i\n}", "func TestAllDocsAccessControl(t *testing.T) {\n\trt := NewRestTester(t, &RestTesterConfig{SyncFn: channels.DocChannelsSyncFunction})\n\tdefer rt.Close()\n\n\ttype allDocsRow struct {\n\t\tID string `json:\"id\"`\n\t\tKey string `json:\"key\"`\n\t\tValue struct {\n\t\t\tRev string `json:\"rev\"`\n\t\t\tChannels []string `json:\"channels,omitempty\"`\n\t\t\tAccess map[string]base.Set 
`json:\"access,omitempty\"` // for admins only\n\t\t} `json:\"value\"`\n\t\tDoc db.Body `json:\"doc,omitempty\"`\n\t\tError string `json:\"error\"`\n\t}\n\ttype allDocsResponse struct {\n\t\tTotalRows int `json:\"total_rows\"`\n\t\tOffset int `json:\"offset\"`\n\t\tRows []allDocsRow `json:\"rows\"`\n\t}\n\n\t// Create some docs:\n\ta := auth.NewAuthenticator(rt.MetadataStore(), nil, rt.GetDatabase().AuthenticatorOptions())\n\ta.Collections = rt.GetDatabase().CollectionNames\n\tguest, err := a.GetUser(\"\")\n\tassert.NoError(t, err)\n\tguest.SetDisabled(false)\n\terr = a.Save(guest)\n\tassert.NoError(t, err)\n\n\tRequireStatus(t, rt.SendRequest(\"PUT\", \"/{{.keyspace}}/doc5\", `{\"channels\":\"Cinemax\"}`), 201)\n\tRequireStatus(t, rt.SendRequest(\"PUT\", \"/{{.keyspace}}/doc4\", `{\"channels\":[\"WB\", \"Cinemax\"]}`), 201)\n\tRequireStatus(t, rt.SendRequest(\"PUT\", \"/{{.keyspace}}/doc3\", `{\"channels\":[\"CBS\", \"Cinemax\"]}`), 201)\n\tRequireStatus(t, rt.SendRequest(\"PUT\", \"/{{.keyspace}}/doc2\", `{\"channels\":[\"CBS\"]}`), 201)\n\tRequireStatus(t, rt.SendRequest(\"PUT\", \"/{{.keyspace}}/doc1\", `{\"channels\":[]}`), 201)\n\n\tguest, err = a.GetUser(\"\")\n\tassert.NoError(t, err)\n\tguest.SetDisabled(true)\n\terr = a.Save(guest)\n\tassert.NoError(t, err)\n\n\t// Create a user:\n\talice, err := a.NewUser(\"alice\", \"letmein\", channels.BaseSetOf(t, \"Cinemax\"))\n\trequire.NoError(t, err)\n\tassert.NoError(t, a.Save(alice))\n\n\t// Get a single doc the user has access to:\n\tresponse := rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/doc3\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\t// Get a single doc the user doesn't have access to:\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/doc2\", \"\", \"alice\")\n\tRequireStatus(t, response, 403)\n\n\t// Check that _all_docs only returns the docs the user has access to:\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tallDocsResult := allDocsResponse{}\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 3, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[1].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[1].Value.Channels)\n\tassert.Equal(t, \"doc5\", allDocsResult.Rows[2].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[2].Value.Channels)\n\n\t// Check all docs limit option\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?limit=1&channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check all docs startkey option\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?startkey=doc5&channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = 
base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc5\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check all docs startkey option with double quote\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?startkey=doc5&channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc5\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check all docs endkey option\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?endkey=doc3&channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check all docs endkey option\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?endkey=doc3&channels=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check _all_docs with include_docs option:\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?include_docs=true\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 3, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[1].ID)\n\tassert.Equal(t, \"doc5\", allDocsResult.Rows[2].ID)\n\n\t// Check POST to _all_docs:\n\tbody := `{\"keys\": [\"doc4\", \"doc1\", \"doc3\", \"b0gus\"]}`\n\tresponse = rt.SendUserRequest(http.MethodPost, \"/{{.keyspace}}/_all_docs?channels=true\", body, \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response from POST _all_docs = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 4, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].Key)\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, \"1-e0351a57554e023a77544d33dd21e56c\", allDocsResult.Rows[0].Value.Rev)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\tassert.Equal(t, \"doc1\", allDocsResult.Rows[1].Key)\n\tassert.Equal(t, \"forbidden\", allDocsResult.Rows[1].Error)\n\tassert.Equal(t, \"\", 
allDocsResult.Rows[1].Value.Rev)\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[2].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[2].Value.Channels)\n\tassert.Equal(t, \"1-20912648f85f2bbabefb0993ddd37b41\", allDocsResult.Rows[2].Value.Rev)\n\tassert.Equal(t, \"b0gus\", allDocsResult.Rows[3].Key)\n\tassert.Equal(t, \"not_found\", allDocsResult.Rows[3].Error)\n\tassert.Equal(t, \"\", allDocsResult.Rows[3].Value.Rev)\n\n\t// Check GET to _all_docs with keys parameter:\n\tresponse = rt.SendUserRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs?channels=true&keys=%5B%22doc4%22%2C%22doc1%22%2C%22doc3%22%2C%22b0gus%22%5D\", \"\", \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response from GET _all_docs = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 4, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].Key)\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\tassert.Equal(t, \"doc1\", allDocsResult.Rows[1].Key)\n\tassert.Equal(t, \"forbidden\", allDocsResult.Rows[1].Error)\n\tassert.Equal(t, \"doc3\", allDocsResult.Rows[2].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[2].Value.Channels)\n\tassert.Equal(t, \"b0gus\", allDocsResult.Rows[3].Key)\n\tassert.Equal(t, \"not_found\", allDocsResult.Rows[3].Error)\n\n\t// Check POST to _all_docs with limit option:\n\tbody = `{\"keys\": [\"doc4\", \"doc1\", \"doc3\", \"b0gus\"]}`\n\tresponse = rt.SendUserRequest(http.MethodPost, \"/{{.keyspace}}/_all_docs?limit=1&channels=true\", body, \"alice\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Response from POST _all_docs = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 1, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].Key)\n\tassert.Equal(t, \"doc4\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, []string{\"Cinemax\"}, allDocsResult.Rows[0].Value.Channels)\n\n\t// Check _all_docs as admin:\n\tresponse = rt.SendAdminRequest(http.MethodGet, \"/{{.keyspace}}/_all_docs\", \"\")\n\tRequireStatus(t, response, 200)\n\n\tlog.Printf(\"Admin response = %s\", response.Body.Bytes())\n\tallDocsResult = allDocsResponse{}\n\terr = base.JSONUnmarshal(response.Body.Bytes(), &allDocsResult)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 5, len(allDocsResult.Rows))\n\tassert.Equal(t, \"doc1\", allDocsResult.Rows[0].ID)\n\tassert.Equal(t, \"doc2\", allDocsResult.Rows[1].ID)\n}", "func GetAllExecutions(count int, start int) ([]V_testlink_testexecution_tree, error) {\n\tvar rs []V_testlink_testexecution_tree\n\terr := orm.Limit(count, start).Find(&rs)\n\treturn rs, err\n}", "func getAll(w http.ResponseWriter, r *http.Request) {\n\tenableCors(&w)\n\trows, err := mainDB.Query(\"SELECT * FROM testTable\")\n\tcheckErr(err)\n\tvar logs Logs\n\tfor rows.Next() {\n\t\tvar log Log\n\t\terr = rows.Scan(&log.ID, &log.Time, &log.Level, &log.Msg, &log.Category, &log.DebugId, &log.Ip, &log.RequestId, &log.Type, &log.Uri, &log.UserId)\n\t\tcheckErr(err)\n\t\tlogs = append(logs, log)\n\t}\n\tjsonB, errMarshal := json.Marshal(logs)\n\tcheckErr(errMarshal)\n\tfmt.Fprintf(w, \"%s\", string(jsonB))\n}", "func (client IotHubResourceClient) TestAllRoutesResponder(resp *http.Response) 
(result TestAllRoutesResult, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func TestEveryInterface(t *testing.T) {\n\ts := &TestInterfaceScene{}\n\ttango.Run(tango.RunOptions{\n\t\tNoRun: true,\n\t\tHeadlessMode: true,\n\t}, s)\n\tif s.failed {\n\t\tt.Errorf(\"failed to test every interface. Reason: %v\", s.reason)\n\t}\n}", "func TestMultiSchemaSupport(t *testing.T) {\n\twithEachTestDB(t, func(t *testing.T, tdb *TestDB) {\n\t\tmusic := NewMigrator(WithDialect(tdb.Dialect), WithTableName(\"music_migrations\"))\n\t\tcontacts := NewMigrator(WithDialect(tdb.Dialect), WithTableName(\"contacts_migrations\"))\n\n\t\t// Use the same connection for both sets of migrations\n\t\tdb := tdb.Connect(t)\n\t\tdefer func() { _ = db.Close() }()\n\n\t\t// Apply the Music migrations\n\t\terr := music.Apply(db, testMigrations(t, \"music\"))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to apply music migrations: %s\", err)\n\t\t}\n\n\t\t// ... then the Contacts Migrations\n\t\terr = contacts.Apply(db, testMigrations(t, \"contacts\"))\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Failed to apply contact migrations: %s\", err)\n\t\t}\n\n\t\t// Then run a SELECT COUNT(*) query on each table to ensure that all of the\n\t\t// expected tables are co-existing in the same database and that they all\n\t\t// contain the expected number of rows (this approach is admittedly odd,\n\t\t// but it relies only on ANSI SQL code, so it should run on any SQL database).\n\t\texpectedRowCounts := map[string]int{\n\t\t\t\"music_migrations\": 3,\n\t\t\t\"contacts_migrations\": 3,\n\t\t\t\"contacts\": 1,\n\t\t\t\"phone_numbers\": 3,\n\t\t\t\"addresses\": 2,\n\t\t\t\"artists\": 0,\n\t\t\t\"albums\": 0,\n\t\t\t\"tracks\": 0,\n\t\t}\n\t\tfor table, expectedRowCount := range expectedRowCounts {\n\t\t\tqtn := tdb.Dialect.QuotedTableName(\"\", table)\n\t\t\tactualCount := -1 // Don't initialize to 0 because that's an expected value\n\t\t\tquery := fmt.Sprintf(\"SELECT COUNT(*) FROM %s\", qtn)\n\t\t\trows, err := db.Query(query)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(err)\n\t\t\t}\n\t\t\tif rows != nil && rows.Next() {\n\t\t\t\terr = rows.Scan(&actualCount)\n\t\t\t\tif err != nil {\n\t\t\t\t\tt.Error(err)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tt.Errorf(\"Expected rows\")\n\t\t\t}\n\t\t\tif actualCount != expectedRowCount {\n\t\t\t\tt.Errorf(\"Expected %d rows in table %s. 
Got %d\", expectedRowCount, qtn, actualCount)\n\t\t\t}\n\t\t}\n\t})\n}", "func RenderTestflowTable(writer io.Writer, flow tmv1beta1.TestFlow) {\n\ttable := tablewriter.NewWriter(writer)\n\ttable.SetHeader([]string{\"Step\", \"Definition\", \"Dependencies\"})\n\ttable.SetAutoWrapText(true)\n\ttable.SetRowSeparator(\"-\")\n\ttable.SetRowLine(true)\n\n\tfor _, s := range flow {\n\t\tdefinition := \"\"\n\t\tif s.Definition.Name != \"\" {\n\t\t\tdefinition = fmt.Sprintf(\"Name: %s\", s.Definition.Name)\n\t\t}\n\t\tif s.Definition.Label != \"\" {\n\t\t\tdefinition = fmt.Sprintf(\"Label: %s\", s.Definition.Label)\n\t\t}\n\n\t\ttable.Append([]string{s.Name, definition, strings.Join(s.DependsOn, \"\\n\")})\n\t}\n\ttable.Render()\n}", "func DumpAll(c *gin.Context) {\n\tc.JSON(200, models.All)\n}", "func runAllTestCases(t *testing.T, checker resultsChecker) {\n\tt.Helper()\n\tchecker.resetTestCasesRun()\n\terr := filepath.Walk(checker.rootDir(),\n\t\tfunc(path string, info os.FileInfo, err error) error {\n\t\t\trequire.NoError(t, err)\n\t\t\tif info.IsDir() && checker.isTestDir(path) {\n\t\t\t\trunDirectoryTestCase(t, path, checker)\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\trequire.NoError(t, err)\n\trequire.NotZero(t, len(checker.TestCasesRun()), \"No complete test cases found in %s\", checker.rootDir())\n}", "func (b TestableBuild) TestData() []settings.TestBundle {\n\tbasic := resp.MiloBuild{\n\t\tSummary: resp.BuildComponent{\n\t\t\tLabel: \"Test swarming build\",\n\t\t\tStatus: resp.Success,\n\t\t\tStarted: time.Date(2016, 1, 2, 15, 4, 5, 999999999, time.UTC),\n\t\t\tFinished: time.Date(2016, 1, 2, 15, 4, 6, 999999999, time.UTC),\n\t\t\tDuration: time.Second,\n\t\t},\n\t}\n\tresults := []settings.TestBundle{\n\t\t{\n\t\t\tDescription: \"Basic successful build\",\n\t\t\tData: templates.Args{\"Build\": basic},\n\t\t},\n\t}\n\tc := context.Background()\n\tc, _ = testclock.UseTime(c, time.Date(2016, time.March, 14, 11, 0, 0, 0, time.UTC))\n\tfor _, tc := range getTestCases() {\n\t\tbuild, err := swarmingBuildImpl(c, \"foo\", \"debug\", tc)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"Error while processing %s: %s\", tc, err))\n\t\t}\n\t\tresults = append(results, settings.TestBundle{\n\t\t\tDescription: tc,\n\t\t\tData: templates.Args{\"Build\": build},\n\t\t})\n\t}\n\treturn results\n}", "func (suite *EventStoreTestSuite) TestLoadAll() {\n\tid, _ := uuid.Parse(\"c1138e5f-f6fb-4dd0-8e79-255c6c8d3756\")\n\ttimestamp := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)\n\n\texpectedEvents := []eh.Event{\n\t\teh.NewEventForAggregate(mocks.EventType, &mocks.EventData{Content: \"event1\"},\n\t\t\ttimestamp, mocks.AggregateType, id, 1),\n\t\teh.NewEventForAggregate(mocks.EventType, &mocks.EventData{Content: \"event2\"},\n\t\t\ttimestamp, mocks.AggregateType, id, 2),\n\t}\n\n\t_ = suite.store.Save(context.Background(), expectedEvents, 0)\n\n\tevents, err := suite.store.LoadAll(context.Background())\n\tassert.Nil(suite.T(), err)\n\tassert.Len(suite.T(), events, 2)\n\n\tfor i, event := range events {\n\t\tif err := eh.CompareEvents(event, expectedEvents[i]); err != nil {\n\t\t\tsuite.T().Error(\"the event was incorrect:\", err)\n\t\t}\n\t\tif event.Version() != i+1 {\n\t\t\tsuite.T().Error(\"the event version should be correct:\", event, event.Version())\n\t\t}\n\t}\n}", "func Test_before(t *testing.T) {\n db.Connect()\n RegisterAllHandlers()\n TEST = true\n}", "func NewRestTesterMultipleCollections(tb testing.TB, restConfig *RestTesterConfig, numCollections int) *RestTester {\n\tif 
!base.TestsUseNamedCollections() {\n\t\ttb.Skip(\"This test requires named collections and is running against a bucket type that does not support them\")\n\t}\n\tif numCollections == 0 {\n\t\ttb.Errorf(\"0 is not a valid number of collections to specify\")\n\t}\n\treturn newRestTester(tb, restConfig, useMultipleCollection, numCollections)\n}", "func (c *Client) ListAllJUnitResults(begin, end time.Time) ([]JUnitResult, error) {\n\tresults, err := c.issueQuery(\"cicd_jUnitResult\", begin, end)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error listing all JUnit results: %v\", err)\n\t}\n\n\treturn processJUnitResults(results)\n}", "func TestIntegrationAll(t *testing.T) {\n\ts := New(genericRegions[0], genericRegions)\n\tresult := len(s.All())\n\tif len(s.sessions) != result {\n\t\tt.Fatalf(\"All() length invalid, expected: %d, got: %d\", len(genericRegions), result)\n\t}\n}", "func (a *App) retrieveAll(c *echo.Context) error {\n\tvar tasks []*model.Task\n\ta.GetDB().Find(&tasks, struct{}{})\n\tc.JSON(http.StatusOK, tasks)\n\treturn nil\n}", "func TestGetAllOrdersForRestaurantID(t *testing.T) {\n\n // ...\n\n}", "func All(helper *SQLHelper, awaitables []Awaitable) Awaitable {\n\tif _, ok := awaitables[0].(sqlRowContainer); !ok {\n\t\treturn awaitables[0]\n\t}\n\n\tret := &sqlAllAwaiter{\n\t\thelper: helper,\n\t\tdata: make(map[interface{}]sqlRowContainer, len(awaitables)),\n\t\tretrievedValues: make([]map[string]interface{}, 0),\n\t\ttable: awaitables[0].(sqlRowContainer).getTable(),\n\t}\n\n\tfor _, row := range awaitables {\n\t\trowContainer, ok := row.(sqlRowContainer)\n\t\tif !ok {\n\t\t\treturn row\n\t\t}\n\t\tkey := rowContainer.getData()[rowContainer.getTable().uniqueIndex[0]]\n\t\tret.data[normalizeKeys(key)] = rowContainer\n\t}\n\n\treturn &basicAwaitable{\n\t\tpollableAndCheckable: ret,\n\t\ttimeout: 120 * time.Second,\n\t}\n}", "func (c *ClickHouse) initAll() error {\n\tif err := c.initConn(); err != nil {\n\t\treturn err\n\t}\n\tif err := c.initSchema(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func returnAllFacts(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvar facts []Fact\n\n\tresult, err := db.Query(\"SELECT * FROM Facts\")\n\tif err != nil {\n\t\tflushResponseWriter(w, 0)\n\t\tpanic(err.Error())\n\t}\n\tdefer result.Close()\n\n\tfor result.Next() {\n\t\tvar fact Fact\n\t\terr := result.Scan(&fact.ID, &fact.FactType, &fact.Content)\n\t\tif err != nil {\n\t\t\tflushResponseWriter(w, 0)\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tfacts = append(facts, fact)\n\t}\n\n\tw.WriteHeader(http.StatusOK) //200 to client\n\tjson.NewEncoder(w).Encode(facts)\n}", "func (b *TestDriver) GenerateAllStates() (err error) {\n\treturn\n}", "func runTestSuite(t *testing.T, ts TestSuite) {\n\t// Load only the rule for this test suite\n\truleConfigPath := strings.Split(ts.RootPath, \"config-lint/cli/assets/\")[1] + \"/rule.yml\"\n\truleSet, err := loadBuiltInRuleSet(ruleConfigPath)\n\tif err != nil {\n\t\tassert.Nil(t, err, \"Cannot load built-in Terraform rule\")\n\t}\n\n\tfor _, tc := range ts.Tests {\n\t\toptions := linter.Options{\n\t\t\tRuleIDs: []string{tc.RuleId},\n\t\t}\n\t\tvs := assertion.StandardValueSource{}\n\n\t\t// validate the rule set\n\t\tif contains(tc.Tags, \"terraform11\") {\n\t\t\t// Load the test resources for this test suite\n\t\t\ttestResourceDirectory := strings.Split(ts.RootPath, \"config-lint/cli/\")[1] + \"/tests/terraform11/\"\n\t\t\ttestResources, err := 
getTestResources(testResourceDirectory)\n\t\t\tif err != nil {\n\t\t\t\tassert.Nil(t, err, \"Cannot load built-in Terraform 11 test resources\")\n\n\t\t\t}\n\t\t\t// Defining 'tf11' for the Parser type\n\t\t\tl, err := linter.NewLinter(ruleSet, vs, testResources, \"tf11\")\n\n\t\t\treport, err := l.Validate(ruleSet, options)\n\t\t\tassert.Nil(t, err, \"Validate failed for file\")\n\n\t\t\twarningViolationsReported := getViolationsString(\"WARNING\", report.Violations)\n\t\t\twarningMessage := fmt.Sprintf(\"Expecting %d warnings for rule %s:\\n %s\", tc.Warnings, tc.RuleId, warningViolationsReported)\n\t\t\tassert.Equal(t, tc.Warnings, numberOfWarnings(report.Violations), warningMessage)\n\n\t\t\tfailureViolationsReported := getViolationsString(\"FAILURE\", report.Violations)\n\t\t\tfailureMessage := fmt.Sprintf(\"Expecting %d failures for rule %s:\\n %s\", tc.Failures, tc.RuleId, failureViolationsReported)\n\t\t\tassert.Equal(t, tc.Failures, numberOfFailures(report.Violations), failureMessage)\n\t\t}\n\n\t\tif contains(tc.Tags, \"terraform12\") {\n\t\t\t// Load the test resources for this test suite\n\t\t\ttestResourceDirectory := strings.Split(ts.RootPath, \"config-lint/cli/\")[1] + \"/tests/terraform12/\"\n\t\t\ttestResources, err := getTestResources(testResourceDirectory)\n\t\t\tif err != nil {\n\t\t\t\tassert.Nil(t, err, \"Cannot load built-in Terraform 12 test resources\")\n\n\t\t\t}\n\t\t\t// Defining 'tf11' for the Parser type\n\t\t\tl, err := linter.NewLinter(ruleSet, vs, testResources, \"tf12\")\n\n\t\t\treport, err := l.Validate(ruleSet, options)\n\t\t\tassert.Nil(t, err, \"Validate failed for file\")\n\n\t\t\twarningViolationsReported := getViolationsString(\"WARNING\", report.Violations)\n\t\t\twarningMessage := fmt.Sprintf(\"Expecting %d warnings for rule %s:\\n %s\", tc.Warnings, tc.RuleId, warningViolationsReported)\n\t\t\tassert.Equal(t, tc.Warnings, numberOfWarnings(report.Violations), warningMessage)\n\n\t\t\tfailureViolationsReported := getViolationsString(\"FAILURE\", report.Violations)\n\t\t\tfailureMessage := fmt.Sprintf(\"Expecting %d failures for rule %s:\\n %s\", tc.Failures, tc.RuleId, failureViolationsReported)\n\t\t\tassert.Equal(t, tc.Failures, numberOfFailures(report.Violations), failureMessage)\n\t\t}\n\t}\n}", "func TestAllRouter(t *testing.T) {\n\tprefixrouter := v2.Router{}\n\tprefixrouter.Match = v2.RouterMatch{\n\t\tPrefix: \"/foo\",\n\t}\n\tprefixrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"prefix\",\n\t\t},\n\t}\n\tpathrouter := v2.Router{}\n\tpathrouter.Match = v2.RouterMatch{\n\t\tPath: \"/foo1\",\n\t}\n\tpathrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"path\",\n\t\t},\n\t}\n\tregrouter := v2.Router{}\n\tregrouter.Match = v2.RouterMatch{\n\t\tRegex: \"/foo[0-9]+\",\n\t}\n\tregrouter.Route = v2.RouteAction{\n\t\tRouterActionConfig: v2.RouterActionConfig{\n\t\t\tClusterName: \"regexp\",\n\t\t},\n\t}\n\t// path \"/foo1\" match all of the router\n\t// path \"/foo11\" match prefix and regexp router\n\t// path \"/foo\" match prefix router only\n\trouters := []v2.Router{pathrouter, regrouter, prefixrouter}\n\ttestCases := []struct {\n\t\tpath string\n\t\tclustername string\n\t\tmatched int\n\t}{\n\t\t{\"/foo1\", \"path\", 3},\n\t\t{\"/foo11\", \"regexp\", 2},\n\t\t{\"/foo\", \"prefix\", 1},\n\t}\n\tvirtualHost, _ := NewVirtualHostImpl(&v2.VirtualHost{\n\t\tName: \"test\",\n\t\tDomains: []string{\"*\"},\n\t\tRouters: routers,\n\t})\n\tfor i, tc := 
range testCases {\n\t\theaders := protocol.CommonHeader(map[string]string{\n\t\t\tstrings.ToLower(protocol.MosnHeaderPathKey): tc.path,\n\t\t})\n\t\trts := virtualHost.GetAllRoutesFromEntries(headers, 1)\n\t\tif len(rts) != tc.matched {\n\t\t\tt.Errorf(\"#%d route unexpected result\\n\", i)\n\t\t}\n\t}\n}", "func runTestsOnNewDB(c *C, overrider configOverrider, dbName string, tests ...func(dbt *DBTest)) {\n\tdsn := getDSN(overrider, func(config *mysql.Config) {\n\t\tconfig.DBName = \"\"\n\t})\n\tdb, err := sql.Open(\"mysql\", dsn)\n\tc.Assert(err, IsNil, Commentf(\"Error connecting\"))\n\tdefer db.Close()\n\n\t_, err = db.Exec(fmt.Sprintf(\"DROP DATABASE IF EXISTS `%s`;\", dbName))\n\tc.Assert(err, IsNil, Commentf(\"Error drop database %s: %s\", dbName, err))\n\n\t_, err = db.Exec(fmt.Sprintf(\"CREATE DATABASE `%s`;\", dbName))\n\tc.Assert(err, IsNil, Commentf(\"Error create database %s: %s\", dbName, err))\n\n\tdefer func() {\n\t\t_, err = db.Exec(fmt.Sprintf(\"DROP DATABASE IF EXISTS `%s`;\", dbName))\n\t\tc.Assert(err, IsNil, Commentf(\"Error drop database %s: %s\", dbName, err))\n\t}()\n\n\t_, err = db.Exec(fmt.Sprintf(\"USE `%s`;\", dbName))\n\tc.Assert(err, IsNil, Commentf(\"Error use database %s: %s\", dbName, err))\n\n\tdbt := &DBTest{c, db}\n\tfor _, test := range tests {\n\t\ttest(dbt)\n\t\tdbt.db.Exec(\"DROP TABLE IF EXISTS test\")\n\t}\n}", "func TestHelloWorldTable(t *testing.T) {\n\ttests := []struct{\n\t\tname\t\tstring \t//name of sub-test\n\t\trequest\t\tstring \t//what's ur request?\n\t\texpected\tstring\t//expectation\n\t}{\n\t\t{\n\t\t\tname: \"Aji\",\n\t\t\trequest: \"Aji\",\n\t\t\texpected: \"Hello Aji\",\n\t\t},\n\t\t{\n\t\t\tname: \"Wahidin\",\n\t\t\trequest: \"Wahidin\",\n\t\t\texpected: \"Hello Wahidin\",\n\t\t},\n\t}\n\n\tfor _, test := range tests {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tresult := HelloWorld(test.request)\n\t\t\trequire.Equal(t, test.expected, result)\n\t\t})\n\t}\n}", "func ToTestRecords(searchResp *frontend.SearchResponse, imgBaseURL string) []*TestRecord {\n\t// Group the results by test.\n\tretMap := map[types.TestName]*TestRecord{}\n\tfor _, oneDigest := range searchResp.Digests {\n\t\ttestNameVal := oneDigest.ParamSet[types.PRIMARY_KEY_FIELD]\n\t\tif len(testNameVal) == 0 {\n\t\t\tsklog.Errorf(\"Error: Digest '%s' has no primaryKey in paramset\", oneDigest.Digest)\n\t\t\tcontinue\n\t\t}\n\n\t\tdigestInfo := &DigestInfo{\n\t\t\tSRDigest: oneDigest,\n\t\t\tURL: DigestUrl(imgBaseURL, oneDigest.Digest),\n\t\t}\n\n\t\ttestName := types.TestName(oneDigest.ParamSet[types.PRIMARY_KEY_FIELD][0])\n\t\tif found, ok := retMap[testName]; ok {\n\t\t\tfound.Digests = append(found.Digests, digestInfo)\n\t\t} else {\n\t\t\tretMap[testName] = &TestRecord{\n\t\t\t\tTestName: testName,\n\t\t\t\tDigests: []*DigestInfo{digestInfo},\n\t\t\t}\n\t\t}\n\t}\n\n\t// Put the records into an array and return them.\n\tret := make([]*TestRecord, 0, len(retMap))\n\tfor _, oneTestRec := range retMap {\n\t\tret = append(ret, oneTestRec)\n\t}\n\n\treturn ret\n}", "func (e *ExecutionsFeature) IListAllJobExecutionHistory() error {\n\trequest, err := http.NewRequest(\"GET\", \"\", nil)\n\te.response = httptest.NewRecorder()\n\tps := httprouter.Params{}\n\trest.FindExecutions(e.response, request, ps)\n\treturn err\n}", "func RestApi() (storages []StoreViews, err error) {\n\to := orm.NewOrm()\n\tvar ones []device.Machine\n\n\tstorages = make([]StoreViews, 0)\n\tif _, err = o.QueryTable(\"machine\").Filter(\"devtype\", \"storage\").Filter(\"status\", 1).All(&ones); err 
!= nil {\n\t\tutil.AddLog(err)\n\t\treturn\n\t}\n\tfor _, val := range ones {\n\t\tstorage, err := restApi(val.Uuid)\n\t\tif err != nil {\n\t\t\tutil.AddLog(err)\n\t\t\treturn storages, err\n\t\t}\n\t\tstorages = append(storages, storage)\n\t}\n\treturn\n}", "func createSyntheticTests(rawJobResults testgridanalysisapi.RawData) []string {\n\twarnings := []string{}\n\n\t// make a pass to fill in install, upgrade, and infra synthentic tests.\n\ttype synthenticTestResult struct {\n\t\tname string\n\t\tpass int\n\t\tfail int\n\t}\n\n\tfor jobName, jobResults := range rawJobResults.JobResults {\n\t\tnumRunsWithoutSetup := 0\n\t\tfor jrrKey, jrr := range jobResults.JobRunResults {\n\t\t\tif jrr.SetupStatus == \"\" {\n\t\t\t\tnumRunsWithoutSetup++\n\t\t\t}\n\t\t\tisUpgrade := strings.Contains(jrr.Job, \"upgrade\")\n\n\t\t\tsyntheticTests := map[string]*synthenticTestResult{\n\t\t\t\ttestgridanalysisapi.InstallTestName: &synthenticTestResult{name: testgridanalysisapi.InstallTestName},\n\t\t\t\ttestgridanalysisapi.InstallTimeoutTestName: &synthenticTestResult{name: testgridanalysisapi.InstallTestName},\n\t\t\t\ttestgridanalysisapi.UpgradeTestName: &synthenticTestResult{name: testgridanalysisapi.UpgradeTestName},\n\t\t\t\ttestgridanalysisapi.InfrastructureTestName: &synthenticTestResult{name: testgridanalysisapi.InfrastructureTestName},\n\t\t\t}\n\n\t\t\thasSomeOperatorResults := len(jrr.SadOperators) > 0\n\t\t\tallOperatorsSuccessfulAtEndOfRun := true\n\t\t\tfor _, operator := range jrr.SadOperators {\n\t\t\t\tif operator.State == testgridanalysisapi.Failure {\n\t\t\t\t\tallOperatorsSuccessfulAtEndOfRun = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tupgradeFailed := false\n\t\t\tfor _, operator := range jrr.UpgradeOperators {\n\t\t\t\tif operator.State == testgridanalysisapi.Failure {\n\t\t\t\t\tupgradeFailed = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t\tsetupFailed := jrr.SetupStatus != testgridanalysisapi.Success\n\t\t\tsetupSucceeded := jrr.SetupStatus == testgridanalysisapi.Success\n\n\t\t\t// set overall installed status\n\t\t\tswitch {\n\t\t\tcase setupSucceeded:\n\t\t\t\t// if setup succeeded, we are guaranteed that installation succeeded.\n\t\t\t\tsyntheticTests[testgridanalysisapi.InstallTestName].pass = 1\n\n\t\t\tcase !hasSomeOperatorResults:\n\t\t\t\t// if we don't have any operator results, then don't count this an install one way or the other. This was an infra failure\n\n\t\t\tdefault:\n\t\t\t\t// the setup failed and we have some operator results, which means the install started. This is a failure\n\t\t\t\tjrr.TestFailures++\n\t\t\t\tjrr.FailedTestNames = append(jrr.FailedTestNames, testgridanalysisapi.InstallTestName)\n\t\t\t\tsyntheticTests[testgridanalysisapi.InstallTestName].fail = 1\n\n\t\t\t\t// TODO if the setupSucceeds, but we have some failing operators reporting failing at the end, then we should consider\n\t\t\t\t// marking all the operator tests themselves as flaking, but not failing because the install worked.\n\n\t\t\t}\n\n\t\t\t// set overall install timeout status\n\t\t\tswitch {\n\t\t\tcase !setupSucceeded && hasSomeOperatorResults && allOperatorsSuccessfulAtEndOfRun:\n\t\t\t\t// the setup failed and yet all operators were successful in the end. This means we had a weird problem. 
Probably a timeout failure.\n\t\t\t\tjrr.TestFailures++\n\t\t\t\tjrr.FailedTestNames = append(jrr.FailedTestNames, testgridanalysisapi.InstallTimeoutTestName)\n\t\t\t\tsyntheticTests[testgridanalysisapi.InstallTimeoutTestName].fail = 1\n\n\t\t\tdefault:\n\t\t\t\tsyntheticTests[testgridanalysisapi.InstallTimeoutTestName].pass = 1\n\n\t\t\t}\n\n\t\t\t// set the infra status\n\t\t\tswitch {\n\t\t\tcase setupFailed && !hasSomeOperatorResults:\n\t\t\t\t// we only count failures as infra if we have no operator results. If we got any operator working, then CI infra was working.\n\t\t\t\tjrr.TestFailures++\n\t\t\t\tjrr.FailedTestNames = append(jrr.FailedTestNames, testgridanalysisapi.InfrastructureTestName)\n\t\t\t\tsyntheticTests[testgridanalysisapi.InfrastructureTestName].fail = 1\n\n\t\t\tdefault:\n\t\t\t\tsyntheticTests[testgridanalysisapi.InfrastructureTestName].pass = 1\n\t\t\t}\n\n\t\t\t// set the update status\n\t\t\tswitch {\n\t\t\tcase setupFailed:\n\t\t\t\t// do nothing\n\t\t\tcase !isUpgrade:\n\t\t\t// do nothing\n\n\t\t\tcase len(jrr.UpgradeOperators) == 0 || upgradeFailed:\n\t\t\t\tjrr.TestFailures++\n\t\t\t\tjrr.FailedTestNames = append(jrr.FailedTestNames, testgridanalysisapi.UpgradeTestName)\n\t\t\t\tsyntheticTests[testgridanalysisapi.UpgradeTestName].fail = 1\n\n\t\t\tdefault:\n\t\t\t\tsyntheticTests[testgridanalysisapi.UpgradeTestName].pass = 1\n\t\t\t}\n\n\t\t\tfor testName, result := range syntheticTests {\n\t\t\t\taddTestResult(jobResults.TestResults, testName, result.pass, result.fail, 0)\n\t\t\t}\n\n\t\t\tjobResults.JobRunResults[jrrKey] = jrr\n\t\t}\n\t\tif float64(numRunsWithoutSetup)/float64(len(jobResults.JobRunResults)+1)*100 > 50 {\n\t\t\tif !jobsWithKnownBadSetupContainer.Has(jobName) {\n\t\t\t\twarnings = append(warnings, fmt.Sprintf(\"%q is missing a test setup job to indicate successful installs\", jobName))\n\t\t\t}\n\t\t}\n\n\t\trawJobResults.JobResults[jobName] = jobResults\n\t}\n\treturn warnings\n}", "func ExampleRest() {\n\tinstance := &RestExample{\n\t\tpost: make(map[string]string),\n\t\twatch: make(map[string]chan string),\n\t}\n\trest, err := rest.New(instance)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thttp.ListenAndServe(\"127.0.0.1:8080\", rest)\n}", "func (r *RootResolver) ETListAll() (*email.ETListAllResolver, error) {\n\tvar etRxs *email.ETListAllResolver\n\n\t// identity\n\ti, err := r.SetIdentity(r.context)\n\tif err != nil {\n\t\treturn etRxs, &GraphQLError{Code: http.StatusInternalServerError, Message: err.Error()}\n\t}\n\tif !i.IsLogin || i.IsAnonymous {\n\t\treturn etRxs, &GraphQLError{Code: http.StatusForbidden, Message: \"Forbidden Access\"}\n\t}\n\ti.RequestInfo.RequestObject = \"graphql.email.template.list-all\"\n\ti.RequestInfo.RequestAction = \"READ\"\n\n\t// request\n\n\t// send to application layer\n\tresp, err := r.appEmail.EmailTemplateSvc.ListAll(i)\n\tif err != nil {\n\t\treturn etRxs, &GraphQLError{Code: http.StatusInternalServerError, Message: err.Error()}\n\t}\n\n\t// response\n\tetRxs = &email.ETListAllResolver{Model: resp}\n\n\t/* // another trick\n\ttmpRes := &email.ETListAllResponse{}\n\ttmpRes.Count = resp.Count\n\tfor _, r := range resp.Data {\n\t\td := &email.EmailTemplate{\n\t\t\tID: r.ID,\n\t\t\tUUID: r.UUID,\n\t\t\tCode: r.Code,\n\t\t\tName: r.Name,\n\t\t\tIsActive: r.IsActive,\n\t\t\tEmailFormat: r.EmailFormat,\n\t\t\tDefaultVersionID: r.DefaultVersionID,\n\t\t}\n\t\ttmpRes.Data = append(tmpRes.Data, d)\n\t}\n\tetRxs = &email.ETListAllResolver{Model: tmpRes}\n\t*/\n\n\treturn etRxs, nil\n}", "func getDbTest() 
{\n\tvar index int\n\tvar data string\n\tvar newTest test\n\tlistTest = allTest{}\n\n\trows, err := database.Query(\"SELECT * FROM Test\")\n\tif err != nil {\n\t\tfmt.Println(\"Error running query\")\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tfor rows.Next() {\n\t\trows.Scan(&index, &data)\n\t\tfmt.Printf(strconv.Itoa(index)+\" %s\", data)\n\t\tfmt.Printf(\"\\n\")\n\t\tnewTest.ID = index\n\t\tnewTest.Nombre = data\n\t\tlistTest = append(listTest, newTest)\n\t}\n\tdefer rows.Close()\n}", "func RunRestServer() *echo.Echo {\n\t// -- init db and data-sources\n\tdb := datasources.ConnectDb()\n\tdatasources.Migrate(db)\n\tredis := datasources.ConnectRedis()\n\n\t// -- repositories\n\tredisReadRepo := repositories.NewRedisReadRepository(redis)\n\tredisWriteRepo := repositories.NewRedisEventualRepository(redis)\n\tmysqlWriteRepo := repositories.NewMySqlStrongRepository(db)\n\n\tmanagers := _managers.NewMutantManager(redisReadRepo, mysqlWriteRepo, redisWriteRepo)\n\trestMethods := rest.NewRest(managers)\n\n\t// -- init rest server\n\te := echo.New()\n\trest.RegisterRoutes(e, restMethods)\n\treturn e\n}", "func genMysqlTestdata(t *testing.T, dump func()) {\n\tdb, err := gosql.Open(\"mysql\", \"root@/\"+mysqlTestDB)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer db.Close()\n\n\tdropTables := `DROP TABLE IF EXISTS everything, third, second, simple CASCADE`\n\tif _, err := db.Exec(dropTables); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tfor _, schema := range []string{\n\t\t`CREATE TABLE simple (i INT PRIMARY KEY AUTO_INCREMENT, s text, b binary(200))`,\n\t\t`CREATE TABLE SECOND (\n\t\t\ti INT PRIMARY KEY,\n\t\t\tk INT,\n\t\t\tFOREIGN KEY (k) REFERENCES simple (i) ON UPDATE CASCADE,\n\t\t\tUNIQUE KEY ik (i, k),\n\t\t\tKEY ki (k, i)\n\t\t)`,\n\t\t`CREATE TABLE third (\n\t\t\ti INT PRIMARY KEY AUTO_INCREMENT,\n\t\t\ta INT, b INT, C INT,\n\t\t\tFOREIGN KEY (a, b) REFERENCES second (i, k) ON DELETE RESTRICT ON UPDATE RESTRICT,\n\t\t\tFOREIGN KEY (c) REFERENCES third (i) ON UPDATE CASCADE\n\t\t)`,\n\t\t`CREATE TABLE everything (\n\t\t\t\ti INT PRIMARY KEY,\n\n\t\t\t\tc CHAR(10) NOT NULL,\n\t\t\t\ts VARCHAR(100) DEFAULT 'this is s\\'s default value',\n\t\t\t\ttx TEXT,\n\t\t\t\te ENUM('Small', 'Medium', 'Large'),\n\n\t\t\t\tbin BINARY(100) NOT NULL,\n\t\t\t\tvbin VARBINARY(100),\n\t\t\t\tbl BLOB,\n\n\t\t\t\tdt DATETIME NOT NULL DEFAULT '2000-01-01 00:00:00',\n\t\t\t\td DATE,\n\t\t\t\tts TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n\t\t\t\tt TIME,\n\t\t\t\t-- TODO(dt): fix parser: for YEAR's length option\n\t\t\t\t-- y YEAR,\n\n\t\t\t\tde DECIMAL,\n\t\t\t\tnu NUMERIC,\n\t\t\t\td53 DECIMAL(5,3),\n\n\t\t\t\tiw INT(5) NOT NULL,\n\t\t\t\tiz INT ZEROFILL,\n\t\t\t\tti TINYINT DEFAULT 5,\n\t\t\t\tsi SMALLINT,\n\t\t\t\tmi MEDIUMINT,\n\t\t\t\tbi BIGINT,\n\n\t\t\t\tfl FLOAT NOT NULL,\n\t\t\t\trl REAL,\n\t\t\t\tdb DOUBLE,\n\n\t\t\t\tf17 FLOAT(17),\n\t\t\t\tf47 FLOAT(47),\n\t\t\t\tf75 FLOAT(7, 5),\n\t\t\t\tj JSON\n\t\t)`,\n\t} {\n\t\tif _, err := db.Exec(schema); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\tfor _, tc := range simpleTestRows {\n\t\ts := &tc.s\n\t\tif *s == injectNull {\n\t\t\ts = nil\n\t\t}\n\t\tif _, err := db.Exec(\n\t\t\t`INSERT INTO simple (s, b) VALUES (?, ?)`, s, tc.b,\n\t\t); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\tfor i := 1; i <= secondTableRows; i++ {\n\t\tif _, err := db.Exec(`INSERT INTO second VALUES (?, ?)`, -i, i); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\tfor _, r := range everythingTestRows {\n\t\tif _, err := db.Exec(\n\t\t\t`INSERT 
INTO everything (\n\t\t\ti, e, c, bin, dt, iz, iw, fl, d53, j\n\t\t) VALUES (\n\t\t\t?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n\t\t)`, r.i, r.e, r.c, r.bin, r.dt, r.iz, r.iw, r.fl, r.d53, r.j); err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t}\n\n\tdump()\n\n\tif _, err := db.Exec(dropTables); err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "func TestAllShares(t *testing.T) {\n\tshares := []m.Share{\n\t\t{\n\t\t\tID: uuid.MustParse(\"f43b0e48-13cc-4c6c-8a23-3a18a670effd\"),\n\t\t\tIsPublic: true,\n\t\t},\n\t\t{\n\t\t\tID: uuid.MustParse(\"a558aca3-fb40-400b-8dc6-ae49c705c791\"),\n\t\t\tIsPublic: false,\n\t\t},\n\t}\n\tdb.Create(&shares[0])\n\tdb.Create(&shares[1])\n\tdefer db.Delete(&shares[0])\n\tdefer db.Delete(&shares[1])\n\n\tt.Run(\"happy path\", func(t *testing.T) {\n\t\t// request\n\t\tres, _ := http.Get(url + \"/shares\")\n\t\t// parse\n\t\tbody, _ := ioutil.ReadAll(res.Body)\n\t\tvar actual []m.Share\n\t\tvar expected = []m.Share{parseShare(shares[0])}\n\t\t_ = json.Unmarshal(body, &actual)\n\t\t// assertions\n\t\tassert.Equal(t, http.StatusOK, res.StatusCode)\n\t\tassert.Len(t, actual, len(expected))\n\t\tassert.Equal(t, expected, actual)\n\t})\n\n\tt.Run(\"with admin key\", func(t *testing.T) {\n\t\t// do request\n\t\treq, _ := http.NewRequest(\"GET\", fmt.Sprint(url, \"/shares\"), nil)\n\t\treq.Header.Set(\"Authorization\", \"Bearer \"+base64.StdEncoding.EncodeToString([]byte(os.Getenv(\"ADMIN_KEY\"))))\n\t\tres, _ := http.DefaultClient.Do(req)\n\t\t// parse\n\t\tbody, _ := ioutil.ReadAll(res.Body)\n\t\tvar actual []m.Share\n\t\tvar expected = []m.Share{parseShare(shares[0]), parseShare(shares[1])}\n\t\t_ = json.Unmarshal(body, &actual)\n\t\t// assertions\n\t\tassert.Equal(t, http.StatusOK, res.StatusCode)\n\t\tassert.Len(t, actual, len(expected))\n\t\tassert.Equal(t, expected, actual)\n\t})\n}", "func AfterEachTest(contexts []*scheduler.Context, ids ...int) {\n\ttestStatus := \"Pass\"\n\tlogrus.Debugf(\"contexts: %v\", contexts)\n\tginkgoTestDescr := ginkgo.CurrentGinkgoTestDescription()\n\tif ginkgoTestDescr.Failed {\n\t\tlogrus.Infof(\">>>> FAILED TEST: %s\", ginkgoTestDescr.FullTestText)\n\t\tCollectSupport()\n\t\tDescribeNamespace(contexts)\n\t\ttestStatus = \"Fail\"\n\t}\n\tif len(ids) >= 1 {\n\t\tdriverVersion, err := Inst().V.GetDriverVersion()\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"Error in getting driver version\")\n\t\t}\n\t\ttestrailObject := testrailuttils.Testrail{\n\t\t\tStatus: testStatus,\n\t\t\tTestID: ids[0],\n\t\t\tRunID: ids[1],\n\t\t\tDriverVersion: driverVersion,\n\t\t}\n\t\ttestrailuttils.AddTestEntry(testrailObject)\n\t}\n}", "func (suite *AddCommandTestSuite) TestExecuteWithMultipleURLs() {\n\n}", "func getProgrammingsTest(e *httpexpect.Expect, t *testing.T) {\n\ttestCases := []testCase{\n\t\tnotLoggedTestCase,\n\t\t{\n\t\t\tToken: testCtx.Admin.Token,\n\t\t\tParam: \"2018\",\n\t\t\tStatus: http.StatusOK,\n\t\t\tBodyContains: []string{\"Programmings\", `\"PrevCommitmentTotal\":96730644861`},\n\t\t\tCountItemName: `\"id\"`,\n\t\t\tArraySize: 626},\n\t}\n\tf := func(tc testCase) *httpexpect.Response {\n\t\treturn e.GET(\"/api/programmings\").WithHeader(\"Authorization\", \"Bearer \"+tc.Token).\n\t\t\tWithQuery(\"year\", tc.Param).Expect()\n\t}\n\tfor _, r := range chkTestCases(testCases, f, \"GetProgrammings\") {\n\t\tt.Error(r)\n\t}\n}", "func (db *MySQLDB) CreateAllTable(ctx context.Context) error {\n\tfLog := mysqlLog.WithField(\"func\", \"CreateAllTable\").WithField(\"RequestID\", ctx.Value(constants.RequestID))\n\n\thansipDomain := 
config.Get(\"hansip.domain\")\n\thansipAdmin := config.Get(\"hansip.admin\")\n\n\t_, err := db.instance.ExecContext(ctx, CreateTenantMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_TENANT Got %s. SQL = %s\", err.Error(), CreateTenantMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_TENANT\",\n\t\t\tSQL: CreateTenantMySQL,\n\t\t}\n\t}\n\t_, err = db.CreateTenantRecord(ctx, \"Hansip System\", \"hansip\", \"Hansip built in tenant\")\n\tif err != nil {\n\t\tfLog.Errorf(\"db.CreateTenantRecord Got %s\", err.Error())\n\t\treturn err\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateUserMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_USER Got %s. SQL = %s\", err.Error(), CreateUserMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_USER\",\n\t\t\tSQL: CreateUserMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateGroupMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_GROUP Got %s. SQL = %s\", err.Error(), CreateGroupMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_GROUP\",\n\t\t\tSQL: CreateGroupMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateRoleMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_ROLE Got %s. SQL = %s\", err.Error(), CreateRoleMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_ROLE\",\n\t\t\tSQL: CreateRoleMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateUserRoleMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_USER_ROLE Got %s. SQL = %s\", err.Error(), CreateUserRoleMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_USER_ROLE\",\n\t\t\tSQL: CreateUserRoleMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateUserGroupMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_USER_GROUP Got %s. SQL = %s\", err.Error(), CreateUserGroupMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_USER_GROUP\",\n\t\t\tSQL: CreateUserGroupMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateGroupRoleMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_GROUP_ROLE Got %s. SQL = %s\", err.Error(), CreateGroupRoleMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_GROUP_ROLE\",\n\t\t\tSQL: CreateGroupRoleMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateTOTPRecoveryCodeMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_TOTP_RECOVERY_CODES Got %s. SQL = %s\", err.Error(), CreateTOTPRecoveryCodeMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_TOTP_RECOVERY_CODES\",\n\t\t\tSQL: CreateTOTPRecoveryCodeMySQL,\n\t\t}\n\t}\n\t_, err = db.instance.ExecContext(ctx, CreateRevocationMySQL)\n\tif err != nil {\n\t\tfLog.Errorf(\"db.instance.ExecContext HANSIP_REVOCATION Got %s. 
SQL = %s\", err.Error(), CreateRevocationMySQL)\n\t\treturn &ErrDBExecuteError{\n\t\t\tWrapped: err,\n\t\t\tMessage: \"Error while trying to create table HANSIP_REVOCATION\",\n\t\t\tSQL: CreateRevocationMySQL,\n\t\t}\n\t}\n\t_, err = db.CreateRole(ctx, hansipAdmin, hansipDomain, \"Administrator role\")\n\tif err != nil {\n\t\tfLog.Errorf(\"db.CreateRole Got %s\", err.Error())\n\t\treturn err\n\t}\n\treturn nil\n}", "func batchPaymentCreditJournalsTest(t *testing.T, c *TestContext) {\n\ttcc := []TestCase{\n\t\t*c.AdminCheckTestCase,\n\t\t{\n\t\t\tToken: c.Config.Users.Admin.Token,\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t\tSent: []byte(`{\"PaymentCredit\":[`),\n\t\t\tRespContains: []string{\"Batch mouvements de crédits, décodage : \"}},\n\t\t{\n\t\t\tToken: c.Config.Users.Admin.Token,\n\t\t\tStatusCode: http.StatusOK,\n\t\t\tSent: []byte(`{\"PaymentCreditJournal\":[{\"Chapter\":908,\"Function\":811,` +\n\t\t\t\t`\"CreationDate\":20190310,\"ModificationDate\":20190315,\"Name\":\"Mouvement\",\"Value\":100000}]}`),\n\t\t\tRespContains: []string{\"Mouvements de crédits importés\"}},\n\t}\n\tf := func(tc TestCase) *httpexpect.Response {\n\t\treturn c.E.POST(\"/api/payment_credit_journal\").\n\t\t\tWithHeader(\"Authorization\", \"Bearer \"+tc.Token).WithBytes(tc.Sent).Expect()\n\t}\n\tfor _, r := range chkFactory(tcc, f, \"BatchPaymentCreditJournals\") {\n\t\tt.Error(r)\n\t}\n}", "func TestResticFiles(t *testing.T) {\r\n\trepoPath := getTestRepoPath()\r\n\trestic, err := NewRestic()\r\n\tif err != nil {\r\n\t\tt.Fatal(\"failed to resolve restic binary\", err)\r\n\t}\r\n\t// open repo and fetch snapshots\r\n\trepo := NewRepository(Location{Path: repoPath, Password: testRepoPass}, restic)\r\n\tsnapshots, err := repo.GetSnapshots()\r\n\tif err != nil {\r\n\t\tt.Error(\"failed to fetch snapshots\")\r\n\t}\r\n\tinvalidSnapshot := &Snapshot{ID: \"INVALID_ID\"}\r\n\t_, err = repo.GetFiles(invalidSnapshot, \"/\")\r\n\tif err == nil {\r\n\t\tt.Error(\"expecting an error for invalid snapshot ids\")\r\n\t}\r\n\trandSnapshot := snapshots[rand.Intn(len(snapshots)-1)]\r\n\t// fetch files\r\n\t_, err = repo.GetFiles(randSnapshot, \"/INVALID_PATH\")\r\n\tif err == nil {\r\n\t\tt.Error(\"expecting an error for invalid snapshot paths\")\r\n\t}\r\n\tfiles, err := repo.GetFiles(randSnapshot, \"/\")\r\n\tif err != nil || len(files) == 0 {\r\n\t\tt.Fatal(\"failed to fetch files\")\r\n\t}\r\n\t// test file serialization\r\n\tfor _, f := range files {\r\n\t\t// NB: UID and Gid may be empty\r\n\t\tif f.Type != \"file\" && f.Type != \"dir\" {\r\n\t\t\tt.Errorf(\"unexpected file type property: %s\", f.Type)\r\n\t\t} else if f.Atime == \"\" || f.Ctime == \"\" || f.Mtime == \"\" {\r\n\t\t\tt.Error(\"missing file time properties\", f)\r\n\t\t} else if f.Name == \"\" || f.Path == \"\" || f.Mode == 0 || (f.Type == \"file\" && f.Size == 0) {\r\n\t\t\tt.Error(\"missing or unexpected file properties\", f)\r\n\t\t}\r\n\t}\r\n}", "func (re *AllResults) PrintAllResults() {\n\t/*\tvop, _ := re.Validate()\n\t\tif vop != true {\n\t\t\tfunction, file, line, _ := runtime.Caller(1)\n\t\t\top := fmt.Sprintf(\"Validation failure at %s %s %d\", file, runtime.FuncForPC(function).Name(), line)\n\n\t\t\treturn errors.New(op)\n\t\t}*/\n\tfmt.Printf(\"Printing all of AllResults\\n\")\n\t/*\tfunction, file, line, _ := runtime.Caller(0)\n\t\top := fmt.Sprintf(\"Validation failure at %s %s %d\", file, runtime.FuncForPC(function).Name(), line)\n\t\tfmt.Printf(\"%s\\n\", op)*/\n\tfor k, v := range *re 
{\n\t\tfmt.Printf(\"%d\\t%s\\t%s\\t%s\\t%d\\n\", k, v.PluginName, v.OutputString, v.OutputDesc, v.OutputCode)\n\t}\n\n}", "func TestAll(t *testing.T) {\n\tassert := assert.New(t)\n\n\tdir, err := ioutil.TempDir(\"\", \"gito-test-\")\n\tassert.NoError(err, \"making temp directory\")\n\tdefer os.RemoveAll(dir)\n\n\tf, err := os.Create(filepath.Join(dir, \"config\"))\n\tassert.NoError(err, \"making config file\")\n\n\terr = os.MkdirAll(filepath.Join(dir, \"src\"), 0755)\n\tassert.NoError(err, \"making src dir\")\n\n\tconfig := &Config{\n\t\tWorkspaces: []*Workspace{{\n\t\t\tName: \"default\",\n\t\t\tPath: dir,\n\t\t\tpath: []string{filepath.Join(dir, \"src\")},\n\t\t\tAliases: map[string]string{},\n\t\t\tCustom: map[string]string{},\n\t\t}},\n\t\tf: f,\n\t}\n\tconfig.active = config.Workspaces[0]\n\n\tg := New(config)\n\n\t//\n\t// test get\n\t//\n\n\terr = g.Get(\"github.com/r-medina/gito\")\n\tassert.NoError(err, \"getting 'r-medina/gito'\")\n\n\t//\n\t// test where\n\t//\n\n\t// with full name of repo\n\n\twhere, err := g.Where(\"github.com/r-medina/gito\")\n\tassert.NoError(err, \"where 'github.com/r-medina/gito\")\n\tassert.Equal(filepath.Join(dir, \"src\", \"github.com/r-medina/gito\"),\n\t\twhere,\n\t\t\"r-medina/gito not in expected location\")\n\n\t// dropping github.com\n\n\twhere, err = g.Where(\"r-medina/gito\")\n\tassert.NoError(err, \"where 'r-medina/gito\")\n\tassert.Equal(filepath.Join(dir, \"src\", \"github.com/r-medina/gito\"),\n\t\twhere,\n\t\t\"r-medina/gito not in expected location\")\n\n\t// dropping r-medina\n\n\twhere, err = g.Where(\"gito\")\n\tassert.NoError(err, \"where 'gito\")\n\tassert.Equal(filepath.Join(dir, \"src\", \"github.com/r-medina/gito\"),\n\t\twhere,\n\t\t\"gito not in expected location\")\n\n\t// make sure get downloaded a repo\n\n\tassert.True(isRepo(where), \"%q is not a repo\", where)\n\n\t//\n\t// test url\n\t//\n\n\turl, err := g.URL(\"gito\")\n\tassert.NoError(err, \"getting 'gito' url\")\n\tassert.Equal(\"https://github.com/r-medina/gito\", url, \"url for 'gito'\")\n\n\t//\n\t// test alias\n\t//\n\n\terr = g.Alias(\"g\", \"r-medina/gito\")\n\tassert.NoError(err, \"making alias g for r-medina/gito\")\n\n\talias, ok := config.active.Aliases[\"g\"]\n\tassert.True(ok, \"getting alias 'g' from workspace (using underlying map)\")\n\tassert.Equal(\"r-medina/gito\", alias, \"alias value for 'g' (in underlying map)\")\n\talias, ok = config.active.Alias(\"g\")\n\tassert.True(ok, \"getting alias 'g' from workspace (using method Alias)\")\n\tassert.Equal(\"r-medina/gito\", alias, \"alias value for 'g' (using method Alias)\")\n\n\t// make sure where still works\n\twhere, err = g.Where(\"g\")\n\tassert.NoError(err, \"where 'gito\")\n\tassert.Equal(filepath.Join(dir, \"src\", \"github.com/r-medina/gito\"),\n\t\twhere,\n\t\t\"'g' not in expected location\")\n\n\t//\n\t// test set\n\t//\n\n\twant := filepath.Join(dir, \"dotfiles\")\n\t_, err = gitCloneAt(\"github.com/r-medina/dotfiles\", want)\n\tassert.NoError(err, \"cloning dotfiles\")\n\n\terr = g.Set(\"this\", want)\n\tassert.NoError(err, \"calling set\")\n\twhere, err = g.Where(\"this\")\n\tassert.Equal(want, where, \"calling where on 'this' after setting\")\n\n\t//\n\t// test self\n\t//\n\n\tgot, err := g.Self()\n\tassert.NoError(err, \"getting self\")\n\tassert.Equal(\"\", got)\n\tassert.NoError(g.SetSelf(\"github.com/r-medina\"))\n\n\tgot, err = g.Self()\n\tassert.NoError(err, \"getting self\")\n\tassert.Equal(filepath.Join(dir, \"src\", \"github.com/r-medina\"), got)\n}", "func (test *RestTest) 
PreExecute(testcase *TestCase, ctx *TestContext) error {\n\n\ttestcase.SetVerifyTestFunc(VerifyRestTestByDefault)\n\n\tswitch testcase.Method {\n\tcase METHOD_CREATE_SERVICE:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostCreateGetServiceTest)\n\t\tbreak\n\tcase METHOD_GET_SERVICE:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostCreateGetServiceTest)\n\t\tbreak\n\tcase METHOD_QUERY_SERVICE:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostListServiceTest)\n\t\tbreak\n\tcase METHOD_DELETE_SERVICE:\n\t\ttest.Client = NewRestClient_PMS()\n\t\tbreak\n\tcase METHOD_CREATE_POLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostCreateGetPolicyTest)\n\t\tbreak\n\tcase METHOD_GET_POLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPreTestFunc(PreGetDeletePolicyTest)\n\t\ttestcase.SetPostTestFunc(PostCreateGetPolicyTest)\n\t\tbreak\n\tcase METHOD_QUERY_POLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostListPolicyTest)\n\t\tbreak\n\tcase METHOD_DELETE_POLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPreTestFunc(PreGetDeletePolicyTest)\n\t\tbreak\n\tcase METHOD_CREATE_ROLEPOLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostCreateGetRolePolicyTest)\n\t\tbreak\n\tcase METHOD_GET_ROLEPOLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPreTestFunc(PreGetDeletePolicyTest)\n\t\ttestcase.SetPostTestFunc(PostCreateGetRolePolicyTest)\n\t\tbreak\n\tcase METHOD_QUERY_ROLEPOLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPostTestFunc(PostListRolePolicyTest)\n\t\tbreak\n\tcase METHOD_DELETE_ROLEPOLICY:\n\t\ttest.Client = NewRestClient_PMS()\n\t\ttestcase.SetPreTestFunc(PreGetDeletePolicyTest)\n\t\tbreak\n\tcase METHOD_IS_ALLOWED:\n\t\ttest.Client = NewRestClient_ADS()\n\t\tbreak\n\tcase METHOD_GET_GRANTED_PERMISSIONS:\n\t\ttest.Client = NewRestClient_ADS()\n\t\tbreak\n\tcase METHOD_GET_GRANTED_ROLES:\n\t\ttest.Client = NewRestClient_ADS()\n\t\tbreak\n\tdefault:\n\t\treturn errors.New(ERROR_SPEEDLE_NOT_SUPPORTED)\n\t}\n\n\treturn nil\n}", "func batchPaymentCreditsTest(t *testing.T, c *TestContext) {\n\ttcc := []TestCase{\n\t\t*c.AdminCheckTestCase, // 0 : bad user\n\t\t{\n\t\t\tToken: c.Config.Users.Admin.Token,\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t\tSent: []byte(`{\"PaymentCredit\":[`),\n\t\t\tRespContains: []string{\"Batch d'enveloppes de crédits, décodage : \"}}, // 1 : bad payload\n\t\t{\n\t\t\tToken: c.Config.Users.Admin.Token,\n\t\t\tStatusCode: http.StatusOK,\n\t\t\tSent: []byte(`{\"PaymentCredit\":[{\"Chapter\":908,\"Function\":811,` +\n\t\t\t\t`\"Primitive\":1000000,\"Reported\":0,\"Added\":500000,\"Modified\":300000,` +\n\t\t\t\t`\"Movement\":50000}]}`),\n\t\t\tRespContains: []string{\"Enveloppes de crédits importées\"}}, // 2 : ok\n\t}\n\n\tf := func(tc TestCase) *httpexpect.Response {\n\t\treturn c.E.POST(\"/api/payment_credits\").\n\t\t\tWithHeader(\"Authorization\", \"Bearer \"+tc.Token).WithBytes(tc.Sent).Expect()\n\t}\n\tfor _, r := range chkFactory(tcc, f, \"BatchPaymentCredits\") {\n\t\tt.Error(r)\n\t}\n}", "func downloadAllBadAndGreyFuzzes(commitHash, category string, storageClient *storage.Client) (badFuzzPaths []string, greyFuzzPaths []string, err error) {\n\n\tbad, err := common.DownloadAllFuzzes(storageClient, config.Aggregator.FuzzPath, category, commitHash, config.Generator.Architecture, \"bad\", config.Generator.NumDownloadProcesses)\n\tif err != nil {\n\t\treturn nil, 
nil, err\n\t}\n\tgrey, err := common.DownloadAllFuzzes(storageClient, config.Aggregator.FuzzPath, category, commitHash, config.Generator.Architecture, \"grey\", config.Generator.NumDownloadProcesses)\n\treturn bad, grey, err\n}", "func StorageListTest(app *Server, t *testing.T, testData string) {\n\tapp.Storage.Clear()\n\tmodData := testData + testData\n\tkey, err := app.Storage.Set(\"test/123\", testData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"123\", key)\n\tkey, err = app.Storage.Set(\"test/456\", modData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"456\", key)\n\tdata, err := app.Storage.Get(\"test/*\")\n\trequire.NoError(t, err)\n\tvar testObjects []objects.Object\n\terr = json.Unmarshal(data, &testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 2, len(testObjects))\n\tfor i := range testObjects {\n\t\tif testObjects[i].Index == \"123\" {\n\t\t\trequire.Equal(t, testData, testObjects[i].Data)\n\t\t}\n\n\t\tif testObjects[i].Index == \"456\" {\n\t\t\trequire.Equal(t, modData, testObjects[i].Data)\n\t\t}\n\t}\n\tdata1, err := app.Storage.Get(\"test/123\")\n\trequire.NoError(t, err)\n\tdata2, err := app.Storage.Get(\"test/456\")\n\trequire.NoError(t, err)\n\tobj1, err := objects.DecodeRaw(data1)\n\trequire.NoError(t, err)\n\tobj2, err := objects.DecodeRaw(data2)\n\trequire.NoError(t, err)\n\trequire.Equal(t, testData, obj1.Data)\n\trequire.Equal(t, modData, obj2.Data)\n\tkeys, err := app.Storage.Keys()\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"{\\\"keys\\\":[\\\"test/123\\\",\\\"test/456\\\"]}\", string(keys))\n\n\treq := httptest.NewRequest(\n\t\t\"POST\", \"/test/*\",\n\t\tbytes.NewBuffer(\n\t\t\t[]byte(`{\"data\":\"testpost\"}`),\n\t\t),\n\t)\n\tw := httptest.NewRecorder()\n\tapp.Router.ServeHTTP(w, req)\n\tresp := w.Result()\n\trequire.Equal(t, http.StatusOK, resp.StatusCode)\n\tbody, err := io.ReadAll(resp.Body)\n\trequire.NoError(t, err)\n\tdat, err := objects.DecodeRaw(body)\n\trequire.NoError(t, err)\n\tdata, err = app.Storage.Get(\"test/*\")\n\tapp.Console.Log(string(data))\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 3, len(testObjects))\n\terr = app.Storage.Del(\"test/\" + dat.Index)\n\trequire.NoError(t, err)\n\tdata, err = app.Storage.Get(\"test/*\")\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 2, len(testObjects))\n\tkey, err = app.Storage.Set(\"test/glob1/glob123\", testData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"glob123\", key)\n\tkey, err = app.Storage.Set(\"test/glob2/glob456\", modData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"glob456\", key)\n\tdata, err = app.Storage.Get(\"test/*/*\")\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\tapp.Console.Log(testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 2, len(testObjects))\n\tkey, err = app.Storage.Set(\"test/1/glob/g123\", testData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"g123\", key)\n\tkey, err = app.Storage.Set(\"test/2/glob/g456\", modData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"g456\", key)\n\tdata, err = app.Storage.Get(\"test/*/glob/*\")\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\tapp.Console.Log(testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 2, len(testObjects))\n\tkey, err = app.Storage.Set(\"test1\", testData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"test1\", key)\n\tkey, err = app.Storage.Set(\"test2\", 
modData)\n\trequire.NoError(t, err)\n\trequire.Equal(t, \"test2\", key)\n\tdata, err = app.Storage.Get(\"*\")\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\tapp.Console.Log(testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 2, len(testObjects))\n\terr = app.Storage.Del(\"*\")\n\trequire.NoError(t, err)\n\tdata, err = app.Storage.Get(\"*\")\n\trequire.NoError(t, err)\n\terr = json.Unmarshal(data, &testObjects)\n\tapp.Console.Log(testObjects)\n\trequire.NoError(t, err)\n\trequire.Equal(t, 0, len(testObjects))\n}", "func PrepareTestSummary(w http.ResponseWriter, r *http.Request) {\n\n\t// Read the request body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to read request: %s\", err),\n\t\t\thttp.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Get arguments from response body\n\targuments := strings.Split(string(body), \" \")\n\tif len(arguments) < 1 {\n\t\thttp.Error(w, fmt.Sprintf(\"error: request has not enough arguments\"),\n\t\t\thttp.StatusInternalServerError)\n\t\treturn\n\t}\n\n\ttestPeer := arguments[0]\n\tlog.Printf(\"PrepareTestSummary from %s: %v\\n\", testPeer, currentTestSummary)\n\n\tfor _, currentTestEval := range currentTestSummary {\n\t\tif currentTestEval.Peer == testPeer {\n\t\t\thttp.Error(w, fmt.Sprintf(\"error: summary of %s already processed\", testPeer),\n\t\t\t\thttp.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t}\n\n\tlog.Printf(\"currentTestResult: \\n%v\\n\", currentTestResult)\n\n\tfmt.Printf(\"GetTestSummary: %s\\n\", string(decodeJsonBytes(body)))\n\n\tfmt.Printf(\"currentTestResult.ID: %s\\n\", currentTestResult.ID)\n\tfmt.Printf(\"currentTestResult.Name: %s\\n\", currentTestResult.Name)\n\n\tfor i, result := range currentTestResult.CommandResults {\n\t\tif result.Peer == testPeer {\n\n\t\t\tcurrentTestEvaluation.ID = currentTestResult.ID\n\t\t\tcurrentTestEvaluation.Name = currentTestResult.Name\n\t\t\tcurrentTestEvaluation.Peer = testPeer\n\t\t\tcurrentTestEvaluation.Kind = \"command\"\n\t\t\tcurrentTestEvaluation.Status = result.Status\n\t\t\tcurrentTestEvaluation.Test = result.Data\n\t\t\tcurrentTestEvaluation.Result = result.Data\n\n\t\t\tcurrentTestSummary = append(currentTestSummary, currentTestEvaluation)\n\n\t\t\tif strings.Split(result.Data, \" \")[0] == \"testfilter\" {\n\n\t\t\t\tsource := strings.Split(result.Data, \" \")[1]\n\t\t\t\tfilter := strings.Join(strings.Split(result.Data, \" \")[3:], \" \")\n\n\t\t\t\tcurrentTestEvaluation.ID = currentTestResult.ID\n\t\t\t\tcurrentTestEvaluation.Name = currentTestResult.Name\n\t\t\t\tcurrentTestEvaluation.Peer = testPeer\n\t\t\t\tcurrentTestEvaluation.Kind = \"event\"\n\t\t\t\tcurrentTestEvaluation.Test = result.Data\n\n\t\t\t\tcurrentTestEvaluation.Status = \"FAILED\"\n\t\t\t\tfor _, eventFilter := range currentTestEventFilters {\n\n\t\t\t\t\tfmt.Printf(\"\\nXXX\\n eventFilter: %v\\n\\n\", eventFilter)\n\n\t\t\t\t\tfmt.Printf(\"NumExpectedEvents %d == NumReceivedEvents %d\\n\",\n\t\t\t\t\t\teventFilter.NumExpectedEvents, eventFilter.NumReceivedEvents)\n\n\t\t\t\t\tfmt.Printf(\"eventFilter.Peer %q == testPeer %q\\n\",\n\t\t\t\t\t\teventFilter.Peer, testPeer)\n\t\t\t\t\tfmt.Printf(\"eventFilter.Source %q == source %q\\n\",\n\t\t\t\t\t\teventFilter.Source, source)\n\t\t\t\t\tfmt.Printf(\"eventFilter.Filter %q == filter %q\\n\",\n\t\t\t\t\t\teventFilter.Filter, filter)\n\n\t\t\t\t\tcurrentTestEvaluation.Comment = fmt.Sprintf(\"NumExpectedEvents: %d, NumReceivedEvents: 
%d\",\n\t\t\t\t\t\teventFilter.NumExpectedEvents, eventFilter.NumReceivedEvents)\n\n\t\t\t\t\tif eventFilter.Peer == testPeer &&\n\t\t\t\t\t\teventFilter.Source == source &&\n\t\t\t\t\t\teventFilter.Filter == filter &&\n\t\t\t\t\t\teventFilter.NumExpectedEvents == eventFilter.NumReceivedEvents {\n\n\t\t\t\t\t\tcurrentTestEvaluation.Status = \"OK\"\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tcurrentTestEvaluation.Result = result.Data\n\t\t\t\tcurrentTestSummary = append(currentTestSummary, currentTestEvaluation)\n\t\t\t}\n\n\t\t\tfmt.Printf(\"%d result: %s\\n\", i, result)\n\t\t}\n\t}\n\n\tsaveTestJsonData(w)\n\n\t//currentTestRun.ID = \"\"\n\n\t_, err = fmt.Fprintf(w, \"\")\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to write response: %s\", err),\n\t\t\thttp.StatusInternalServerError)\n\t}\n}", "func ExecuteGenericTestTable(t *testing.T, testTable []GenericTestEntry) {\n\tfor _, testCase := range testTable {\n\t\terr := ocpp2.Validate.Struct(testCase.Element)\n\t\tif err != nil {\n\t\t\tassert.Equal(t, testCase.ExpectedValid, false, err.Error())\n\t\t} else {\n\t\t\tassert.Equal(t, testCase.ExpectedValid, true, \"%v is valid\", testCase.Element)\n\t\t}\n\t}\n}", "func FetchAll(c *gin.Context) {\n\tvar recipies []model.Recipe\n\tmodel.DB.Find(&recipies)\n\n\tif len(recipies) <= 0 {\n\t\tc.JSON(http.StatusNoContent, gin.H{\"message\": \"no recipes found\"})\n\t\treturn\n\t}\n\n\tvar descriptions []*model.RecipeDescription\n\tfor _, r := range recipies {\n\t\tmodel.DB.Model(&r).Related(&r.Resources)\n\t\tdescriptions = append(descriptions, r.Description())\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\"message\": strconv.Itoa(len(descriptions)) + \" recipes found\", \"data\": descriptions})\n}", "func init() {\n\ttests = make(map[string]Test)\n\n\ttests[\"database_commands\"] = Test{\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"create database should succeed\",\n\t\t\t\tcommand: `CREATE DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention duration should succeed\",\n\t\t\t\tcommand: `CREATE DATABASE db0_r WITH DURATION 24h REPLICATION 2 NAME db0_r_policy`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention policy should fail with invalid name\",\n\t\t\t\tcommand: `CREATE DATABASE db1 WITH NAME \".\"`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"invalid name\"}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database should error with some unquoted names\",\n\t\t\t\tcommand: `CREATE DATABASE 0xdb0`,\n\t\t\t\texp: `{\"error\":\"error parsing query: found 0xdb0, expected identifier at line 1, char 17\"}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database should error with invalid characters\",\n\t\t\t\tcommand: `CREATE DATABASE \".\"`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"invalid name\"}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention duration should error with bad retention duration\",\n\t\t\t\tcommand: `CREATE DATABASE db0 WITH DURATION xyz`,\n\t\t\t\texp: `{\"error\":\"error parsing query: found xyz, expected duration at line 1, char 35\"}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention replication should error with bad retention replication number\",\n\t\t\t\tcommand: `CREATE DATABASE db0 WITH REPLICATION xyz`,\n\t\t\t\texp: `{\"error\":\"error 
parsing query: found xyz, expected integer at line 1, char 38\"}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention name should error with missing retention name\",\n\t\t\t\tcommand: `CREATE DATABASE db0 WITH NAME`,\n\t\t\t\texp: `{\"error\":\"error parsing query: found EOF, expected identifier at line 1, char 31\"}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show database should succeed\",\n\t\t\t\tcommand: `SHOW DATABASES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"databases\",\"columns\":[\"name\"],\"values\":[[\"db0\"],[\"db0_r\"]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database should not error with existing database\",\n\t\t\t\tcommand: `CREATE DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database should create non-existing database\",\n\t\t\t\tcommand: `CREATE DATABASE db1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with retention duration should error if retention policy is different\",\n\t\t\t\tcommand: `CREATE DATABASE db1 WITH DURATION 24h`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"retention policy conflicts with an existing policy\"}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database should error with bad retention duration\",\n\t\t\t\tcommand: `CREATE DATABASE db1 WITH DURATION xyz`,\n\t\t\t\texp: `{\"error\":\"error parsing query: found xyz, expected duration at line 1, char 35\"}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show database should succeed\",\n\t\t\t\tcommand: `SHOW DATABASES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"databases\",\"columns\":[\"name\"],\"values\":[[\"db0\"],[\"db0_r\"],[\"db1\"]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop database db0 should succeed\",\n\t\t\t\tcommand: `DROP DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop database db0_r should succeed\",\n\t\t\t\tcommand: `DROP DATABASE db0_r`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop database db1 should succeed\",\n\t\t\t\tcommand: `DROP DATABASE db1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop database should not error if it does not exists\",\n\t\t\t\tcommand: `DROP DATABASE db1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop database should not error with non-existing database db1\",\n\t\t\t\tcommand: `DROP DATABASE db1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show database should have no results\",\n\t\t\t\tcommand: `SHOW DATABASES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"databases\",\"columns\":[\"name\"]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with shard group duration should succeed\",\n\t\t\t\tcommand: `CREATE DATABASE db0 WITH SHARD DURATION 61m`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create database with shard group duration and duration should succeed\",\n\t\t\t\tcommand: `CREATE DATABASE db1 WITH DURATION 60m SHARD DURATION 30m`,\n\t\t\t\texp: 
`{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"drop_and_recreate_database\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Drop database after data write\",\n\t\t\t\tcommand: `DROP DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Recreate database\",\n\t\t\t\tcommand: `CREATE DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Recreate retention policy\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp0 ON db0 DURATION 365d REPLICATION 1 DEFAULT`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Show measurements after recreate\",\n\t\t\t\tcommand: `SHOW MEASUREMENTS`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Query data after recreate\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"drop_database_isolated\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Query data from 1st database\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverA\",\"uswest\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Query data from 1st database with GROUP BY *\",\n\t\t\t\tcommand: `SELECT * FROM cpu GROUP BY *`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"tags\":{\"host\":\"serverA\",\"region\":\"uswest\"},\"columns\":[\"time\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop other database\",\n\t\t\t\tcommand: `DROP DATABASE db1`,\n\t\t\t\tonce: true,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Query data from 1st database and ensure it's still there\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverA\",\"uswest\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Query data from 1st database and ensure it's still there with GROUP BY *\",\n\t\t\t\tcommand: `SELECT * FROM cpu GROUP BY *`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"tags\":{\"host\":\"serverA\",\"region\":\"uswest\"},\"columns\":[\"time\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": 
[]string{\"db0\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"delete_series_time\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=100 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-02T00:00:00Z\").UnixNano())},\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=200 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-03T00:00:00Z\").UnixNano())},\n\t\t\t&Write{db: \"db1\", data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is present\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Delete series\",\n\t\t\t\tcommand: `DELETE FROM cpu WHERE time < '2000-01-03T00:00:00Z'`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Show series still exists\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Make sure last point still exists\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-03T00:00:00Z\",\"serverA\",\"uswest\",200]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Make sure data wasn't deleted from other database.\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverA\",\"uswest\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db1\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"delete_series_time_tag_filter\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverB,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=100 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-02T00:00:00Z\").UnixNano())},\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=200 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-03T00:00:00Z\").UnixNano())},\n\t\t\t&Write{db: \"db1\", data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is present\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: 
`{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"],[\"cpu,host=serverB,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Delete series\",\n\t\t\t\tcommand: `DELETE FROM cpu WHERE host = 'serverA' AND time < '2000-01-03T00:00:00Z'`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Show series still exists\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"],[\"cpu,host=serverB,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Make sure last point still exists\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverB\",\"uswest\",23.2],[\"2000-01-03T00:00:00Z\",\"serverA\",\"uswest\",200]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Make sure data wasn't deleted from other database.\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverA\",\"uswest\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db1\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"drop_and_recreate_series\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t\t&Write{db: \"db1\", data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is present\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop series after data write\",\n\t\t\t\tcommand: `DROP SERIES FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is gone\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Make sure data wasn't deleted from other database.\",\n\t\t\t\tcommand: `SELECT * FROM cpu`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"name\":\"cpu\",\"columns\":[\"time\",\"host\",\"region\",\"val\"],\"values\":[[\"2000-01-01T00:00:00Z\",\"serverA\",\"uswest\",23.2]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db1\"}},\n\t\t\t},\n\t\t},\n\t}\n\ttests[\"drop_and_recreate_series_retest\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: fmt.Sprintf(`cpu,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, 
\"2000-01-01T00:00:00Z\").UnixNano())},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is present again after re-write\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"cpu,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"drop_series_from_regex\"] = Test{\n\t\tdb: \"db0\",\n\t\trp: \"rp0\",\n\t\twrites: Writes{\n\t\t\t&Write{data: strings.Join([]string{\n\t\t\t\tfmt.Sprintf(`a,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano()),\n\t\t\t\tfmt.Sprintf(`aa,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano()),\n\t\t\t\tfmt.Sprintf(`b,host=serverA,region=uswest val=23.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano()),\n\t\t\t\tfmt.Sprintf(`c,host=serverA,region=uswest val=30.2 %d`, mustParseTime(time.RFC3339Nano, \"2000-01-01T00:00:00Z\").UnixNano()),\n\t\t\t}, \"\\n\")},\n\t\t},\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is present\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"a,host=serverA,region=uswest\"],[\"aa,host=serverA,region=uswest\"],[\"b,host=serverA,region=uswest\"],[\"c,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop series after data write\",\n\t\t\t\tcommand: `DROP SERIES FROM /a.*/`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Show series is gone\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"b,host=serverA,region=uswest\"],[\"c,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop series from regex that matches no measurements\",\n\t\t\t\tcommand: `DROP SERIES FROM /a.*/`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"make sure DROP SERIES doesn't delete anything when regex doesn't match\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"b,host=serverA,region=uswest\"],[\"c,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop series with WHERE field should error\",\n\t\t\t\tcommand: `DROP SERIES FROM c WHERE val > 50.0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"shard 1: fields not supported in WHERE clause during deletion\"}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"make sure DROP SERIES with field in WHERE didn't delete data\",\n\t\t\t\tcommand: `SHOW SERIES`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"key\"],\"values\":[[\"b,host=serverA,region=uswest\"],[\"c,host=serverA,region=uswest\"]]}]}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Drop series with WHERE time should 
error\",\n\t\t\t\tcommand: `DROP SERIES FROM c WHERE time > now() - 1d`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"DROP SERIES doesn't support time in WHERE clause\"}]}`,\n\t\t\t\tparams: url.Values{\"db\": []string{\"db0\"}},\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"retention_policy_commands\"] = Test{\n\t\tdb: \"db0\",\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy with invalid name should return an error\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY \".\" ON db0 DURATION 1d REPLICATION 1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"invalid name\"}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy should succeed\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp0 ON db0 DURATION 1h REPLICATION 1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should succeed\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"1h0m0s\",\"1h0m0s\",1,false]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"alter retention policy should succeed\",\n\t\t\t\tcommand: `ALTER RETENTION POLICY rp0 ON db0 DURATION 2h REPLICATION 3 DEFAULT`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should have new altered information\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"2h0m0s\",\"1h0m0s\",3,true]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should still show policy\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"2h0m0s\",\"1h0m0s\",3,true]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create a second non-default retention policy\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp2 ON db0 DURATION 1h REPLICATION 1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should show both\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"2h0m0s\",\"1h0m0s\",3,true],[\"rp2\",\"1h0m0s\",\"1h0m0s\",1,false]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"dropping non-default retention policy succeed\",\n\t\t\t\tcommand: `DROP RETENTION POLICY rp2 ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create a third non-default retention policy\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp3 ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 30m`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy with default on\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp3 ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 30m DEFAULT`,\n\t\t\t\texp: 
`{\"results\":[{\"statement_id\":0,\"error\":\"retention policy conflicts with an existing policy\"}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should show both with custom shard\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"2h0m0s\",\"1h0m0s\",3,true],[\"rp3\",\"1h0m0s\",\"1h0m0s\",1,false]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"dropping non-default custom shard retention policy succeed\",\n\t\t\t\tcommand: `DROP RETENTION POLICY rp3 ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy should show just default\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rp0\",\"2h0m0s\",\"1h0m0s\",3,true]]}]}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Ensure retention policy with unacceptable retention cannot be created\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp4 ON db0 DURATION 1s REPLICATION 1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"retention policy duration must be at least 1h0m0s\"}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Check error when deleting retention policy on non-existent database\",\n\t\t\t\tcommand: `DROP RETENTION POLICY rp1 ON mydatabase`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"Ensure retention policy for non existing db is not created\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rp0 ON nodb DURATION 1h REPLICATION 1`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"error\":\"database not found: nodb\"}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"drop rp0\",\n\t\t\t\tcommand: `DROP RETENTION POLICY rp0 ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t},\n\t\t\t// INF Shard Group Duration will normalize to the Retention Policy Duration Default\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy with inf shard group duration\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rpinf ON db0 DURATION INF REPLICATION 1 SHARD DURATION 0s`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t// 0s Shard Group Duration will normalize to the Replication Policy Duration\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy with 0s shard group duration\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rpzero ON db0 DURATION 1h REPLICATION 1 SHARD DURATION 0s`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t// 1s Shard Group Duration will normalize to the MinDefaultRetentionPolicyDuration\n\t\t\t&Query{\n\t\t\t\tname: \"create retention policy with 1s shard group duration\",\n\t\t\t\tcommand: `CREATE RETENTION POLICY rponesecond ON db0 DURATION 2h REPLICATION 1 SHARD DURATION 1s`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policy: validate normalized shard group durations are working\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: 
`{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"rpinf\",\"0s\",\"168h0m0s\",1,false],[\"rpzero\",\"1h0m0s\",\"1h0m0s\",1,false],[\"rponesecond\",\"2h0m0s\",\"1h0m0s\",1,false]]}]}]}`,\n\t\t\t},\n\t\t},\n\t}\n\n\ttests[\"retention_policy_auto_create\"] = Test{\n\t\tqueries: []*Query{\n\t\t\t&Query{\n\t\t\t\tname: \"create database should succeed\",\n\t\t\t\tcommand: `CREATE DATABASE db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0}]}`,\n\t\t\t\tonce: true,\n\t\t\t},\n\t\t\t&Query{\n\t\t\t\tname: \"show retention policies should return auto-created policy\",\n\t\t\t\tcommand: `SHOW RETENTION POLICIES ON db0`,\n\t\t\t\texp: `{\"results\":[{\"statement_id\":0,\"series\":[{\"columns\":[\"name\",\"duration\",\"shardGroupDuration\",\"replicaN\",\"default\"],\"values\":[[\"autogen\",\"0s\",\"168h0m0s\",1,true]]}]}]}`,\n\t\t\t},\n\t\t},\n\t}\n\n}" ]
[ "0.61003697", "0.60823077", "0.5916795", "0.5866954", "0.58110267", "0.5796846", "0.57569313", "0.5720729", "0.57079947", "0.5668394", "0.561246", "0.5400425", "0.5381105", "0.5378305", "0.5299053", "0.5254709", "0.5244366", "0.52367264", "0.5223388", "0.51863146", "0.5137129", "0.51328343", "0.5089182", "0.50596267", "0.5006664", "0.5003926", "0.5001331", "0.4999571", "0.49924147", "0.49832302", "0.4970993", "0.4967981", "0.49446377", "0.49399415", "0.49225917", "0.48764125", "0.48695582", "0.48638883", "0.48339826", "0.48293203", "0.48168474", "0.48141885", "0.4806853", "0.48012453", "0.47990164", "0.4796166", "0.47874638", "0.4780786", "0.4780088", "0.47764388", "0.47763473", "0.47557214", "0.4748085", "0.47371504", "0.4736329", "0.4735494", "0.4727758", "0.47206137", "0.4719123", "0.47184125", "0.47085088", "0.47077793", "0.46963924", "0.46929964", "0.46882197", "0.46837017", "0.4680612", "0.46699566", "0.46605375", "0.4658344", "0.46460503", "0.46393058", "0.46369234", "0.4623914", "0.46132874", "0.4612473", "0.4605096", "0.4599864", "0.45963028", "0.4596029", "0.45833883", "0.45792308", "0.45742568", "0.45737615", "0.45727396", "0.45707262", "0.45668975", "0.45660862", "0.45586473", "0.45566627", "0.45559576", "0.45556355", "0.4551271", "0.45454687", "0.45442197", "0.4543093", "0.45425078", "0.4541595", "0.45401886", "0.45385146" ]
0.8086325
0
Getters Length method to return the list length
func (l *LinkedList) Length() int {
	return l.length
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (list List) Length() int {\n\treturn len(list)\n}", "func (l List) Length() int {\n\treturn len(l)\n}", "func (l *List) Len() int {\n return l.size\n}", "func (l *List) Length() int {\n\treturn len(l.items)\n}", "func (sl *List) Len() int { return sl.len }", "func (l *List) Len() int { return l.len }", "func (list linkedList) getLength() int {\n\treturn list.length\n}", "func (list IntList) Length() int {\n\treturn len(list)\n}", "func (l *List) Len() int {\r\n\treturn l.length\r\n}", "func (l *SList) Len() int { return l.n }", "func (lst List) Len() Number {\n\treturn Number(lst.len)\n}", "func (l List) Len() int {\n\treturn l.len\n}", "func (l *List) Len() int {\n\treturn l.size\n}", "func (p List) Len() int { return len(p) }", "func (l List) Length() (length uint) {\n\tcurrent := l.Root\n\n\tfor current != nil {\n\t\tlength++\n\t\tcurrent = current.Next\n\t}\n\treturn\n}", "func (l *DList) Len() int { return l.n }", "func (s SkipList) Length() int {\n\treturn s.length\n}", "func (l *List) Len() int {\n\tl.RLock()\n\tdefer l.RUnlock()\n\n\treturn l.length\n}", "func (pl List) Len() int {\n\treturn len(pl)\n}", "func (list *List) Len() int {\n\treturn len(list.data)\n}", "func (List *List) Length() int {\n\tlen := 0\n\ttraversingPtr := List.Head\n\n\tfor traversingPtr != nil {\n\t\tlen = len + 1\n\t\ttraversingPtr = traversingPtr.Next\n\t}\n\treturn len\n}", "func (l *List) Len() int {\n\treturn l.len\n}", "func (l *List) Len() int {\n\treturn l.len\n}", "func (l *List) Len() int {\n\treturn l.len\n}", "func (list *LinkedList[T]) Length() uint {\n\tlist.key.RLock()\n\tdefer list.key.RUnlock()\n\n\treturn list.length\n}", "func (l IntList) Length() int {\n\tvar cnt int\n\tfor _, _ = range l {\n\t\tcnt++\n\t}\n\treturn cnt\n}", "func (list *DoublyLinkedList) Len() int {\n\treturn list.size\n}", "func (list CategoryList) Len() int { return len(list) }", "func (l *RangeList) Length() int {\n\treturn len(*l)\n}", "func (ll *LinkedList) Len() int {\n\treturn ll.length\n}", "func (l DNA8List) Len() int { return len(l) }", "func (s *SkipList) Len() int {\n\treturn s.length\n}", "func (l *idList) Len() int { return l.len }", "func TestShouldGetListLength(t *testing.T) {\n\tlst := []int{1, 2, 3, 4, 5}\n\n\tl := Length(lst)\n\n\tassert.Equal(t, l, 5, \"List size should be 5\")\n}", "func (l *ListHT) Len() uint64 {\n\treturn l.lt.len\n}", "func (list VulnerabilityList) Len() int {\n\treturn len(list)\n}", "func (list *TList) Len() int {\n\treturn list.list.Len()\n}", "func (l *IntList) Len() int {\n\treturn int(atomic.LoadInt64(&l.length))\n}", "func (s SampleList) Len() int {\n\treturn len(s)\n}", "func (this *List) Len() int {\n this.lock.RLock()\n this.lock.RUnlock()\n\n return len(this.counters)\n}", "func ListLength(head *Node) int {\n\trunner := head\n\tlength := 0\n\tfor runner != nil {\n\t\tlength++\n\t\trunner = runner.Next\n\t}\n\treturn length\n}", "func (sl *genericSkipList) Len() int {\n\treturn sl.size\n}", "func (o openList) Len() int {\n\treturn len(o)\n}", "func (l *List) Size() int {\n\treturn l.len\n}", "func (l *List) Len() int {\n\treturn len(l.Source)\n}", "func (b *ListBuilder) Len() (int, error) {\n\tl, err := b.List()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn l.Len(), nil\n}", "func (l *AttributeList) Len() int { return l.length }", "func (b *ListBuilder) Len() (int, error) {\n\tl, err := b.List()\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn l.Len(), nil\n}", "func (l *HandoffList) Len() int {\n\tif l == nil {\n\t\treturn 0\n\t}\n\treturn 
len(l.items)\n}", "func (sll *SingleLinkedList) Length() int {\n\treturn sll.length\n}", "func (pList *LinkedListNumber) GetLength() int {\n\tlen := 0\n\tptr := pList\n\tfor ptr != nil {\n\t\tlen++\n\t\tptr = ptr.Next\n\t}\n\treturn len\n}", "func (l *SkipList) Len() int {\n\tl.mu.Lock()\n\tdefer l.mu.Unlock()\n\n\treturn l.len\n}", "func (r RuneList) Len() int {\n\treturn len(r)\n}", "func (I Twitlist) Len() int {\n\treturn len(I)\n}", "func (shortlist *Shortlist) Len() int {\n\tlength := 0\n\tfor _, entry := range shortlist.Entries {\n\t\tif entry != nil {\n\t\t\tlength++\n\t\t}\n\t}\n\treturn length\n}", "func (l ResolutionList) Len() int { return len(l) }", "func (h *Strings) Len() int {\n\treturn len(h.list)\n}", "func (l *semaphoreList) length() int {\n\tl.RLock()\n\tdefer l.RUnlock()\n\tlength := len(l.list)\n\treturn length\n}", "func (dll *DoublyLinkedList) Length() int32 {\n\tvar count int32 = 0\n\ttemp := dll.head\n\tfor temp != nil {\n\t\tcount += 1\n\t\ttemp = temp.next\n\t}\n\treturn count\n}", "func (list *List) Size() int {\n return list.size\n}", "func (l *List) Len() int {\n\ti := 1\n\ty := l\n\tfor !y.End() {\n\t\ti++\n\t\ty = y.next\n\t}\n\n\treturn i\n}", "func (s *SinglyLinkedList) Length() int {\n return s.length\n}", "func (a *EmergencyNumberList) GetLen() (len uint8) {}", "func (l *LList) Size() int {\n\treturn l.size\n}", "func (m *Memberlist) Len() int {\n\treturn m.list.NumMembers()\n}", "func (l *List) Size() int {\n\treturn l.Len()\n}", "func (list elemlist) Len() int {\n\treturn len(list.elements)\n}", "func (opts *ListOpts) Len() int {\n return len((*opts.values))\n}", "func (ls *LevelSlice) Len() int {\n\treturn ls.length\n}", "func (s *Store) Len(ctx context.Context) (int64, error) {\n\tvar nb int64\n\tif err := s.List(ctx, \"\", func(string) error {\n\t\tnb++\n\t\treturn nil\n\t}); err != nil {\n\t\treturn 0, err\n\t}\n\treturn nb, nil\n}", "func (fields List) Len() int {\n\tvar count int\n\tb := ptob(fields.p)\n\tvar i int\n\tfor {\n\t\tx, n := uvarint(b[i:])\n\t\tif n == 0 {\n\t\t\tbreak\n\t\t}\n\t\tif useSharedNames {\n\t\t\ti += n\n\t\t} else {\n\t\t\ti += n + x\n\t\t}\n\t\tisdatakind := datakind(Kind(b[i]))\n\t\ti++\n\t\tif isdatakind {\n\t\t\tx, n = uvarint(b[i:])\n\t\t\ti += n + x\n\t\t}\n\t\tcount++\n\t}\n\treturn count\n}", "func (arr *ArrayList) Len() uint32 {\n return arr.length\n}", "func (bl NamedRepositoryList) Len() int { return len(bl) }", "func (l *AddOnList) Len() int {\n\tif l == nil {\n\t\treturn 0\n\t}\n\treturn len(l.items)\n}", "func (h *Header) Len()int {\r\n\treturn h.List.Len()\r\n}", "func (wl *W3CNodeList) Length() int {\n\tif wl == nil {\n\t\treturn 0\n\t}\n\treturn len(wl.nodes)\n}", "func (l *CSPList) Len() int {\n\treturn len(l.ObjectList.Items)\n}", "func (d *babbleDictionary) GetListLength() int {\n\treturn len(d.sourceList)\n}", "func (s *SliceOfInt64) Len() int {\n\treturn len(s.items)\n}", "func (t ReplicaList) Len() int {\n\treturn len(t)\n}", "func (a byCount) Len() int { return len(a) }", "func (c Chain) Length() int {\n\tcount := 1\n\tcurrent := c.Rest\n\tfor {\n\t\tif current == nil {\n\t\t\tbreak\n\t\t}\n\t\tcount++\n\t\tcurrent = current.Rest\n\t}\n\treturn count\n}", "func (stack *Stack) Len() int {\n\treturn stack.list.Len()\n}", "func (list *List) Size() int {\n\treturn list.size\n}", "func (list *List) Size() int {\n\treturn list.size\n}", "func (list *List) Size() int {\n\treturn list.size\n}", "func (l *pqList) Len() int {\n\treturn len(l.Slice)\n}", "func (l *HandlerList) Len() int {\n\treturn 
len(l.list)\n}", "func (v VersionList) Len() int {\n\treturn len(v)\n}", "func (s *SliceOfInt32) Len() int {\n\treturn len(s.items)\n}", "func (list LinkedListNode) Len() (l int) {\n\tl = 0\n\tfor curr := &list; curr != nil; curr = curr.Next {\n\t\tl++\n\t}\n\treturn\n}", "func (ls *LookupList) Len() int {\n\treturn len(ls.Nodelist)\n}", "func (ls *LookupList) Len() int {\n\treturn len(ls.Nodelist)\n}", "func (s SongSlice) Length() int {\n\t// Iterate and sum duration\n\tlength := 0\n\tfor _, song := range s {\n\t\tlength += song.Length\n\t}\n\n\treturn length\n}", "func (fs Fruits) Len() int { return len(fs) }", "func (lru *LRU) Len() int {\n\treturn lru.list.Len()\n}", "func (l *LDAPIdentityProviderList) Len() int {\n\tif l == nil {\n\t\treturn 0\n\t}\n\treturn len(l.items)\n}", "func (s *SliceOfInt8) Len() int {\n\treturn len(s.items)\n}", "func (rules *Rules) Len() int {\n\treturn len(rules.list)\n}", "func (l *sampleList) Len() int { return len(l.samples) - len(l.free) }" ]
[ "0.86637837", "0.85828537", "0.8372451", "0.837093", "0.83670425", "0.8366518", "0.83277553", "0.8309399", "0.82558864", "0.8134802", "0.8130817", "0.8125857", "0.81123465", "0.8108812", "0.81026274", "0.8097367", "0.8041334", "0.80260754", "0.80061847", "0.8005908", "0.80029535", "0.79799753", "0.79799753", "0.79799753", "0.7951934", "0.7905113", "0.78790224", "0.78339857", "0.7820882", "0.78073585", "0.77752846", "0.7765905", "0.7759184", "0.7757208", "0.7756738", "0.7754403", "0.7750814", "0.7747403", "0.7737559", "0.77331626", "0.7733102", "0.77190065", "0.7710118", "0.77069354", "0.7701044", "0.7695299", "0.76949614", "0.7690435", "0.7676093", "0.76718986", "0.7665674", "0.76535153", "0.7651566", "0.7631418", "0.7628723", "0.76103455", "0.7590608", "0.7580688", "0.7574564", "0.7571862", "0.75647956", "0.75526255", "0.7548671", "0.75404364", "0.75374174", "0.75151765", "0.7500665", "0.74996597", "0.7487336", "0.74863696", "0.7483903", "0.7483027", "0.74828875", "0.7473412", "0.74572957", "0.7451806", "0.74473345", "0.7433745", "0.74138796", "0.74134004", "0.7403114", "0.74022454", "0.73709255", "0.7362967", "0.7362967", "0.7362967", "0.73576367", "0.7348506", "0.7347779", "0.73452294", "0.73214895", "0.73208416", "0.73208416", "0.7315811", "0.7297314", "0.727663", "0.7273825", "0.7273185", "0.7259731", "0.72577125" ]
0.7825778
28
Linked list structure methods Insert new node at the end of the linked list
func (l *LinkedList) Insert(val interface{}) {
	n := &Node{value: val}
	if l.head == nil {
		l.head = n
	} else {
		l.tail.SetNext(n)
	}
	l.tail = n
	l.length = l.length + 1
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func InsertEnd(head *Node, data int) {\n\t// create a node\n\tnode := Node{\n\t\tData: data,\n\t\tNext: nil,\n\t}\n\t// check if the head is the only one node in the list\n\tif head.Next == nil {\n\t\thead.Next = &node\n\t} else {\n\t\t// Head has next node\n\t\tnext := head.Next\n\t\t// Traverse to end\n\t\tfor {\n\t\t\tif next.Next != nil {\n\t\t\t\tnext = next.Next\n\t\t\t} else {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\t// we are at the end of list, last node\n\t\t// add new node as last node's next node\n\t\tnext.Next = &node\n\t}\n}", "func (f *LinkedList) insertLast(newElmt *Element) () {\n\tif (f.length == 0){\n\t\tf.start = newElmt\n\t\tf.end = newElmt\n\t} else {\n\t\tcurrentElmt := f.end\n\t\tcurrentElmt.next = newElmt\n\t\tnewElmt.prev = f.end\n\t\tf.end = newElmt\n\t}\n\tf.length++\n}", "func addNodeEnd(newPerson, personList *person) *person {\n\tif personList == nil {\n\t\treturn personList\n\t}\n\tfor p := personList; p != nil; p = p.next {\n\t\tif p.next == nil {\n\t\t\tp.next = newPerson\n\t\t\treturn personList\n\t\t}\n\t}\n\treturn personList\n}", "func (list *List) InsertAfter(data int, reference int) {\n // 1. Create a new node\n newNode := &Node{data: data, next: nil}\n\n // 2. Add node to list if the list is empty and return\n if list.Size() == 0 {\n list.head = newNode\n list.size++\n return\n }\n\n // 3. Get the head of the list as current iterator\n current := list.Head()\n\n // 4a. Traverse the list to find reference node\n // 4b. If reference node is found, insert the new node and return\n for current != nil {\n if current.data == reference {\n newNode.next = current.next\n current.next = newNode\n list.size++\n return\n }\n current = current.next\n }\n\n // 5. Provide message to user if the reference node was not found\n fmt.Println(\"Could not insert the node as did not find the given reference node\")\n}", "func (list *LinkedList) InsertListEnd(element interface{}) {\n\tnode := Element{element, nil}\n\tif list.size == 0 {\n\t\tlist.head = &node\n\t\tlist.tail = &node\n\t} else {\n\t\tlist.tail.next = &node\n\t\tlist.tail = &node\n\t}\n\tlist.size++\n}", "func InsertAtEnd(head *Node, x int) *Node {\n\tfmt.Println(\"\\nIn InsertAtEnd:\")\n\ttmp := Node{Next: nil, Data: x}\n\tfmt.Println(\"Inserting\", tmp, \"at the end\")\n\tcurrent := head\n\tif head == nil {\n\t\thead = &tmp\n\t} else {\n\t\tfor ; current.Next != nil; current = current.Next {\n\t\t}\n\t\tcurrent.Next = &tmp\n\t}\n\tfmt.Printf(\"Address of node %p\\n\", &*head)\n\tfmt.Println(\"Address of head pointer \", &head)\n\tfmt.Println(\"Returning\")\n\treturn head\n}", "func (list *List) AppendToEnd(data int) {\n // 1. Create a new Node\n newNode := &Node{data: data, next: nil}\n\n // 2a. If list contains no elements, set new node as head of list\n // 2b. If list contains any element, traverse till last and append new node\n if list.size == 0 {\n list.head = newNode\n } else if list.size > 0 {\n current := list.head\n for current.next != nil {\n current = current.next\n }\n current.next = newNode\n }\n\n // 3. 
Increment the list size\n list.size++\n}", "func (n *Node) insertNodeAtTheEnd(el string) {\n\tif (*n).value == \"\" {\n\t\t(*n).setData(el)\n\t\treturn\n\t}\n\tif (*n).next == nil {\n\t\tnextNode := Node{}\n\t\t(*n).next = &nextNode\n\t\t(nextNode).insertNodeAtTheEnd(el)\n\t} else {\n\t\t((*n).next).insertNodeAtTheEnd(el)\n\t}\n}", "func (head *Node) Insert(data interface{}) {\n\thead.Next = &Node{Data: data, Next: head.Next}\n}", "func (node *LinkedListNode) insert(val int) *LinkedListNode {\n\thead := node\n\tnew_node := LinkedListNode{Val: val, Next: nil}\n\tfor node.Next != nil {\n\t\tnode = node.Next\n\t}\n\tnode.Next = &new_node\n\treturn head\n}", "func InsertAtEndInPlace(head **Node, x int) {\n\tfmt.Println(\"\\nIn InsertAtEndInPlace:\")\n\ttmp := Node{Next: nil, Data: x}\n\tfmt.Println(\"Inserting\", tmp, \"at the end\")\n\tif *head == nil {\n\t\t*head = &tmp\n\t} else {\n\t\tvar p *Node\n\t\tp = *head\n\t\tfor ; p.Next != nil; p = p.Next {\n\t\t}\n\t\tp.Next = &tmp\n\t}\n\tfmt.Printf(\"Address of head node %p\\n\", &**head)\n\tfmt.Println(\"Address of head pointer \", &*head)\n\tfmt.Println(\"Returning\")\n}", "func Insert(ll *List, key string, value interface{}, ttl int64) (*Node, error) {\n\t// Lock access to the list\n\tll.Mux.Lock()\n\tdefer ll.Mux.Unlock()\n\n\t// Init the new node\n\tnewNode := &Node{\n\t\tKey: key,\n\t\tValue: value,\n\t\tTTL: ttl,\n\t\tCreatedAt: time.Now().Unix(),\n\t\tPrev: nil,\n\t\tNext: nil,\n\t}\n\n\t// Update the pointers of head and tail and set pointers\n\t// for the new node.\n\tnewNode.Prev = ll.Head\n\tnewNode.Next = ll.Head.Next\n\tll.Head.Next = newNode // Point Head to newNode\n\tnewNode.Next.Prev = newNode // Point the old \"Most Recent\" to the new node\n\n\t// Atomically increment the size.\n\tatomic.AddInt32(&ll.Size, 1)\n\n\treturn newNode, nil\n}", "func (l *LinkedList) Push(newData datatype) {\n\n\tnode_to_be_added := node{\n\t\tdata: newData,\n\t}\n\n\tif l.head == nil { // Linked List is empty.\n\t\tl.head = &node_to_be_added\n\t} else {\n\t\tcurrent := l.head\n\t\tfor current.next != nil {\n\t\t\tcurrent = current.next\n\t\t}\n\t\tcurrent.next = &node_to_be_added\n\t}\n\n}", "func (l *LinkedList) Insert(val interface{}) {\n\tn := Node{}\n\tn.data = val\n\tif l.len == 0 {\n\t\tl.head = &n\n\t\tl.tail = &n\n\t\tl.len++\n\t\treturn\n\t}\n\tl.tail.next = &n\n\tn.prev = l.tail\n\tl.tail = &n\n\tl.len++\n}", "func (l *PostingList) Insert(n *Node) {\n\n\tl.n++\n\n\tif l.tNode != nil {\n\t\tl.tNode.next = n\n\t\tl.tNode = n\n\t}\n\n\tif l.hNode == nil {\n\t\tl.hNode = n\n\t\tl.tNode = n\n\t}\n}", "func insertNode(i int, x, y float64, last *node) *node {\n\tp := newNode(i, x, y)\n\n\tif last == nil {\n\t\tp.prev = p\n\t\tp.next = p\n\n\t} else {\n\t\tp.next = last.next\n\t\tp.prev = last\n\t\tlast.next.prev = p\n\t\tlast.next = p\n\t}\n\treturn p\n}", "func (dl *DcmList) Insert(obj *DcmObject, pos E_ListPos) *DcmObject {\n\tif obj != nil {\n\t\tif dl.Empty() { // list is empty !\n\t\t\tdl.currentNode = NewDcmListNode(obj)\n\t\t\tdl.firstNode = dl.currentNode\n\t\t\tdl.lastNode = dl.currentNode\n\t\t\tdl.cardinality = dl.cardinality + 1\n\t\t} else {\n\t\t\tif pos == ELP_last {\n\t\t\t\tdl.Append(obj) // cardinality++;\n\t\t\t} else if pos == ELP_first {\n\t\t\t\tdl.Prepend(obj) // cardinality++;\n\t\t\t} else if dl.Valid() != true {\n\t\t\t\t// set current node to the end if there is no predecessor or\n\t\t\t\t// there are successors to be determined\n\t\t\t\tdl.Append(obj) // cardinality++;\n\t\t\t} else if pos == ELP_prev { // insert 
before current node\n\t\t\t\tnode := NewDcmListNode(obj)\n\t\t\t\tif dl.currentNode.prevNode == nil {\n\t\t\t\t\tdl.firstNode = node // insert at the beginning\n\t\t\t\t} else {\n\t\t\t\t\tdl.currentNode.prevNode.nextNode = node\n\t\t\t\t}\n\t\t\t\tnode.prevNode = dl.currentNode.prevNode\n\t\t\t\tnode.nextNode = dl.currentNode\n\t\t\t\tdl.currentNode.prevNode = node\n\t\t\t\tdl.currentNode = node\n\t\t\t\tdl.cardinality = dl.cardinality + 1\n\t\t\t} else { //( pos==ELP_next || pos==ELP_atpos )\n\t\t\t\t// insert after current node\n\t\t\t\tnode := NewDcmListNode(obj)\n\t\t\t\tif dl.currentNode.nextNode == nil {\n\t\t\t\t\tdl.lastNode = node // append to the end\n\t\t\t\t} else {\n\t\t\t\t\tdl.currentNode.nextNode.prevNode = node\n\t\t\t\t}\n\t\t\t\tnode.nextNode = dl.currentNode.nextNode\n\t\t\t\tnode.prevNode = dl.currentNode\n\t\t\t\tdl.currentNode.nextNode = node\n\t\t\t\tdl.currentNode = node\n\t\t\t\tdl.cardinality = dl.cardinality + 1\n\t\t\t}\n\t\t}\n\t}\n\treturn obj\n}", "func (l *LinkedList) InsertAfter(existVal, val interface{}) {\n\tp := l.Head\n\tif p == nil {\n\t\treturn\n\t}\n\n\tfor p != nil {\n\t\tif p.Val == existVal {\n\t\t\tnode := NewListNode(val)\n\t\t\tnode.Next = p.Next\n\t\t\tp.Next = node\n\t\t\treturn\n\t\t}\n\t\tp = p.Next\n\t}\n}", "func (list *DoublyLinkedList) Append(newNode *Node) {\n\tif list.head == nil {\n\t\tlist.head = newNode\n\t\tlist.lenght++\n\t\treturn\n\t}\n\tlastNode := list.GetLastNode()\n\tlastNode.next = newNode\n\tnewNode.previous = lastNode\n\n\tlist.lenght++\n}", "func (l *List) Append(newData interface{}, parentInfo interface{}) {\n\tn := &Node{newData, new(Node), new(Node), parentInfo}\n\tif l.len == 0 {\n\t\tl.Head.Prev = n\n\t\tl.Head.Next = n\n\t\tl.Head.Prev.Next = l.Head\n\t\tl.Head.Prev.Prev = l.Head\n\t} else {\n\t\tl.Head.Prev.Next = n\n\t\tn.Prev = l.Head.Prev\n\t\tl.Head.Prev = n\n\t\tl.Head.Prev.Next = l.Head\n\t}\n\tl.len++\n}", "func (ll *LinkedList) Append(t Item) {\n\tll.lock.Lock()\n\tnode := Node{t, nil}\n\n\t//fmt.Printf(\"Appending value: %s\\n\", node.content)\n\n\tif ll.head == nil {\n\t\tll.head = &node\n\t} else {\n\t\tlast := ll.head\n\n\t\tfor {\n\t\t\tif last.next == nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlast = last.next\n\t\t}\n\n\t\tlast.next = &node\n\t}\n\n\tll.count++\n\tll.lock.Unlock()\n}", "func d19reInsert(node *d19nodeT, listNode **d19listNodeT) {\n\tnewListNode := &d19listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *LinkedList) InsertAtEnd(val fmt.Stringer) error {\n\tif l.head == nil {\n\t\treturn l.InsertAtFront(val)\n\t}\n\tnode := Node{val, nil}\n\ttmp := l.head\n\tfor tmp.next != nil {\n\t\ttmp = tmp.next\n\t}\n\ttmp.next = &node\n\tl.tail = &node\n\tl.size++\n\treturn nil\n}", "func (node *LinkList) Insert(data int) {\n\tnewNode := new(LinkList)\n\tnewNode.data = data\n\tnewNode.next = nil\n\tif node == nil {\n\t\tnode = newNode\n\t} else {\n\t\ttemp := node\n\t\tfor temp.next != nil {\n\t\t\ttemp = temp.next\n\t\t}\n\t\ttemp.next = newNode\n\t}\n}", "func (l *List) insert(n, at *Node) *Node {\n\tn.prev = at\n\tn.next = at.next\n\tn.prev.next = n\n\tn.next.prev = n\n\tn.list = l\n\tl.Size++\n\treturn n\n}", "func d8reInsert(node *d8nodeT, listNode **d8listNodeT) {\n\tnewListNode := &d8listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *LinkedList) Insert(val int) {\n\tnode := new(ListNode)\n\tnode.data = val\n\n\tif l.head == nil {\n\t\tl.head = node\n\t}\n\tif l.back == nil 
{\n\t\tl.back = node\n\t} else {\n\t\tl.back.next = node\n\t\tl.back = node\n\t}\n\n}", "func d3reInsert(node *d3nodeT, listNode **d3listNodeT) {\n\tnewListNode := &d3listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *List) Insert(val interface{}) {\n\tlst := &Node{\n\t\tnext: l.head,\n\t\tval: val,\n\t}\n\tif l.head != nil {\n\t\tl.head.prev = lst\n\t}\n\tl.head = lst\n\n\tli := l.head\n\tfor li.next != nil {\n\t\tli = li.next\n\t}\n\tl.tail = li\n}", "func (nl *NodeList) Insert(n *Node) {\n\tif nl.front == nil {\n\t\tnl.front = n\n\t\tnl.back = nl.front\n\t\tnl.length = 1\n\t\treturn\n\t}\n\n\tn.prev = nl.back\n\tn.Next = nil\n\tnl.back.Next = n\n\tnl.back = n\n\n\tnl.length++\n\n}", "func InsertAfter(node, newNode *Node) {\n\tnewNode.Next = node.Next\n\tnode.Next = newNode\n}", "func d20reInsert(node *d20nodeT, listNode **d20listNodeT) {\n\tnewListNode := &d20listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func d12reInsert(node *d12nodeT, listNode **d12listNodeT) {\n\tnewListNode := &d12listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *List) Insert(data interface{}) {\n\tnewNode := new(Node)\n\tnewNode.Data = data\n\tnewNode.Next = l.Head\n\tl.Head = newNode\n}", "func (cll *CircularLinkedList) InsertEnd(newNode *Node) {\n\tif !(cll.CheckIfEmptyAndAdd(newNode)) {\n\t\thead := cll.Start\n\t\tfor {\n\t\t\tif head.Next == cll.Start {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\thead = head.Next\n\t\t}\n\t\thead.Next = newNode\n\t\tnewNode.Next = cll.Start\n\t\tcll.Len++\n\t}\n}", "func (l *LinkedList) InsertToTail(val interface{}) {\n\tnode := NewListNode(val)\n\tp := l.Head\n\n\tif p == nil {\n\t\tl.Head = node\n\t\treturn\n\t}\n\n\tfor p.Next != nil {\n\t\tp = p.Next\n\t}\n\tp.Next = node\n}", "func (l *LinkedList) Insert(elem string) {\n\tnode := Node{\n\t\tNext: l.Head,\n\t\tValue: elem,\n\t}\n\tl.Head = &node\n\tl.Size++\n}", "func d6reInsert(node *d6nodeT, listNode **d6listNodeT) {\n\tnewListNode := &d6listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (head *Node) insert(node *Node) {\n\tif head == nil {\n\t\treturn\n\t}\n\n\tafter := head\n\tfor after.next != nil && node.weight >= after.next.weight {\n\t\tafter = after.next\n\t}\n\n\tnode.prev = after\n\tnode.next = after.next\n\tif after.next != nil {\n\t\tafter.next.prev = node\n\t}\n\tafter.next = node\n}", "func d5reInsert(node *d5nodeT, listNode **d5listNodeT) {\n\tnewListNode := &d5listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func d1reInsert(node *d1nodeT, listNode **d1listNodeT) {\n\tnewListNode := &d1listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *LinkedList) Append(val int) {\n\t// check if head is exist\n\tif l.Head == nil {\n\t\tl.Head = &Node{\n\t\t\tData: val,\n\t\t}\n\t\treturn\n\t}\n\n\t// loop until the end of linkedlist\n\tcurrent := l.Head\n\tfor current.Next != nil {\n\t\tcurrent = current.Next\n\t}\n\n\t// connect tail to new node\n\tcurrent.Next = &Node{\n\t\tData: val,\n\t}\n}", "func (head *Node) AddLast(e int) *Node {\n\t// create a new node\n\tnode := &Node{Val: e}\n\n\tif head == nil {\n\t\treturn node\n\t}\n\n\t// iterate till last node\n\titer := head\n\tfor iter.Next != nil {\n\t\titer = iter.Next\n\t}\n\n\t//add new node after last node\n\titer.Next = 
node\n\treturn head\n}", "func (dl *DoublyLinkedList) AppendAfter(n *Node, a *Node) error {\n\tif n == nil || a == nil {\n\t\treturn fmt.Errorf(\"Tried to create or append to a nil node\")\n\t}\n\t// Check if empty\n\tif dl.Length == 0 {\n\t\treturn fmt.Errorf(\"Tried to append after an empty list\")\n\t}\n\tn.Pre = a\n\tif a.Next == nil {\n\t\tdl.Tail = n\n\t} else {\n\t\tn.Next = a.Next\n\t}\n\ta.Next = n\n\tdl.Length++\n\treturn nil\n}", "func d2reInsert(node *d2nodeT, listNode **d2listNodeT) {\n\tnewListNode := &d2listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func d9reInsert(node *d9nodeT, listNode **d9listNodeT) {\n\tnewListNode := &d9listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func d11reInsert(node *d11nodeT, listNode **d11listNodeT) {\n\tnewListNode := &d11listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (dll *DoublyLinkedList) Insert(data int32) {\n\tnewNode := &node{\n\t\tdata: data,\n\t\tnext: nil,\n\t\tprev: nil,\n\t}\n\tif dll.head == nil {\n\t\tdll.head = newNode\n\t} else {\n\t\ttemp := dll.head\n\t\tfor temp.next != nil {\n\t\t\ttemp = temp.next\n\t\t}\n\t\ttemp.next = newNode\n\t\tnewNode.prev = temp\n\t}\n}", "func (ll *DoubleLinkedList) AddAtEnd(val int) {\n\tn := NewNode(val)\n\n\tif ll.head == nil {\n\t\tll.head = n\n\t\treturn\n\t}\n\n\tcur := ll.head\n\tfor ; cur.next != nil; cur = cur.next {\n\t}\n\tcur.next = n\n\tn.prev = cur\n}", "func d7reInsert(node *d7nodeT, listNode **d7listNodeT) {\n\tnewListNode := &d7listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *LinkedList) Insert(node *LLNode, index int) error {\n\tswitch {\n\tcase index > l.Size:\n\t\treturn fmt.Errorf(\"Index %d out of range\", index)\n\n\tcase index == 0:\n\t\tl.Prepend(node)\n\t\treturn nil\n\n\tcase index == l.Size:\n\t\tl.Append(node)\n\t\treturn nil\n\n\tdefault:\n\t\tcurrentNode, NextNode := l.Head, l.Head.Next\n\t\tfor i := 0; i < l.Size; i++ {\n\t\t\tif i+1 == index {\n\t\t\t\tnode.Next = NextNode\n\t\t\t\tcurrentNode.Next = node\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tcurrentNode = NextNode\n\t\t\tNextNode = NextNode.Next\n\t\t}\n\n\t\tl.Size++\n\n\t\treturn nil\n\t}\n}", "func (list *DoublyLinkedList) AddAtTheBeggining(node *Node) {\n\tif list.lenght == 0 {\n\t\tlist.head = node\n\n\t} else {\n\t\tsecondNode := list.head\n\t\tlist.head = node\n\t\tlist.head.next = secondNode\n\t\tsecondNode.previous = node\n\t}\n\tlist.lenght++\n}", "func (l *List) Append(node *Node) {\n\tif l.length == 0 {\n\t\tl.head = node\n\t} else {\n\t\tcurrentNode := l.head\n\t\tfor currentNode.next != nil {\n\t\t\tcurrentNode = currentNode.next\n\t\t}\n\t\tcurrentNode.next = node\n\t}\n\tl.length++\n}", "func addToTail(tail, node *doubleListNode) {\n\tnode.right, node.left = tail, tail.left\n\ttail.left.right, tail.left = node, node\n}", "func d18reInsert(node *d18nodeT, listNode **d18listNodeT) {\n\tnewListNode := &d18listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func d4reInsert(node *d4nodeT, listNode **d4listNodeT) {\n\tnewListNode := &d4listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (list *Linkedlist) Push(value interface{}) {\n\tlist.head = &Node{value, list.head}\n\tlist.size++\n}", "func (list *DoublyLinkedList) AddAfter(index int, newNode *Node) string {\n\tvar 
err string\n\tvar previousNode *Node\n\tvar nextNode *Node\n\tif list.isEmpty() {\n\t\terr = \"vacia\"\n\t} else if list.isInRange(index) {\n\t\terr = \"El indice está fuera de rango\"\n\t} else {\n\t\tpreviousNode, _ = list.GetNodeAt(index)\n\t\tif previousNode.next != nil {\n\t\t\tnextNode = previousNode.next\n\t\t}\n\n\t\tpreviousNode.next = newNode\n\t\tnewNode.previous = previousNode\n\n\t\tif nextNode != nil {\n\t\t\tnextNode.previous = newNode\n\t\t\tnewNode.next = nextNode\n\t\t} else {\n\t\t\tnewNode.next = nil\n\t\t}\n\n\t}\n\tlist.lenght++\n\treturn err\n}", "func (nl *nodeList) insert(i int, n *Node) {\n\t// Add a nil value to the end of the slice, to make room for the new Node.\n\tnl.elements = append(nl.elements, nil)\n\t// Copy values from the insertion point to the right by one\n\tcopy(nl.elements[i+1:], nl.elements[i:])\n\t// Set the value at the insertion point\n\tnl.elements[i] = n\n}", "func d13reInsert(node *d13nodeT, listNode **d13listNodeT) {\n\tnewListNode := &d13listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *SinglyLinkedList) insert(node, at *Node) *Node {\n\tvar n = at.next\n\tat.next = node\n\tnode.next = n\n\tnode.list = l\n\tl.size++\n\treturn node\n}", "func d10reInsert(node *d10nodeT, listNode **d10listNodeT) {\n\tnewListNode := &d10listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (al *LinkedList) Append(item int) {\n\tn := &node.Node{item, nil}\n\tif al.head == nil {\n\t\tal.head = n\n\t\tal.curr = n \n\t\tal.size = al.size + 1\n\t\treturn \n\t}\n\tcurr := al.curr\n\tcurr.Next = n\n\tal.curr = n\n\tal.size = al.size + 1\n}", "func (m *Model) InsertAfter(tarNode *Node, newNodes ...*Node) error {\n\tif _, ok := m.Get(tarNode.ID); !ok {\n\t\treturn errors.New(\"Target node not in model\")\n\t}\n\n\tfor _, newNode := range newNodes {\n\t\tif _, ok := m.Get(newNode.ID); ok {\n\t\t\treturn errors.New(\"Node already in model\")\n\t\t}\n\n\t\tm.table[newNode.ID] = newNode\n\n\t\tfor nextNode := tarNode.Next; nextNode != m.tail; nextNode = nextNode.Next {\n\t\t\tif newNode.ID.Compare(nextNode.ID) == -1 {\n\t\t\t\ttarNode = nextNode\n\t\t\t} else {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tlinkAfter(tarNode, newNode)\n\t\ttarNode = newNode\n\t}\n\treturn nil\n}", "func (l *Slist) Append(d interface{}) {\n\tif l.head == nil {\n\t\tl.head = &Snode{data: d}\n\t\tl.len++\n\t\treturn\n\t}\n\n\tcurrent := l.head\n\tfor current != nil {\n\t\tif current.next == nil {\n\t\t\tcurrent.next = &Snode{data: d}\n\t\t\tl.len++\n\t\t\treturn\n\t\t}\n\t\tcurrent = current.next\n\t}\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\n\tnode := this\n\n\tfor node.next != nil {\n\t\tnode = node.next\n\t}\n\n\tnode.next = &MyLinkedList{\n\t\tval: val,\n\t\tnext: nil,\n\t}\n}", "func d14reInsert(node *d14nodeT, listNode **d14listNodeT) {\n\tnewListNode := &d14listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (l *list) Push(data *node) {\n\tvar last *node\n\n\tl.Lock()\n\tswitch {\n\tcase l.head.next == nil:\n\t\tlast = l.head\n\tdefault: // first case false\n\t\tlast = l.tail\n\t}\n\tlast.next = data\n\tdata.prev = last\n\tl.tail = last.next\n\tl.Unlock()\n}", "func (f * LinkedList) insertAfterIdx(idx int, newElmt *Element) () {\n\tif (idx < f.length) {\n\t\tif (idx == f.length - 1){\n\t\t\tf.insertLast(newElmt)\n\t\t} else {\n\t\t\tel := f.getElmt(idx)\n\t\t\tnewElmt.prev = el\n\t\t\tnewElmt.next = 
el.next\n\t\t\tel.next.prev = newElmt\n\t\t\tel.next = newElmt\n\t\t\tf.length++\n\t\t}\n\t}\n}", "func (l *LinkedList) Append(node *LLNode) {\n\tif l.Tail == nil {\n\t\tl.Head = node\n\t\tl.Tail = node\n\t} else {\n\t\tl.Tail.Next = node\n\t\tl.Tail = node\n\t}\n\n\tl.Size++\n}", "func (list *LinkedList) Insert(link *Link) {\n\tlink.next = list.firstLink\n\tlist.firstLink = link\n}", "func (l *List) PushBack(newNode *Node) {\n\n\tl.size++\n \tl.tail = newNode\n \tif l.head == nil {\n \t\tl.head = newNode\n \t\treturn\n \t} \n\tcurrentNode := l.head\n\tfor currentNode.next != nil {\n\t\tcurrentNode = currentNode.next\n\t}\n\tcurrentNode.next = newNode \n}", "func (l *List) Insert(pos int, v interface{}) error {\n\tif pos == 0 {\n\t\tl.head = &Node{v, l.head}\n\t\treturn nil\n\t}\n\n\tp := l.head\n\ti := pos - 1\n\tfor i != 0 {\n\t\tif p.next == nil {\n\t\t\treturn fmt.Errorf(\"%v is not a valid position for a %v long list\", pos, pos-i)\n\t\t}\n\t\tp = p.next\n\t\ti--\n\t}\n\n\tp.next = &Node{v, p.next}\n\treturn nil\n}", "func (list *linkedList) pushBack(node *node) {\n\tif list.head == nil {\n\t\tlist.head = node\n\t\tlist.tail = node\n\t\tnode.next = list.head\n\n\t\tlist.length++\n\t\treturn\n\t}\n\n\tlist.tail.next = node\n\tlist.tail = node\n\tlist.tail.next = list.head\n\tlist.length++\n}", "func (s *SinglyLinkedList) AddToEnd(val interface{}) {\n\tnode := &Node{val, nil}\n\ts.Count += 1\n\tlast := s.LastNode()\n\tif last != nil {\n\t\tlast.Next = node\n\t}\n}", "func (l *List) Append(val interface{}) {\n\tnewTail := &Node{\n\t\tprev: l.tail,\n\t\tval: val,\n\t}\n\tl.tail.next = newTail\n\tl.tail = newTail\n}", "func (l *list) insert(i int, e string) {\n n := l.first\n\n // if the list is empty, e becomes the first item\n if n == nil {\n l.first = create(nil, nil, e)\n return\n }\n\n // does this go to the first index?\n if i <= 0 {\n m := create(nil, n, e)\n n.prev = m\n l.first = m\n return\n } \n\n // otherwise traverse to the correct position\n j := 1\n for n.next != nil && j < i {\n n = n.next\n j++\n }\n\n m := create(n, n.next, e)\n\n // add the element into the list and fix the links\n if (n.next != nil) {\n n.next.prev = m\n }\n\n n.next = m\n}", "func d16reInsert(node *d16nodeT, listNode **d16listNodeT) {\n\tnewListNode := &d16listNodeT{}\n\tnewListNode.node = node\n\tnewListNode.next = *listNode\n\t*listNode = newListNode\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\tif this.val == nil {\n\t\tthis.val = &val\n\t}else if this.next == nil {\n\t\tthis.next = &MyLinkedList{val:&val}\n\t} else{\n\t\tfindElement := true\n\t\tfor findElement {\n\t\t\tif this.next != nil {\n\t\t\t\tthis = this.next\n\t\t\t} else {\n\t\t\t\tfindElement = false\n\t\t\t}\n\t\t}\n\t\ttmp := MyLinkedList{val: &val}\n\t\tthis.next = &tmp\n\t}\n\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\tnewTail := &NodeList{\n\t\tVal: val,\n\t\tPrev: this.tail,\n\t\tNext: nil,\n\t}\n\n\tif this.tail != nil {\n\t\tthis.tail.Next = newTail\n\n\t} else {\n\t\tthis.head = newTail\n\n\t}\n\n\tthis.tail = newTail\n\n\tthis.length = this.length + 1\n\n}", "func InsertAfter(head *Node, data int, after int) int {\n\tfmt.Println(\"Inside InsertAfter (data => \", data, \" after =>\", after, \" )\")\n\tcreated := 0\n\t// check if the list is empty or not\n\tif head.Next != nil {\n\t\tnext := head.Next\n\t\tfor {\n\t\t\tif next.Data == after {\n\t\t\t\tfmt.Println(\"Found data match\")\n\t\t\t\t// node data found\n\t\t\t\t// create a new node\n\t\t\t\tnode := Node{\n\t\t\t\t\tData: data,\n\t\t\t\t\tNext: 
next.Next,\n\t\t\t\t}\n\t\t\t\tnext.Next = &node\n\t\t\t\t// done inserting\n\t\t\t\tcreated = 1\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tnext = next.Next\n\t\t}\n\t}\n\n\treturn created\n}", "func (l *List) Push(newData interface{}, parentInfo interface{}) *Node {\n\tn := &Node{newData, new(Node), new(Node), parentInfo}\n\tif l.len == 0 {\n\t\tl.Head.Prev = n\n\t\tl.Head.Next = n\n\t\tl.Head.Prev.Next = l.Head\n\t\tl.Head.Prev.Prev = l.Head\n\t\tl.len++\n\t\treturn n\n\t}\n\n\tl.Head.Next.Prev = n\n\tn.Next = l.Head.Next\n\tn.Prev = l.Head\n\tl.Head.Next = n\n\n\tl.len++\n\treturn n\n}", "func (l *List) AddAfter(d interface{}, at *Node) *Node {\n\tl.lazyInit()\n\treturn l.insertData(d, at)\n}", "func (node *LinkList) InsertAtPos(data, pos int) {\n\tif node == nil {\n\t\tfmt.Println(\"list is empty\")\n\t\treturn\n\t}\n\tnewNode := new(LinkList)\n\tnewNode.data = data\n\tnewNode.next = nil\n\tcount := 1\n\ttemp := node\n\tfor temp.next != nil && count < pos {\n\t\ttemp = temp.next\n\t\tcount++\n\t}\n\tif count < pos {\n\t\tfmt.Println(\"Not enough element present in the list\")\n\t\treturn\n\t}\n\tnewNode.next = temp.next\n\ttemp.next = newNode\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\tnode := &ListNode{val: val, next: nil}\n\tif this.head == nil {\n\t\tthis.head = node\n\t} else {\n\t\tthis.tail.next = node\n\t}\n\tthis.tail = node\n}", "func (item *MapItem) InsertTail(list *MapItem) {\n\tprev := list.prev\n\titem.next = list\n\titem.prev = prev\n\tprev.next = item\n\tlist.prev = item\n}", "func (node *LinkedListNode) AppendToTail(data int) {\n\tend := new(LinkedListNode)\n\tend.data = data\n\n\tfor node.next != nil {\n\t\tnode = node.next\n\t}\n\tnode.next = end\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\tnode := &ListNode{val, nil}\n\tthis.listMap = append(this.listMap, node)\n\tif this.tail == nil {\n\t\tthis.tail = node\n\t\tthis.head = node\n\t} else {\n\t\tthis.tail.Next = node\n\t\tthis.tail = node\n\t}\n\tthis.size++\n}", "func (dl *DoublyLinkedList) Append(n *Node) {\n\tif n == nil {\n\t\treturn\n\t}\n\t// Check if empty\n\tif dl.Length == 0 {\n\t\tdl.Head = n\n\t\tdl.Tail = n\n\t\tdl.Length++\n\t\treturn\n\t}\n\n\tlastNode := dl.Tail\n\n\t// Then, we update the last node (tail) in our Doubly linked list\n\t// with the new Node, connecting it to the previous last Node\n\t// and also connecting our previous last Node to the new one.\n\t// Finally, we only need to increment the length.\n\tdl.Tail = n\n\tdl.Tail.Pre = lastNode\n\tlastNode.Next = n\n\tdl.Length++\n}", "func (dl *DoublyLinkedList) push(value int32) {\n\tnode := Node{value, nil, nil}\n\n\tif dl.head == nil {\n\t\tdl.head = &node\n\t\tdl.tail = &node\n\t} else {\n\t\tdl.tail.next = &node\n\t\tnode.previous = dl.tail\n\t\tdl.tail = &node\n\t}\n\tdl.length++\n}", "func (list *LinkList) Insert(value interface{}) bool {\n\tnode := LinkNode{value: value}\n\tnode.next = list.head\n\tlist.head = &node\n\treturn true\n}", "func (l *LinkedList) Prepend(node *LLNode) {\n\tnode.Next = l.Head\n\tl.Head = node\n\n\tif l.Tail == nil {\n\t\tl.Tail = node\n\t}\n\n\tl.Size++\n}", "func (list *MyLinkedList) AddAtTail(val int) {\n\tnode := &Node{val, nil}\n\n\tif 0 == list.len {\n\t\tlist.head = node\n\t\tlist.tail = node\n\t} else {\n\t\tlist.tail.next = node\n\t\tlist.tail = node\n\t}\n\n\tlist.len++\n}", "func addToHead(head, node *doubleListNode) {\n\tnode.left, node.right = head, head.right\n\thead.right.left, head.right = node, node\n}", "func (l *LinkedList) Insert(index int, value interface{}) error {\n\t// Abort 
if index is not valid\n\tif index > l.length+1 || index <= 0 {\n\t\treturn errors.New(\"index is not valid. It should be between 1 and the length of the list + 1\")\n\t}\n\n\tp := l.head\n\tif p == nil {\n\t\tl.head = &Node{value: value}\n\t\tl.length++\n\t\treturn nil\n\t}\n\n\tif index == 1 {\n\t\tnewNode := &Node{value: value}\n\t\tl.Prepend(newNode)\n\t\treturn nil\n\t}\n\n\t// loop until the place right before the desired spot (index - 2)\n\tfor i := 0; i < index-2; i++ {\n\t\tp = p.next\n\t}\n\n\t// Save next node\n\taux := p.next\n\tp.next = &Node{value: value, next: aux}\n\tl.length++\n\treturn nil\n}", "func (list *LinkedList) InsertList(element interface{}) {\n\tlist.size++\n\tnode := Element{element, nil}\n\tnode.next = list.head\n\tlist.head = &node\n\tif list.size == 1 {\n\t\tlist.tail = &node\n\t}\n}", "func (d *dll) insert(i int) {\n\tn := new(node)\n\n\tn.x = i\n\tn.prev = d.dummy\n\tn.next = d.dummy.next\n\n\td.dummy.next.prev = n\n\td.dummy.next = n\n\n}", "func (this *MyLinkedList) AddAtTail(val int) {\n\ttail := this.Tail\n\tthis.Tail = &MyNode{\n\t\tVal: val,\n\t\tPrev: tail,\n\t\tNext: nil,\n\t}\n\tif tail != nil {\n\t\ttail.Next = this.Tail\n\t} else {\n\t\tthis.Head = this.Tail\n\t}\n\tthis.size += 1\n}", "func (l *List) AddToBack(d interface{}) *Node {\n\t// lazyInit just in case\n\t// this will guarantee that the next and prev nodes of root are not nil\n\tl.lazyInit()\n\n\t// insert the node to the end of the list\n\treturn l.insertData(d, l.root.prev)\n}", "func (bn *BufNode) InsertAfter(bnNew *BufNode) {\n\tbn1 := bn\n\tbn2 := bn.Next\n\n\tbn1.Next = bnNew\n\tbnNew.Prev = bn1\n\tbnNew.Next = bn2\n\tif bn2 != nil {\n\t\tbn2.Prev = bnNew\n\t}\n}" ]
[ "0.7369796", "0.72556496", "0.7145135", "0.7096707", "0.697039", "0.692947", "0.6897717", "0.686911", "0.68483496", "0.6822113", "0.6814988", "0.6801255", "0.67631364", "0.6759743", "0.6745765", "0.67423207", "0.674012", "0.6714902", "0.6678421", "0.66561234", "0.6636525", "0.66333103", "0.6621472", "0.6597117", "0.6592059", "0.6590373", "0.6576415", "0.65543497", "0.65525234", "0.65334016", "0.6517133", "0.6514493", "0.6503375", "0.64735484", "0.6470263", "0.64654267", "0.6456276", "0.6454689", "0.6441679", "0.644034", "0.64341855", "0.6432113", "0.6431168", "0.6418807", "0.6417496", "0.6401581", "0.6397506", "0.63824767", "0.6374726", "0.63503873", "0.63477474", "0.63446176", "0.63325113", "0.6325497", "0.6324887", "0.63231987", "0.6320207", "0.63059986", "0.6302993", "0.62870395", "0.6275553", "0.6270096", "0.62630683", "0.62625206", "0.62567455", "0.6256569", "0.62491274", "0.62491035", "0.6249078", "0.62477255", "0.6246242", "0.62429976", "0.6237625", "0.6231424", "0.6225064", "0.6206567", "0.6198377", "0.6186911", "0.6185256", "0.617241", "0.61624694", "0.6149975", "0.6146033", "0.6143904", "0.6128203", "0.61265177", "0.6126259", "0.6121119", "0.611952", "0.611428", "0.61086994", "0.61068606", "0.6102404", "0.6099836", "0.6098769", "0.6091365", "0.6085042", "0.6078875", "0.6077119", "0.60714215" ]
0.6566609
27
InsertAt method adds a new value at the given position
func (l *LinkedList) InsertAt(pos int, val interface{}) {
	n := &Node{value: val}
	// If the given position is greater than the list length
	// the element is inserted at the end of the list instead
	switch {
	case l.length < pos:
		l.Insert(val)
		// Insert already updates the length, so return here to avoid counting it twice
		return
	case pos == 1:
		n.SetNext(l.head)
		l.head = n
	default:
		node := l.head
		// Walk to the node just before the given position (pos - 1)
		for i := 1; i < (pos - 1); i++ {
			node = node.Next()
		}
		n.SetNext(node.Next())
		node.SetNext(n)
	}
	l.length = l.length + 1
}
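The Length, Insert, and InsertAt documents above all rely on a Node type with value/next fields and Next/SetNext accessors, plus a LinkedList holding head, tail, and length, none of which are included in these records. A minimal sketch of the assumed definitions, inferred from the method bodies and not part of the original records, might look like:

func (sketch) // hypothetical supporting types, inferred from the snippets above
type Node struct {
	value interface{} // payload stored in this element
	next  *Node       // following element, nil at the end of the list
}

// Next returns the node that follows n, or nil at the end of the list.
func (n *Node) Next() *Node { return n.next }

// SetNext links n to the given node.
func (n *Node) SetNext(next *Node) { n.next = next }

// LinkedList keeps head and tail pointers plus a running length so that
// Length() and Insert() stay O(1).
type LinkedList struct {
	head   *Node
	tail   *Node
	length int
}

With definitions along these lines the three methods above fit together: the tail pointer is what lets Insert append without walking the list, and the stored length is what Length returns directly.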
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *items) insertAt(index int, item Item) {\n\t*s = append(*s, nil)\n\tif index < len(*s) {\n\t\tcopy((*s)[index+1:], (*s)[index:])\n\t}\n\t(*s)[index] = item\n}", "func (e *ObservableEditableBuffer) InsertAt(rp0 int, rs []rune) {\n\tp0 := e.f.RuneTuple(rp0)\n\ts, nr := RunesToBytes(rs)\n\n\te.Insert(p0, s, nr)\n}", "func (s *children) insertAt(index int, n *node) {\n\t*s = append(*s, nil)\n\tif index < len(*s) {\n\t\tcopy((*s)[index+1:], (*s)[index:])\n\t}\n\t(*s)[index] = n\n}", "func (l *LinkedList) InsertAt(pos int, value interface{}) {\n\t// create a new node\n\tnewNode := Node{}\n\tnewNode.data = value\n\t// validate the position\n\tif pos < 0 {\n\t\tfmt.Println(\"Position can not be negative, skipping insertion of: \", value)\n\t\treturn\n\t}\n\tif pos == 0 && l.len == 0 {\n\t\tl.head = &newNode\n\t\tl.tail = &newNode\n\t\tl.len++\n\t\treturn\n\t}\n\tif pos > l.len {\n\t\tfmt.Println(\"Position can not be greater than list size\")\n\t\treturn\n\t}\n\tn := l.GetAt(pos)\n\tif n != nil {\n\t\tn.prev = &newNode\n\t}\n\tnewNode.next = n\n\tprevNode := l.GetAt(pos - 1)\n\tif prevNode != nil {\n\t\tprevNode.next = &newNode\n\t}\n\tnewNode.prev = prevNode\n\t// change tail with newly added node only if no node present at given position\n\tif n == nil {\n\t\tl.tail = &newNode\n\t} else if pos == 0 {\n\t\t// change head with newly added node only if adding node at 0th position\n\t\tl.head = &newNode\n\t}\n\tl.len++\n}", "func (ll *LinkedList) InsertAt(t Item, pos int) error {\n\tll.lock.RLock()\n\tdefer ll.lock.RUnlock()\n\n\tif ll.head == nil || pos < 0 || ll.count < pos {\n\t\treturn fmt.Errorf(\"Index out of bounds\")\n\t}\n\n\tnewNode := Node{t, nil}\n\tcurrent := ll.head\n\tindex := 0\n\n\tif pos == 0 {\n\t\tnewNode.next = ll.head\n\t\tll.head = &newNode\n\t\tll.count++\n\t\treturn nil\n\t}\n\n\tfor index < pos-2 {\n\t\tindex++\n\t\tcurrent = current.next\n\t}\n\n\tnewNode.next = current.next\n\tcurrent.next = &newNode\n\tll.count++\n\treturn nil\n}", "func (ref Ref) Insert(x *Term, pos int) Ref {\n\tswitch {\n\tcase pos == len(ref):\n\t\treturn ref.Append(x)\n\tcase pos > len(ref)+1:\n\t\tpanic(\"illegal index\")\n\t}\n\tcpy := make(Ref, len(ref)+1)\n\tcopy(cpy, ref[:pos])\n\tcpy[pos] = x\n\tcopy(cpy[pos+1:], ref[pos:])\n\treturn cpy\n}", "func (s *nodeBlock) insertItemAt(index int, item Metadata) {\n\t_ = s.items[maxItems-1-s.itemsSize]\n\tcopy(s.items[index+1:], s.items[index:])\n\ts.items[index] = item\n\ts.itemsSize++\n\ts.markDirty()\n}", "func (t *StringSlice) InsertAt(i int, s string) *StringSlice {\n\tif i < 0 || i >= len(t.items) {\n\t\treturn t\n\t}\n\tres := []string{}\n\tres = append(res, t.items[:0]...)\n\tres = append(res, s)\n\tres = append(res, t.items[i:]...)\n\tt.items = res\n\treturn t\n}", "func (o *KeyValueOrdered) Insert(key Key, idx int, value Value) {\n\to.Remove(key)\n\to.m[key] = idx\n\to.shift(idx, len(o.s), 1)\n\to.s = append(append(append(make([]KeyValueCapsule, 0, len(o.s)+1), o.s[:idx]...), KeyValueCapsule{key, value}), o.s[idx:]...)\n}", "func (lst *List) InsertAt(idx int, n Val_t){\n\tif(idx < 0 || idx > lst.Len){\n\t\tpanic(\"index is out of boundary\")\n\t}\n\n\tnewNode := &ListNode{Val:n}\n\n\tdummy := &ListNode{Val:math.MaxInt32}\n\tdummy.Next = lst.Head\n\tpre,cur := dummy,dummy.Next\n\tfor idx > 0{\n\t\tpre = cur\n\t\tcur = cur.Next\n\t\tidx--\n\t}\n\n\t//insert new node\n\tpre.Next = newNode\n\tnewNode.Next = cur\n\t\n\t//update the ref of head and tail\n\tlst.Len++\n\tlst.Head = dummy.Next\n\n\ttail := lst.Head\n\tfor tail.Next != 
nil{\n\t\ttail = tail.Next\n\t}\n\tlst.Tail = tail\n}", "func (v *Data) Insert(idx int, val PicData) {\n\tdv := *v\n\t*v = append(append(append(make(Data, 0, len(dv)+1), dv[:idx]...), val), dv[idx:]...)\n}", "func (t *Indexed) Insert(index, value int) {\n\tnewNode := NewNodeWithValue(index, value)\n\tl, r := t.split(t.root, index-1)\n\txl, xr := t.split(r, index)\n\tif xl == nil {\n\t\tt.size++\n\t}\n\tl = t.merge(l, newNode)\n\tt.root = t.merge(l, xr)\n}", "func (s *nodeBlock) insertChildAt(index int, n *nodeBlock) {\n\t_ = s._children[maxChildren-1-s.childrenSize]\n\tcopy(s._children[index+1:], s._children[index:])\n\tcopy(s.childrenOffset[index+1:], s.childrenOffset[index:])\n\ts._children[index] = n\n\ts.childrenOffset[index] = n.offset\n\ts.childrenSize++\n\ts.markDirty()\n}", "func insertIndex(index []byte, c byte, idx int) {\n\t// Append to \"grow\" the slice, should never reallocate so we don't need to\n\t// return the slice to the caller since the underlying byte array has been\n\t// modified as desired.\n\tindex = append(index, c)\n\tcopy(index[idx+1:], index[idx:])\n\tindex[idx] = c\n}", "func (d *Deque[T]) Insert(pos int, value T) {\n\tif pos < 0 || pos > d.size {\n\t\treturn\n\t}\n\tif pos == 0 {\n\t\td.PushFront(value)\n\t\treturn\n\t}\n\tif pos == d.size {\n\t\td.PushBack(value)\n\t\treturn\n\t}\n\tseg, pos := d.pos(pos)\n\tif seg < d.segUsed()-seg {\n\t\t// seg is closer to the front\n\t\td.moveFrontInsert(seg, pos, value)\n\t} else {\n\t\t// seg is closer to the back\n\t\td.moveBackInsert(seg, pos, value)\n\t}\n\td.size++\n\tif d.segUsed() >= len(d.segs) {\n\t\td.expand()\n\t}\n}", "func (list *Linked_List) Insert_Before_At(index int, data interface{}) {\n\tlist.Insert_Before(list.At(index), data)\n}", "func insert(x []int, index, value int) []int {\n\t// Grow the slice by one element.\n\tx = x[0 : len(x)+1]\n\t// Create room for new element.\n\tcopy(x[index+1:], x[index:])\n\t// Insert the new element.\n\tx[index] = value\n\treturn x\n}", "func (gdt *Array) Insert(pos Int, value Variant) {\n\targ0 := gdt.getBase()\n\targ1 := pos.getBase()\n\targ2 := value.getBase()\n\n\tC.go_godot_array_insert(GDNative.api, arg0, arg1, arg2)\n}", "func (r *Rope) Insert(at int, str string) error {\n\treturn r.InsertBytes(at, []byte(str))\n}", "func insert(a []int, index int, value int) []int {\r\n\tif len(a) == index { // nil or empty slice or after last element\r\n\t\treturn append(a, value)\r\n\t}\r\n\ta = append(a[:index+1], a[index:]...) 
// index < len(a)\r\n\ta[index] = value\r\n\treturn a\r\n}", "func (list *List) Insert(idx int, element interface{}) error {\n\tif list.Length() < idx || idx < 0 {\n\t\treturn fmt.Errorf(\"index out of range\")\n\t}\n\tlist_ := []interface{}(*list)\n\t*list = append(list_[:idx], append([]interface{}{element}, list_[idx:]...)...)\n\treturn nil\n}", "func (node *LinkList) InsertAtPos(data, pos int) {\n\tif node == nil {\n\t\tfmt.Println(\"list is empty\")\n\t\treturn\n\t}\n\tnewNode := new(LinkList)\n\tnewNode.data = data\n\tnewNode.next = nil\n\tcount := 1\n\ttemp := node\n\tfor temp.next != nil && count < pos {\n\t\ttemp = temp.next\n\t\tcount++\n\t}\n\tif count < pos {\n\t\tfmt.Println(\"Not enough element present in the list\")\n\t\treturn\n\t}\n\tnewNode.next = temp.next\n\ttemp.next = newNode\n}", "func (a *DynamicArray) Insert(index int, value int) error {\n\tif index < 0 || index > a.len {\n\t\treturn ErrArrIndexOutOfBound\n\t}\n\tif a.len == a.cap {\n\t\ta.resize(a.cap * 2)\n\t}\n\n\t// if insert value in the middle of array, shift the elements after index\n\tif index < a.len {\n\t\tcopy(a.data[index+1:], a.data[index:])\n\t}\n\ta.data[index] = value\n\ta.len++\n\n\treturn nil\n}", "func InsertIntAt(x []int, y int, index int) ([]int, error) {\n\tif index < 0 || index > len(x) {\n\t\treturn x, fmt.Errorf(\"out of bound\")\n\t}\n\tx = append(x, 0)\n\tcopy(x[index+1:], x[index:])\n\tx[index] = y\n\treturn x, nil\n}", "func (li *List) InsertAt(pos int, item IPanel) *ListItem {\n\n\tlitem := newListItem(li, item)\n\tli.ItemScroller.InsertAt(pos, litem)\n\tlitem.Panel.Subscribe(OnMouseDown, litem.onMouse)\n\tlitem.Panel.Subscribe(OnCursorEnter, litem.onCursor)\n\treturn litem\n}", "func InsertIntoSlice(slice []interface{}, index int, value interface{}) []interface{} {\n\tvar newSlice []interface{}\n\n\t// Grow the slice by one element.\n\tif cap(slice) == len(slice) {\n\t\tnewSlice = make([]interface{}, len(slice)+1)\n\t\tcopy(newSlice[0:index], slice[0:index])\n\t} else {\n\t\tnewSlice = slice[0 : len(slice)+1]\n\t}\n\n\t// Use copy to move the upper part of the slice out of the way and open a hole.\n\tcopy(newSlice[index+1:], slice[index:])\n\t// Store the new value.\n\tnewSlice[index] = value\n\t// Return the result.\n\treturn newSlice\n}", "func Insert(str string, pos int, value string) string {\n\treturn string([]rune(str)[:pos]) + value + string([]rune(str)[pos:])\n}", "func (nl *nodeList) insert(i int, n *Node) {\n\t// Add a nil value to the end of the slice, to make room for the new Node.\n\tnl.elements = append(nl.elements, nil)\n\t// Copy values from the insertion point to the right by one\n\tcopy(nl.elements[i+1:], nl.elements[i:])\n\t// Set the value at the insertion point\n\tnl.elements[i] = n\n}", "func insert(slice []int, index, value int) []int {\n // Grow the slice by one element.\n slice = slice[0 : len(slice)+1]\n // Use copy to move the upper part of the slice out of the way and open a hole.\n copy(slice[index+1:], slice[index:])\n // Store the new value.\n slice[index] = value\n // Return the result.\n return slice\n}", "func (kg *groupKeyList) InsertAt(key flux.GroupKey) int {\n\tif kg.Last().Less(key) {\n\t\treturn len(kg.elements)\n\t}\n\treturn sort.Search(len(kg.elements), func(i int) bool {\n\t\treturn !kg.elements[i].key.Less(key)\n\t})\n}", "func (s *SliceInt) Insert(index, value int) *SliceInt {\n\t// Grow the slice by one element.\n\ts.data = s.data[0 : len(s.data)+1]\n\t// Use copy to move the upper part of the slice out of the way and open a 
hole.\n\tcopy(s.data[index+1:], s.data[index:])\n\t// Store the new value.\n\ts.data[index] = value\n\t// Return the result.\n\treturn s\n}", "func (o *Outline) Insert(index uint, item *OutlineItem) {\n\tl := uint(len(o.items))\n\tif index > l {\n\t\tindex = l\n\t}\n\n\to.items = append(o.items[:index], append([]*OutlineItem{item}, o.items[index:]...)...)\n}", "func (b *BTree) insertInNodeAtIdx(n *memNode, item *Item, i int) {\n\ts := n.node.Items\n\ts = append(s, nil)\n\tif i < len(s) {\n\t\tcopy(s[i+1:], s[i:])\n\t}\n\ts[i] = item\n\tn.node.Items = s\n}", "func (v *IntVec) Insert(idx int, val ...int) {\n\tdv := *v\n\tdv = append(dv, val...)\n\tcopy(dv[idx+len(val):], dv[idx:])\n\tcopy(dv[idx:], val)\n\t*v = dv\n}", "func (s *Slab) Insert(val interface{}) int {\n\tkey := s.next\n\ts.insertAt(key, val)\n\treturn key\n}", "func (recv *ValueArray) Insert(index uint32, value *Value) *ValueArray {\n\tc_index_ := (C.guint)(index)\n\n\tc_value := (*C.GValue)(C.NULL)\n\tif value != nil {\n\t\tc_value = (*C.GValue)(value.ToC())\n\t}\n\n\tretC := C.g_value_array_insert((*C.GValueArray)(recv.native), c_index_, c_value)\n\tretGo := ValueArrayNewFromC(unsafe.Pointer(retC))\n\n\treturn retGo\n}", "func (a *Array) Insert(index uint, v int) error {\n\t// data is full\n\tif a.Len() == uint(cap(a.data)) {\n\t\treturn errors.New(\"array is full\")\n\t}\n\n\t// index is out of range\n\tif a.IsIndexOutOfRange(index) {\n\t\treturn errors.New(\"out of index range\")\n\t}\n\n\tfor i := a.Len(); i > index; i++ {\n\t\ta.data[i] = a.data[i-1]\n\t}\n\ta.data[index] = v\n\ta.length++\n\treturn nil\n}", "func (d *DirectAddress) Insert(key int, value interface{}) {\n\tif err := d.validateKey(key); err != nil {\n\t\treturn\n\t}\n\td.array[key-d.uMin] = value\n}", "func (a *Array) Insert(index uint, v int) error {\n\tif a.Len() == uint(cap(a.data)) {\n\t\treturn errors.New(\"full array\")\n\t}\n\t// Call the Insert function directly, index must be an existing subscript\n \n\tif index != a.length && a.isIndexOutOfRange(index) {\n\t\treturn errors.New(\"out of index range\")\n\t}\n\tfor i := a.length; i > index; i-- {\n\t\ta.data[i] = a.data[i-1]\n\t}\n\ta.data[index] = v\n\ta.length++\n\treturn nil\n}", "func (hat *HashedArrayTree) Insert(index int, values ...interface{}) error {\n\tlenValues := len(values)\n\tnewSize := hat.size + lenValues\n\tif err := hat.resize(newSize); err != nil {\n\t\treturn err\n\t}\n\t// Move items in the middle of the buffer by the length of values\n\tfor i := hat.size - 1; i >= index; i-- {\n\t\tbti, bli := hat.topIndex(i), hat.leafIndex(i)\n\t\tati, ali := hat.topIndex(i+lenValues), hat.leafIndex(i+lenValues)\n\t\that.top[ati][ali] = hat.top[bti][bli]\n\t}\n\tfor i, j := index, 0; i < index+lenValues; i, j = i+1, j+1 {\n\t\tti, li := hat.topIndex(i), hat.leafIndex(i)\n\t\that.top[ti][li] = values[j]\n\t}\n\that.size = newSize\n\treturn nil\n}", "func (oi *OutlineItem) Insert(index uint, item *OutlineItem) {\n\tl := uint(len(oi.items))\n\tif index > l {\n\t\tindex = l\n\t}\n\n\toi.items = append(oi.items[:index], append([]*OutlineItem{item}, oi.items[index:]...)...)\n}", "func (list *List) Insert(index int, values ...interface{}) {\n\n\tif !list.withinRange(index) {\n\t\t// Append\n\t\tif index == list.size {\n\t\t\tlist.Add(values...)\n\t\t}\n\t\treturn\n\t}\n\n\tlist.size += len(values)\n\n\tvar beforeElement *element\n\tfoundElement := list.first\n\tfor e := 0; e != index; e, foundElement = e+1, foundElement.next {\n\t\tbeforeElement = foundElement\n\t}\n\n\tif foundElement == list.first 
{\n\t\toldNextElement := list.first\n\t\tfor i, value := range values {\n\t\t\tnewElement := &element{value: value}\n\t\t\tif i == 0 {\n\t\t\t\tlist.first = newElement\n\t\t\t} else {\n\t\t\t\tbeforeElement.next = newElement\n\t\t\t}\n\t\t\tbeforeElement = newElement\n\t\t}\n\t\tbeforeElement.next = oldNextElement\n\t} else {\n\t\toldNextElement := beforeElement.next\n\t\tfor _, value := range values {\n\t\t\tnewElement := &element{value: value}\n\t\t\tbeforeElement.next = newElement\n\t\t\tbeforeElement = newElement\n\t\t}\n\t\tbeforeElement.next = oldNextElement\n\t}\n}", "func (ll *Doubly[T]) AddAtBeg(val T) {\n\tll.lazyInit()\n\tll.insertValue(val, ll.Head)\n}", "func insertIntoPosition(data []string, insertion string) []string {\n\t// I am really sorry for this loop. I have not figured out why slice concatenation doesn't work\n\tvar newData []string\n\tdataLength := len(data)\n\tposition := pickNumberRange(dataLength + 1)\n\tif position == dataLength {\n\t\tnewData = append(data, []string{insertion}...)\n\t} else {\n\t\tfor i, entry := range data {\n\t\t\tif i == position {\n\t\t\t\tnewData = append(newData, []string{insertion}...)\n\t\t\t}\n\t\t\tnewData = append(newData, entry)\n\t\t}\n\t}\n\treturn newData\n}", "func insertAt(i int, char string, perm string) string {\n\tstart := perm[0:i]\n\tend := perm[i:len(perm)]\n\treturn start + char + end\n}", "func (e *ObservableEditableBuffer) Insert(p0 OffsetTuple, s []byte, nr int) {\n\tbefore := e.getTagStatus()\n\tdefer e.notifyTagObservers(before)\n\n\te.f.Insert(p0, s, nr, e.seq)\n\tif e.seq < 1 {\n\t\te.f.FlattenHistory()\n\t}\n\te.inserted(p0, s, nr)\n}", "func (s *ItemScroller) InsertAt(pos int, item IPanel) {\n\n\t// Validates position\n\tif pos < 0 || pos > len(s.items) {\n\t\tpanic(\"ItemScroller.InsertAt(): Invalid position\")\n\t}\n\titem.GetPanel().SetVisible(false)\n\n\t// Insert item in the items array\n\ts.items = append(s.items, nil)\n\tcopy(s.items[pos+1:], s.items[pos:])\n\ts.items[pos] = item\n\n\t// Insert item in the scroller\n\ts.Panel.Add(item)\n\ts.autoSize()\n\ts.recalc()\n\n\t// Scroll bar should be on the foreground,\n\t// in relation of all the other child panels.\n\tif s.vscroll != nil {\n\t\ts.Panel.SetTopChild(s.vscroll)\n\t}\n\tif s.hscroll != nil {\n\t\ts.Panel.SetTopChild(s.hscroll)\n\t}\n}", "func (l *LinkedList) Insert(index int, value interface{}) error {\n\t// Abort if index is not valid\n\tif index > l.length+1 || index <= 0 {\n\t\treturn errors.New(\"index is not valid. 
It should be between 1 and the length of the list + 1\")\n\t}\n\n\tp := l.head\n\tif p == nil {\n\t\tl.head = &Node{value: value}\n\t\tl.length++\n\t\treturn nil\n\t}\n\n\tif index == 1 {\n\t\tnewNode := &Node{value: value}\n\t\tl.Prepend(newNode)\n\t\treturn nil\n\t}\n\n\t// loop until the place right before the desired spot (index - 2)\n\tfor i := 0; i < index-2; i++ {\n\t\tp = p.next\n\t}\n\n\t// Save next node\n\taux := p.next\n\tp.next = &Node{value: value, next: aux}\n\tl.length++\n\treturn nil\n}", "func (jz *Jzon) Insert(k string, v *Jzon) (err error) {\n\tif jz.Type != JzTypeObj {\n\t\treturn expectTypeOf(JzTypeObj, jz.Type)\n\t}\n\n\tjz.data.(map[string]*Jzon)[k] = v\n\treturn nil\n}", "func (sl *stringList) insert(i int, aString string) {\n\t// Add a empty string value to the end of the slice, to make room for the new element.\n\tsl.elements = append(sl.elements, \"\")\n\t// Copy values from the insertion point to the right by one\n\tcopy(sl.elements[i+1:], sl.elements[i:])\n\t// Set the value at the insertion point\n\tsl.elements[i] = aString\n}", "func insert(array []int8, val int8, pos int) []int8 {\r\n\tvar length = len(array)\r\n\tvar tempArray = make([]int8, length+1)\r\n\tfmt.Printf(\"\\n\")\r\n\r\n\t// copy each value from start to position\r\n\t// leave the pos we want to fill empty and copy each value after that\r\n\t// eg at pos 3: 1 2 3 x 4 5 6 7 -> 1 2 3 21 4 5 6 7\r\n\r\n\tfor i := 0; i < length; i++ {\r\n\t\tif i < pos {\r\n\t\t\ttempArray[i] = array[i]\r\n\t\t} else {\r\n\t\t\ttempArray[i+1] = array[i]\r\n\t\t}\r\n\t}\r\n\r\n\ttempArray[pos] = val\r\n\treturn tempArray\r\n}", "func Insert(a []int, value int) int {\n\tvar index = 1\n\tvar b = make([]int, index)\n\tcopy(b, a[:index])\n\tb = append(b, value)\n\tb = append(b, a[index:]...)\n\n\treturn index\n}", "func (sl *Slice) Insert(k Ki, idx int) {\n\tSliceInsert((*[]Ki)(sl), k, idx)\n}", "func (t *Tab) InsertAt(name string, n int, child Control) {\n\tc := (*C.uiControl)(nil)\n\tif child != nil {\n\t\tc = touiControl(child.LibuiControl())\n\t}\n\tcname := C.CString(name)\n\n\tC.uiTabInsertAt(t.t, cname, C.int(n), c)\n\tfreestr(cname)\n\tch := make([]Control, len(t.children) + 1)\n\t// and insert into t.children at the right place\n\tcopy(ch[:n], t.children[:n])\n\tch[n] = child\n\tcopy(ch[n + 1:], t.children[n:])\n\tt.children = ch\n}", "func (p *SliceOfMap) Insert(i int, obj interface{}) ISlice {\n\tif p == nil || len(*p) == 0 {\n\t\treturn p.ConcatM(obj)\n\t}\n\n\t// Insert the item before j if pos and after j if neg\n\tj := i\n\tif j = absIndex(len(*p), j); j == -1 {\n\t\treturn p\n\t}\n\tif i < 0 {\n\t\tj++\n\t}\n\tif elems, err := ToSliceOfMapE(obj); err == nil {\n\t\tif j == 0 {\n\t\t\t*p = append(*elems, *p...)\n\t\t} else if j < len(*p) {\n\t\t\t*p = append(*p, *elems...) // ensures enough space exists\n\t\t\tcopy((*p)[j+len(*elems):], (*p)[j:]) // shifts right elements drop added\n\t\t\tcopy((*p)[j:], *elems) // set new in locations vacated\n\t\t} else {\n\t\t\t*p = append(*p, *elems...)\n\t\t}\n\t}\n\treturn p\n}", "func (p *IntVector) Insert(i int, x int)\t{ p.Vector.Insert(i, x) }", "func (bids *Bids) insert(index int, Bid Bid) {\n\tif len(bids.ticks) == index { // nil or empty slice or after last element\n\t\tbids.ticks = append(bids.ticks, Bid)\n\t}\n\tbids.ticks = append(bids.ticks[:index+1], bids.ticks[index:]...) 
// index < len(a)\n\tbids.ticks[index] = Bid\n}", "func (c *OrderedMap) Insert(index int, key string, value interface{}) (interface{}, bool) {\n\toldValue, exists := c.Map[key]\n\tc.Map[key] = value\n\tif exists {\n\t\treturn oldValue, true\n\t}\n\tif index == len(c.Keys) {\n\t\tc.Keys = append(c.Keys, key)\n\t} else {\n\t\tc.Keys = append(c.Keys[:index+1], c.Keys[index:]...)\n\t\tc.Keys[index] = key\n\t}\n\treturn nil, false\n}", "func (l *List) Insert(pos int, v interface{}) error {\n\tif pos == 0 {\n\t\tl.head = &Node{v, l.head}\n\t\treturn nil\n\t}\n\n\tp := l.head\n\ti := pos - 1\n\tfor i != 0 {\n\t\tif p.next == nil {\n\t\t\treturn fmt.Errorf(\"%v is not a valid position for a %v long list\", pos, pos-i)\n\t\t}\n\t\tp = p.next\n\t\ti--\n\t}\n\n\tp.next = &Node{v, p.next}\n\treturn nil\n}", "func SliceInsert(sl *[]Ki, k Ki, idx int) {\n\tkl := len(*sl)\n\tif idx < 0 {\n\t\tidx = kl + idx\n\t}\n\tif idx < 0 { // still?\n\t\tidx = 0\n\t}\n\tif idx > kl { // last position allowed for insert\n\t\tidx = kl\n\t}\n\t// this avoids extra garbage collection\n\t*sl = append(*sl, nil)\n\tif idx < kl {\n\t\tcopy((*sl)[idx+1:], (*sl)[idx:kl])\n\t}\n\t(*sl)[idx] = k\n}", "func (b *Bag) Insert(val rune) {\n\tb.data[val]++\n}", "func (tv *TextView) InsertAtCursor(txt []byte) {\n\twupdt := tv.TopUpdateStart()\n\tdefer tv.TopUpdateEnd(wupdt)\n\tif tv.HasSelection() {\n\t\ttbe := tv.DeleteSelection()\n\t\ttv.CursorPos = tbe.AdjustPos(tv.CursorPos, AdjustPosDelStart) // move to start if in reg\n\t}\n\ttbe := tv.Buf.InsertText(tv.CursorPos, txt, true, true)\n\tif tbe == nil {\n\t\treturn\n\t}\n\tpos := tbe.Reg.End\n\tif len(txt) == 1 && txt[0] == '\\n' {\n\t\tpos.Ch = 0 // sometimes it doesn't go to the start..\n\t}\n\ttv.SetCursorShow(pos)\n\ttv.SetCursorCol(tv.CursorPos)\n}", "func Insert(slice []int, index, value int) []int {\n\treturn append(slice[:index], append([]int{value}, slice[index:]...)...)\n}", "func Insert(value, index int) {\n\tnewNode := &Node{value: value, freq: 1}\n\tif index >= size {\n\t\tfmt.Println(\"\\n-- Not enough space in array. 
--\")\n\t\treturn\n\t}\n\tif bst[index] == nil {\n\t\tbst[index] = newNode\n\t} else if bst[index].value == value {\n\t\tbst[index].freq++\n\t} else {\n\t\tif value < bst[index].value {\n\t\t\tInsert(value, (index*2)+1)\n\t\t} else {\n\t\t\tInsert(value, (index*2)+2)\n\t\t}\n\t}\n}", "func (v *Int32Vec) Insert(idx int, val ...int32) {\n\tdv := *v\n\tdv = append(dv, val...)\n\tcopy(dv[idx+len(val):], dv[idx:])\n\tcopy(dv[idx:], val)\n\t*v = dv\n}", "func (heap *MinHeap) Insert(val int) {\n\theap.elements = append(heap.elements, val)\n\theap.minHeapify(heap.getParent(heap.lastIndex()))\n}", "func (list *PyList) Insert(index int, obj *PyObject) error {\n\tif C.PyList_Insert(list.ptr, C.long(index), obj.ptr) == -1 {\n\t\treturn ErrCouldNotInsert\n\t}\n\n\treturn nil\n}", "func (i *Input) Insert(r rune) {\n\ti.Buffer.InsertRune(r, i.Pos)\n\ti.Pos++\n}", "func (s *Series) Insert(t time.Time, value float64) {\n\tb := &Bucket{s.floor(t), value}\n\tidx := s.index(b.T)\n\ts.buckets[idx] = b\n}", "func insertColumn(row table.Row, index int, item interface{}) table.Row {\n\trow = append(row, item)\n\n\tcopy(row[index+1:], row[index:])\n\n\trow[index] = item\n\n\treturn row\n}", "func (w *Window) Insert(e interface{}) {\n\tw.insertAt(time.Now(), e)\n}", "func (sll *SingleLinkedList) Insert(index int, element interface{}) {\n\t// Panic if index is smaller 0\n\tif index < 0 {\n\t\tpanic(\"index < 0\")\n\t}\n\n\t// Insert as first element\n\tif index == 0 {\n\t\tsll.first = &singleNode{value: element, next: sll.first}\n\t\tsll.length++\n\t\treturn\n\t}\n\n\t// Get node before the place where the new node is added\n\tn := sll.getNode(index - 1)\n\t// New node\n\tnewNode := &singleNode{value: element}\n\t// If there is a node after the index where to insert, attach it to the\n\t// new node\n\tif n.next != nil {\n\t\tnewNode.next = n.next\n\t}\n\t// Insert new node\n\tn.next = newNode\n\tsll.length++\n}", "func (list *ArrayList[T]) Insert(index int, ele T) bool {\n\tif index < 0 || index > list.Size() {\n\t\treturn false\n\t}\n\tif index == list.Size() {\n\t\treturn list.Add(ele)\n\t}\n\ttemp := append(list.elems[:index], ele)\n\tlist.elems = append(temp, list.elems[index+1:]...)\n\treturn true\n}", "func (r *root) Insert(x, y float64, nval interface{}) {\n\telems := make([]interface{}, 1, 1)\n\telems[0] = nval\n\tr.rootNode.insert(x, y, elems, nil, r)\n}", "func InsertToSlice(nums []int, v, pos int) []int {\n\tif pos > len(nums) || pos < 0 {\n\t\treturn nil\n\t}\n\tvar ret []int\n\tret = append(ret, nums[0:pos]...)\n\tret = append(ret, v)\n\tret = append(ret, nums[pos:]...)\n\treturn ret\n}", "func (pu *PostUpdate) AddUpdateAt(i int64) *PostUpdate {\n\tpu.mutation.AddUpdateAt(i)\n\treturn pu\n}", "func (vector *Vector) Insert(i int, element interface{}) {\n\t//a = append(a[:i], append([]T{x}, a[i:]...)...)\n\t// NOTE The second append creates a new slice with its own underlying storage and copies\n\t// elements in a[i:] to that slice, and these elements are then copied back to slice a\n\t// (by the first append). 
The creation of the new slice (and thus memory garbage) and the\n\t// second copy can be avoided by using an alternative way:\n\n\t*vector = append(*vector, 0 /* use the zero value of the element type */)\n\tcopy((*vector)[i+1:], (*vector)[i:])\n\t(*vector)[i] = element\n}", "func (a Slice[T]) Insert(index int, elements ...T) Slice[T] {\n\tresult := Slice[T]{}\n\tresult = append(result, a[0:index]...)\n\tresult = append(result, elements...)\n\tresult = append(result, a[index:]...)\n\treturn result\n}", "func Insert(pts PTslice, pt *PT) PTslice {\n\ti := pts.locate(pt.Time)\n\tnpts := append(pts, &PT{})\n\tcopy(npts[i+1:], npts[i:])\n\tnpts[i] = pt\n\treturn npts\n}", "func (b *Buffer) Insert(o, n int) {\n\tp := make([]*Tile, n)\n\tb.Tiles = append(b.Tiles[:o], append(p, b.Tiles[o:]...)...)\n}", "func (t *T) Insert(s string, v interface{}) {\n\tt.root = insert(t.root, nil, s, t, v)\n}", "func (q *quartileIndex) Insert(n int, at int) error {\n\tif n%4 != 0 {\n\t\tpanic(\"can only extend by nibbles (multiples of 4)\")\n\t}\n\terr := q.bits.Insert(n, at)\n\tif err != nil {\n\t\treturn err\n\t}\n\tnewlen := q.bits.Len()\n\tfor i := 0; i < 3; i++ {\n\t\tq.adjust(i, n, at, (newlen * (i + 1) / 4))\n\t}\n\treturn nil\n}", "func (q *PriorityQueue) Insert(value interface{}) {\n\tq.nodes = append(q.nodes, value)\n\tq.upHeap(len(q.nodes) - 1)\n}", "func Push(tx *bolt.Tx, name []byte, position *Position, value, defaultKey []byte) error {\n\tbucket, err := tx.CreateBucketIfNotExists(name)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcursor := bucket.Cursor()\n\tkey, _ := position.fn(cursor)\n\tif key == nil {\n\t\tkey = defaultKey\n\t} else {\n\t\tkey = addToKey(key, position.delta)\n\t}\n\n\tif err := bucket.Put(key, value); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (s *S43_SmallSetOfIntegers) Insert(n int, done chan bool) {\n\tdefer close(done)\n\n\ti := s.SEARCH(n)\n\tif i < s.size {\n\t\tdone <- false\n\t\treturn // nothing to do\n\t}\n\t// not found, insert to the array\n\tif i == s.size && s.size < 100 {\n\t\ts.content[s.size] = n\n\t\ts.size++\n\t\tdone <- true\n\t\treturn\n\t}\n\n\tdone <- false\n\treturn\n}", "func (bst *BinarySearch) Insert(value int) {\n\tbst.lock.Lock()\n\tdefer bst.lock.Unlock()\n\tbst.len++\n\n\tn := &BinarySearchNode{value, nil, nil}\n\tif bst.root == nil {\n\t\tbst.root = n\n\t} else {\n\t\tinsertBinarySearchNode(bst.root, n)\n\t}\n}", "func (tree *Tree) Insert(value int) {\n\ttree.root, _ = tree.root.Insert(value)\n}", "func (v *Value) insert(params ...interface{}) (interface{}, error) {\n\tif len(params) != 2 {\n\t\treturn nil, newParamLenErr(len(params), 2)\n\t}\n\n\tvar (\n\t\tok bool\n\t\tkey string\n\t\tvalue string\n\t)\n\n\tkey, ok = params[0].(string)\n\tif !ok {\n\t\treturn nil, newParamTypeErr(params[0], key)\n\t}\n\n\tvalue, ok = params[1].(string)\n\tif !ok {\n\t\treturn nil, newParamTypeErr(params[1], value)\n\t}\n\n\t(*v)[key] = value\n\treturn key, nil\n}", "func (v *TreeStore) Insert(parent *TreeIter, position int) *TreeIter {\n\tvar ti C.GtkTreeIter\n\tvar cParent *C.GtkTreeIter\n\tif parent != nil {\n\t\tcParent = parent.native()\n\t}\n\tC.gtk_tree_store_insert(v.native(), &ti, cParent, C.gint(position))\n\titer := &TreeIter{ti}\n\treturn iter\n}", "func (puo *PostUpdateOne) AddUpdateAt(i int64) *PostUpdateOne {\n\tpuo.mutation.AddUpdateAt(i)\n\treturn puo\n}", "func (p *PriorityQueue) Insert(v interface{}, priority float64) {\n\t_, ok := p.lookup[v]\n\tif ok {\n\t\treturn\n\t}\n\n\tnewItem := &item{\n\t\tvalue: v,\n\t\tpriority: 
priority,\n\t}\n\theap.Push(p.itemHeap, newItem)\n\tp.lookup[v] = newItem\n}", "func (c *Context) PutAt(value []byte, globalID uint32) (uint32, error) {\n\tc.lock.Lock()\n\tdefer c.lock.Unlock()\n\tsp, err := c.locate(globalID)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tindex, err := c.putAt(value, sp)\n\tif err != nil {\n\t\treturn index, err\n\t}\n\treturn index, nil\n}", "func insert(a []interface{}, c interface{}, i int) []interface{} {\n\treturn append(a[:i], append([]interface{}{c}, a[i:]...)...)\n}", "func (n *Node) Insert(v Boxer) {\n\t// If this node does not contain the given box return.\n\tif !n.boundingBox.ContainsCenter(v.Box()) {\n\t\treturn\n\t}\n\n\ti := n.index(v)\n\n\tif i == -1 {\n\t\tn.values = append(n.values, v)\n\n\t\tif len(n.values) > maxNodeSize {\n\t\t\tn.split()\n\t\t}\n\t} else {\n\t\tn.children[i].Insert(v)\n\t}\n}", "func addAt(z, x nat, i int) {\n\tif n := len(x); n > 0 {\n\t\tif c := addVV(z[i:i+n], z[i:], x); c != 0 {\n\t\t\tj := i + n\n\t\t\tif j < len(z) {\n\t\t\t\taddVW(z[j:], z[j:], c)\n\t\t\t}\n\t\t}\n\t}\n}", "func (l *littr) Insert(s string, i int) {\n\tl.code = l.code[:i] + s + l.code[i:]\n}", "func (m *movingAverage) insertValue(value float64) {\n\n\tm.ring[m.nextIdx] = value\n\tm.nextIdx = (m.nextIdx + 1) % m.sampleSize\n\tif m.nextIdx == 0 {\n\t\tm.samplingComplete = true\n\t}\n}", "func (f * LinkedList) insertAfterIdx(idx int, newElmt *Element) () {\n\tif (idx < f.length) {\n\t\tif (idx == f.length - 1){\n\t\t\tf.insertLast(newElmt)\n\t\t} else {\n\t\t\tel := f.getElmt(idx)\n\t\t\tnewElmt.prev = el\n\t\t\tnewElmt.next = el.next\n\t\t\tel.next.prev = newElmt\n\t\t\tel.next = newElmt\n\t\t\tf.length++\n\t\t}\n\t}\n}", "func (rad *Radix) Insert(key string, value interface{}) error {\n\trad.lock.Lock()\n\tif value == nil {\n\t\treturn errors.New(\"undefined value\")\n\t}\n\tdefer rad.lock.Unlock()\n\treturn rad.root.insert([]rune(key), value)\n}", "func tabInsert(ls *LuaState) int {\n\te := _auxGetN(ls, 1, TAB_RW) + 1 /* first empty element */\n\tvar pos int64 /* where to insert new element */\n\tswitch luaGetTop(ls) {\n\tcase 2: /* called with only 2 arguments */\n\t\tpos = e /* insert new element at the end */\n\tcase 3:\n\t\tpos = ls.CheckInteger(2) /* 2nd argument is the position */\n\t\tls.ArgCheck(1 <= pos && pos <= e, 2, \"position out of bounds\")\n\t\tfor i := e; i > pos; i-- { /* move up elements */\n\t\t\tluaGetI(ls, 1, i-1)\n\t\t\tluaSetI(ls, 1, i) /* t[i] = t[i - 1] */\n\t\t}\n\tdefault:\n\t\treturn ls.Error2(\"wrong number of arguments to 'insert'\")\n\t}\n\tluaSetI(ls, 1, pos) /* t[pos] = v */\n\treturn 0\n}" ]
[ "0.7564645", "0.74996585", "0.74422127", "0.7220725", "0.7071093", "0.7002865", "0.6979025", "0.6906865", "0.68581086", "0.67563075", "0.6698297", "0.6635002", "0.6630728", "0.6617596", "0.65675616", "0.656548", "0.65653753", "0.65562993", "0.6556297", "0.6522896", "0.64958084", "0.649512", "0.649416", "0.64725995", "0.6452375", "0.64365405", "0.6434901", "0.64305896", "0.64246505", "0.64179707", "0.6360455", "0.63514453", "0.63503164", "0.6332454", "0.63017863", "0.62908816", "0.62840754", "0.6279271", "0.62301826", "0.6211637", "0.61984843", "0.61976135", "0.6193649", "0.6170549", "0.6168696", "0.616523", "0.61245865", "0.6120359", "0.61109555", "0.6105867", "0.61056477", "0.61013246", "0.6097958", "0.60967636", "0.6037374", "0.6015977", "0.59919256", "0.598613", "0.5984972", "0.5963876", "0.5963124", "0.5946318", "0.59419227", "0.5933917", "0.59191364", "0.5914705", "0.589966", "0.5884456", "0.5879371", "0.58745325", "0.58680576", "0.58589876", "0.5842282", "0.5839656", "0.5837055", "0.58357316", "0.58276284", "0.5825326", "0.5810308", "0.5808313", "0.5796288", "0.5783696", "0.5777602", "0.57644403", "0.57613975", "0.57610285", "0.57541025", "0.5746701", "0.57421905", "0.5724975", "0.5724624", "0.57150376", "0.5710007", "0.57014126", "0.5677676", "0.5668171", "0.5667061", "0.56615996", "0.56597805", "0.56593573" ]
0.75425845
1
Get value in the given position
func (l *LinkedList) Get(pos int) interface{} {
	if pos > l.length {
		return nil
	}
	node := l.head
	// Position - 1 since we want the value in the given position
	for i := 0; i < pos-1; i++ {
		node = node.Next()
	}
	return node.Value()
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (da *cedar) get(key []byte, from, pos int) *int {\n\tfor ; pos < len(key); pos++ {\n\t\tif value := da.Array[from].Value; value >= 0 && value != ValueLimit {\n\t\t\tto := da.follow(from, 0)\n\t\t\tda.Array[to].Value = value\n\t\t}\n\t\tfrom = da.follow(from, key[pos])\n\t}\n\tto := from\n\tif da.Array[from].Value < 0 {\n\t\tto = da.follow(from, 0)\n\t}\n\treturn &da.Array[to].Value\n}", "func (uni *UniformMatrix3f) Get(pos int) float32 {\n\n\treturn uni.v[pos]\n}", "func (pager *pager) getValue(pointer int) uint8 {\n\tpage := pager.findPage(pointer)\n\tpageAddress := pageAddress(pointer, pager.pagesize)\n\treturn page[pageAddress]\n}", "func (uni *Uniform3fv) GetPos(pos int) float32 {\n\n\treturn uni.v[pos]\n}", "func (seq Sequence) ValueAtOffset(offset int, e expr.Expr) (val float64, found bool) {\n\tif e.IsConstant() {\n\t\tval, found, _ = e.Get(nil)\n\t\treturn\n\t}\n\tif len(seq) == 0 {\n\t\treturn 0, false\n\t}\n\toffset = offset + Width64bits\n\tif offset >= len(seq) {\n\t\treturn 0, false\n\t}\n\tval, wasSet, _ := e.Get(seq[offset:])\n\treturn val, wasSet\n}", "func currentValue(position float64) float64 {\n var result float64\n\n result = 1 / (math.Pow(3, position) * (3 + 2 * (position - 1)))\n\n return result\n}", "func (seq List) Value(i int) interface{} { return seq[i] }", "func (v variable) At(index int) interface{} {\n\tm, ok := v.store.Get(v.Name)\n\tif !ok {\n\t\treturn nil\n\t}\n\tif intArray, ok := m.([]interface{}); ok {\n\t\tif index < 1 || index > len(intArray) {\n\t\t\treturn nil\n\t\t}\n\t\treturn intArray[index-1]\n\t}\n\tif indexable, ok := m.(core.Indexable); ok {\n\t\treturn indexable.At(index)\n\t}\n\tif sequenceable, ok := m.(core.Sequenceable); ok {\n\t\treturn core.BuildSequence(sequenceable.S().At(index))\n\t}\n\treturn nil\n}", "func (this *Tuple) Get(n int) interface{} {\n\titem := this.data[this.Offset(n)]\n\treturn item\n}", "func (uni *Uniform4fv) GetPos(pos int) float32 {\n\n\treturn uni.v[pos]\n}", "func (src *Injects) Get(pos int) interface{} {\n\treturn (*src)[pos]\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func getElemByPosition(data []byte, position uint16) (uint64, uint64, error) {\n\telemCount := uint16UnsafeConvert(data)\n\tif position >= elemCount {\n\t\treturn 0, 0, notFoundErr\n\t}\n\n\toff := uint64(elemCount)*2 + 2\n\tvar i uint16\n\tfor i = 0; i < position; i++ {\n\t\toff += uint64(uint16UnsafeConvert(data[i*2+2: i*2+4]))\n\t}\n\n\telemLen := uint16UnsafeConvert(data[i*2+2: i*2+4])\n\treturn off, uint64(elemLen), nil\n}", "func (v *Posit8x4) Get(i int) Posit8 { return v.impl[i] }", "func (i *IndexIterator) Value() int64 { return i.current.lpos }", "func (obj VECTOR_TYPE) ValueAt(i int) float64 {\n if i < 0 || i >= obj.Dim() {\n panic(\"index out of bounds\")\n }\n if v, ok := obj.values[i]; ok {\n return v.GetValue()\n } else {\n return 0.0\n }\n}", "func (r *Ring) get(p int) interface{} {\n\treturn r.buff[r.mod(p)]\n}", "func (uni *Uniform1fv) Get(pos int, v float32) float32 {\n\n\treturn uni.v[pos]\n}", "func valueAt(row []string, position int) *float64 {\n\tif row == nil {\n\t\treturn nil\n\t}\n\n\treturn stringToFloat(row[position])\n}", "func (v *Posit16x2) Get(i int) Posit16 { return v.impl[i] }", "func (s *Arena) getVal(offset uint32, size uint32) (ret y.ValueStruct) {\n\tret.Decode(s.data[offset : offset+size])\n\treturn\n}", "func (arr *Array) Get(pos *Term) *Term {\n\tnum, ok := pos.Value.(Number)\n\tif !ok {\n\t\treturn nil\n\t}\n\n\ti, ok := num.Int()\n\tif !ok {\n\t\treturn 
nil\n\t}\n\n\tif i >= 0 && i < len(arr.elems) {\n\t\treturn arr.elems[i]\n\t}\n\n\treturn nil\n}", "func (da *DataFrame) GetPos(j int) interface{} {\n\tif da.done {\n\t\treturn nil\n\t}\n\n\treturn da.data[j][da.chunk-1]\n}", "func (gc *GisCache) GetPosition(typ string, name string) (*Position, error) {\n\tkey := gc.baseKey + posKey + typ + \":\" + name\n\n\t// Create position map\n\tpositionMap := make(map[string]*Position)\n\n\t// Get all position entry details\n\terr := gc.rc.ForEachEntry(key, getPosition, &positionMap)\n\tif err != nil {\n\t\tlog.Error(\"Failed to get all entries with error: \", err.Error())\n\t\treturn nil, err\n\t}\n\n\t// only one result, so return the first one\n\tfor _, position := range positionMap {\n\t\treturn position, nil\n\t}\n\treturn nil, nil\n}", "func (l LinkedList) ValueAt(index int) (interface{}, error) {\n\tif index >= l.Size {\n\t\treturn \"\", fmt.Errorf(\"Index %d out of range\", index)\n\t}\n\n\tvar val interface{}\n\tnode := l.Head\n\n\tfor i := 0; i < l.Size; i++ {\n\t\tif i == index {\n\t\t\tval = node.Data\n\t\t\tbreak\n\t\t}\n\n\t\tnode = node.Next\n\t}\n\n\treturn val, nil\n}", "func (v valuer) Value(i int) float64 {\n\treturn v.data[i]\n}", "func (access FloatAccess) Get(row int) float64 {\n return access.rawData[access.indices[row]]\n}", "func (r *SlidingWindow) Get(index int) (interface{}, bool) {\n\tindex -= r.base\n\tif index < 0 || index >= r.Capacity() {return nil, false}\n\tindex = r.normalize(index + r.start)\n\tvalue := r.values[index]\n\treturn value.value, value.present\n}", "func (items IntSlice) Value(index int) interface{} { return items[index] }", "func (l List) ValueAt(index int) (value uint16, err error) {\n\n\tif l.Root == nil {\n\t\treturn value, fmt.Errorf(\"Oops! Looks like the list is empty\")\n\t}\n\n\tcurrent := l.Root\n\tcurrentIndex := 0\n\n\tfor current != nil {\n\t\tif currentIndex == index {\n\t\t\treturn current.Value, err\n\t\t} else if current.Next == nil && index > currentIndex {\n\t\t\treturn value, fmt.Errorf(\"Provided index %v is out of bounds\", index)\n\t\t}\n\t\tcurrentIndex++\n\t\tcurrent = current.Next\n\t}\n\treturn value, err\n}", "func (q *Deque) At(idx int) interface{} {\n\tif idx >= len(q.values) {\n\t\treturn nil\n\t}\n\tactualIdx := idx\n\tif q.front != 0 {\n\t\tactualIdx = (idx + q.front) % cap(q.values)\n\t}\n\treturn q.values[actualIdx]\n}", "func (device *ServoBrick) GetPosition(servoNum uint8) (position int16, err error) {\n\tvar buf bytes.Buffer\n\tbinary.Write(&buf, binary.LittleEndian, servoNum)\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetPosition), buf.Bytes())\n\tif err != nil {\n\t\treturn position, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 10 {\n\t\t\treturn position, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 10)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn position, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &position)\n\n\t}\n\n\treturn position, nil\n}", "func (obj *SparseRealVector) ValueAt(i int) float64 {\n if i < 0 || i >= obj.Dim() {\n panic(\"index out of bounds\")\n }\n if v, ok := obj.values[i]; ok {\n return v.GetValue()\n } else {\n return 0.0\n }\n}", "func (t Tuple) At(idx int) float64 {\n\treturn t[idx]\n}", "func (x IntSlice) Get(i int) interface{} {return x[i]}", "func (access IntAccess) Get(row int) int {\n 
return access.rawData[access.indices[row]]\n}", "func GetVal(ints []int, opcodeandparammodes []int, instructionpointer int, position int) int {\n\tmode := opcodeandparammodes[position]\n\tif mode == 0 { //position mode\n\t\treturn ints[ints[instructionpointer+position]]\n\t} else if mode == 1 { //immediate mode\n\t\treturn ints[instructionpointer+position]\n\t}\n\t//todo decide how to handle unsupported modes\n\treturn -1\n}", "func (seq Sequence) ValueAt(period int, e expr.Expr) (val float64, found bool) {\n\tif e.IsConstant() {\n\t\tval, found, _ = e.Get(nil)\n\t\treturn\n\t}\n\tif len(seq) == 0 {\n\t\treturn 0, false\n\t}\n\tif period < 0 {\n\t\treturn 0, false\n\t}\n\treturn seq.ValueAtOffset(period*e.EncodedWidth(), e)\n}", "func (v *Vector) Get(i int) float64 {\n\tswitch i {\n\tcase 0:\n\t\treturn v.X\n\tcase 1:\n\t\treturn v.Y\n\tcase 2:\n\t\treturn v.Z\n\t}\n\treturn 0.0\n}", "func (p Point) At(idx int) float64 {\n\treturn p[idx]\n}", "func (self *T) Get(col, row int) float64 {\n\treturn self[row]\n}", "func (v *VectorImpl) Get(i int) Value {\n\tif i < 0 || uint(i) >= v.len {\n\t\tpanic(\"Index out of bounds\")\n\t}\n\n\treturn v.sliceFor(uint(i))[i&shiftBitMask]\n}", "func (t *FenwickTreeSimple) Get(index int) int {\n\treturn t.QueryRange(index, index)\n}", "func (v Value) Position() (int, int) {\n\treturn position(v.input, v.start)\n}", "func (vm *VirtualMachine) getValueForElement(e quads.Element) interface{} {\n\tif strings.Contains(e.ID(), \"ptr_\") {\n\t\tmemblock := vm.getMemBlockForAddr(e.GetAddr())\n\t\tptrAddr, ok := memblock.Get(e.GetAddr()).(float64)\n\t\tif !ok {\n\t\t\tlog.Fatalf(\"Error: (getValueForElement) couldn't cast index to float64\")\n\t\t}\n\n\t\tauxElement := quads.NewElement(int(ptrAddr), e.ID(), e.Type(), \"\")\n\t\tmemblock = vm.getMemBlockForElement(auxElement)\n\t\trealValue := memblock.Get(int(ptrAddr))\n\t\treturn realValue\n\t}\n\tmemblock := vm.getMemBlockForElement(e)\n\treturn memblock.Get(e.GetAddr())\n}", "func (card *Card) ValueAt(cellName string) (int, error) {\n\tcell, err := card.cellAt(cellName)\n\tif err != nil {\n\t\treturn nan, err\n\t}\n\treturn cell.value, nil\n}", "func (v Vector) Get(n int) float64 {\n\treturn v.data[n]\n}", "func (iter *SliceIterator) Value() interface{} {\n\treturn iter.s.At(iter.position)\n}", "func (bb *ByteSliceBuffer) GetPos(pos uint64) ([]byte, bool) {\n\tif n, ok := bb.Buffer.TransPos(pos); ok {\n\t\treturn bb.data[n], true\n\t}\n\treturn nil, false\n}", "func (r *Result) Getx(index int) interface{} {\n\tif index < 0 || index >= len(r.val.columns) {\n\t\tpanic(ErrorColumnNotFound{At: \"Getx\", Index: index})\n\t}\n\tbv := r.val.buffer[index]\n\treturn bv.Value\n}", "func (self Source) GetPosition(result *Vector) {\n\tresult[x], result[y], result[z] = self.Get3f(AlPosition)\n}", "func (s *VectorImplSlice) Get(i int) Value {\n\tif i < 0 || s.start+i >= s.stop {\n\t\tpanic(\"Index out of bounds\")\n\t}\n\n\treturn s.vector.Get(s.start + i)\n}", "func (g Grid) GetValueAt(p *Point) float32 {\n\n\tif(p.X<g.Header.Lo1 || p.X>g.Header.Lo2){\n\t\treturn float32(-9999);\n\t}\n\n\tif(p.Y>g.Header.La1 || p.Y<g.Header.La2){\n\t\treturn float32(-9999);\n\t}\n\n\tidxX := int(((p.X - g.Header.Lo1) / g.Width()) * float64(g.Header.Nx-1))\n\tidxY := int(((g.Header.La1 - p.Y) / g.Height()) * float64(g.Header.Ny-1))\n\n\tul := g.GetValueAtIdx(idxX, idxY)\n\tur := g.GetValueAtIdx(idxX+1, idxY)\n\tll := g.GetValueAtIdx(idxX, idxY+1)\n\tlr := g.GetValueAtIdx(idxX+1, 
idxY+1)\n\n\tv:=BilinearInterpolation(&ll,&ul,&lr,&ur,p)\n\n\treturn float32(v)\n}", "func (aa *Array) Get(idx int) interface{} {\n\t// do not lock if not needed\n\tif idx < 0 || idx >= aa.length {\n\t\treturn nil\n\t}\n\n\taa.mutex.RLock()\n\tres := aa.items[idx]\n\taa.mutex.RUnlock()\n\treturn res\n}", "func (v Vector) At(idx int) float64 {\n\treturn v[idx]\n}", "func (args *Args) at(index int) *Arg {\n\tif len(args.items) > index && index >= 0 {\n\t\treturn args.items[index]\n\t}\n\treturn nil\n}", "func (g *Grid) GetValueAtIdx(idxX int, idxY int) PointValue{\n\th:= g.Header\n\n\tif(idxX<0 || idxX>=h.Nx) || (idxY<0 || idxY>=h.Ny){\n\t\t//fmt.Println(\"Out of bounds\")\n\t\treturn PointValue{Point{h.Lo1 + float64(idxX)*h.Dx, h.La1 - float64(idxY)*h.Dy }, float32(-9999)}\n\t}\n\n\tv:= g.Data[idxY * h.Nx + idxX]\n\n\tx:= h.Lo1 + float64(idxX)*h.Dx\n\ty:= h.La1 - float64(idxY)*h.Dy\n\n\treturn PointValue{Point{x, y },v}\n}", "func (jz *Jzon) ValueAt(i int) (v *Jzon, err error) {\n\tif jz.Type != JzTypeArr {\n\t\treturn v, expectTypeOf(JzTypeArr, jz.Type)\n\t}\n\n\tif i < 0 || i >= len(jz.data.([]*Jzon)) {\n\t\terr = errors.New(\"index is out of bound\")\n\t\treturn\n\t}\n\n\treturn jz.data.([]*Jzon)[i], nil\n}", "func (l *Int32) Get(index int) int32 {\n\treturn l.values[index]\n}", "func (this *Value) Index(index int) (*Value, error) {\n\t// aliases always have priority\n\tif this.alias != nil {\n\t\tresult, ok := this.alias[strconv.Itoa(index)]\n\t\tif ok {\n\t\t\treturn result, nil\n\t\t}\n\t}\n\t// next we already parsed, used that\n\tswitch parsedValue := this.parsedValue.(type) {\n\tcase []*Value:\n\t\tif index >= 0 && index < len(parsedValue) {\n\t\t\tresult := parsedValue[index]\n\t\t\treturn result, nil\n\t\t} else {\n\t\t\t// this way it behaves consistent with jsonpointer below\n\t\t\treturn nil, &Undefined{}\n\t\t}\n\t}\n\t// finally, consult the raw bytes\n\tif this.raw != nil {\n\t\tres, err := jsonpointer.Find(this.raw, \"/\"+strconv.Itoa(index))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif res != nil {\n\t\t\treturn NewValueFromBytes(res), nil\n\t\t}\n\t}\n\treturn nil, &Undefined{}\n}", "func (m *Mat) Value(index int) (val float32) {\n\tcursor := 0\n\tfor i := 0; i < len(m.W); i++ {\n\t\tfor f := 0; f < 4; f++ {\n\t\t\tif cursor >= index {\n\t\t\t\tval = m.W[i][f]\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tcursor++\n\t\t}\n\t}\n\treturn val\n}", "func (items Float64Slice) Value(index int) interface{} { return items[index] }", "func (iter *Iterator) Position() uint64 { return iter.impl.Value() }", "func (uni *Uniform3fv) Get(idx int) (v0, v1, v2 float32) {\n\n\tpos := idx * 3\n\treturn uni.v[pos], uni.v[pos+1], uni.v[pos+2]\n}", "func (m *Iterator) Value() Elem {\n\treturn m.buf[m.bufCur]\n}", "func (a Vector) At(p int) float64 {\n\treturn a[p]\n}", "func (rs *RuneScanner) Position(n int) Position {\n\tif len(rs.ps) == 1 {\n\t\treturn rs.ps[0]\n\t} else if n < 0 {\n\t\treturn rs.ps[rs.inext+1+n]\n\t} else {\n\t\treturn rs.ps[n]\n\t}\n}", "func GetItem(slice []int, index int) int {\n\tif checkOutOfBounds(index, len(slice)) {\n\t\treturn -1\n\t}\n\treturn slice[index]\n}", "func (vn VecN) Get(i int) float64 {\n\treturn vn.vec[i]\n}", "func (ba *FilterBitArray) ValueAt(i uint) byte {\n\tif i < ba.Capacity() {\n\t\treturn (*ba)[i/byteSize] & (1 << (i % byteSize))\n\t}\n\treturn 0\n}", "func (s *dataSet) get(i int) entry {\n\tv := s.e(i).value(s.buf)\n\treturn entry{v.bytes(), v.ord()}\n}", "func (b *Board) Get(p Position) Piece {\n\treturn *b.At(p)\n}", "func (this 
*MyLinkedList) Get(index int) int {\n\n\tnode := this\n\ti := 0\n\tfor node != nil {\n\t\tif i == index {\n\t\t\treturn node.val\n\t\t}\n\t\ti++\n\t\tnode = node.next\n\t}\n\treturn -1\n}", "func (b *BrainFuck) val() int {\n\treturn b.memory.cell[b.cur()]\n}", "func (m *spiralMemory) get(x int, y int) int {\n\trx := x + m.offset\n\try := y + m.offset\n\tif ry < 0 || ry > len(m.grid)-1 || rx < 0 || rx > len(m.grid[ry])-1 {\n\t\treturn 0\n\t}\n\treturn m.grid[ry][rx]\n}", "func (arr *ArrayList) Get(index uint32) ItemType {\n if index < arr.length {\n return arr.data[index]\n }\n panic(\"out of bounds\")\n}", "func (sw *subscriptionWorker) GetPosition(ctx context.Context) (int64, error) {\n\tlog := logrus.\n\t\tWithFields(logrus.Fields{\n\t\t\t\"SubscriberID\": sw.subscriberID,\n\t\t})\n\n\tmsgs, _ := sw.ms.Get(\n\t\tctx,\n\t\tPositionStream(sw.subscriberID),\n\t\tConverter(convertEnvelopeToPositionMessage),\n\t\tLast(),\n\t)\n\tif len(msgs) < 1 {\n\t\tlog.Debug(\"no messages found for subscriber, using default\")\n\t\treturn 0, nil\n\t}\n\n\tswitch pos := msgs[0].(type) {\n\tcase *positionMessage:\n\t\treturn pos.MyPosition, nil\n\tdefault:\n\t\tlog.\n\t\t\tWithError(ErrIncorrectMessageInPositionStream).\n\t\t\tError(\"incorrect message type in position stream\")\n\t\treturn 0, nil\n\t}\n}", "func (v *V) At(i int) float64 {\n\tif i < 0 || i >= v.Dim() {\n\t\tpanic(ErrIndex)\n\t}\n\treturn v.Data[i]\n}", "func (d Datapoints) ValueAt(n int) float64 { return d[n].Value }", "func (this *MyLinkedList) Get(index int) int {\n\tif index < 0 || index >= this.length {\n\t\treturn -1\n\t}\n\n\treturn this.getNode(index).Val\n\n}", "func (access ObjectAccess) Get(row int) interface{} {\n return access.rawData[access.indices[row]]\n}", "func (sll *SingleLinkedList) Get(index int) interface{} {\n\treturn sll.getNode(index).value\n}", "func (e rawEntry) value(buf []byte) rawValue { return buf[e.ptr():][:e.sz()] }", "func (self *T) Get(col, row int) float32 {\n\treturn self[col][row]\n}", "func (m *WorkbookWorksheet) GetPosition()(*int32) {\n return m.position\n}", "func (ps *PrimeStore) GetByIndex(nth uint64) (n uint64) {\n\tdefer Tracer(NewTrace(\"GetByIndex\"))\n\n\tn = 0\n\tif nth < ps.base || nth >= (ps.base+ps.count) {\n\t\tlog.Print(\"out of range.\", nth, \" \", ps)\n\t\treturn\n\t}\n\n\tn = ps.index[nth-ps.base]\n\treturn\n}", "func (cache *Cache) GetAt(seqno uint16, index uint16, result []byte) uint16 {\n\tcache.mu.Lock()\n\tdefer cache.mu.Unlock()\n\n\tif int(index) >= len(cache.entries) {\n\t\treturn 0\n\t}\n\tif cache.entries[index].seqno != seqno {\n\t\treturn 0\n\t}\n\treturn uint16(copy(\n\t\tresult[:cache.entries[index].length()],\n\t\tcache.entries[index].buf[:]),\n\t)\n}", "func (r *sparseRow) Value(feature int) float64 {\n\ti := search(r.ind, feature)\n\tif i >= 0 {\n\t\treturn r.val[i]\n\t}\n\treturn 0\n}", "func (uni *Uniform4fv) Get(idx int) (v0, v1, v2, v3 float32) {\n\n\tpos := idx * 4\n\treturn uni.v[pos], uni.v[pos+1], uni.v[pos+2], uni.v[pos+3]\n}", "func (sp *Space) Get(index int) Shape {\n\treturn (*sp)[index]\n}", "func (a ValueArray) Get(index int) Value {\n\tif index < 0 || index >= len(a.data) {\n\t\treturn Null()\n\t}\n\treturn a.data[index]\n}", "func (rb *RingBuffer) Get(index int) (ans stats.Record) {\n\trb.lock.RLock()\n\tdefer rb.lock.RUnlock()\n\tif index < 0 {\n\t\tindex = len(rb.data) + index\n\t}\n\treturn rb.data[(rb.seq+uint64(index))%uint64(len(rb.data))]\n}", "func (m M) Get(r, c int) Frac {\n\tr, c = r-1, c-1\n\treturn m.values[r*m.c+c]\n}", "func (p Polynom) 
ValueAt(x0 *big.Int) *big.Int {\n\tval := big.NewInt(0)\n\tfor i := len(p.coeff) - 1; i >= 0; i-- {\n\t\tval.Mul(val, x0)\n\t\tval.Add(val, p.coeff[i])\n\t\tval.Mod(val, p.mod)\n\t}\n\treturn val\n}", "func (m *Mob) GetPosition() context.Position {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\n\treturn *m.position\n}", "func (a *Array) Get(index int) interface{} {\n\treturn a.Data[index]\n}", "func (g *grid) get(v vector) int8 {\n\treturn g.bits[v.y][v.x]\n}", "func valueAtbit(num int, bit int) int {\n\treturn -1\n}", "func (m NumSeriesDistribution) Get(index int) *NumSeries {\n\tif index > -1 {\n\t\tif s, ok := m[index]; ok {\n\t\t\treturn s\n\t\t}\n\t}\n\treturn nil\n}", "func (s *Scanner) GetPosition() token.Position { return s.reader.Pos }" ]
[ "0.68028307", "0.6756042", "0.6708186", "0.665074", "0.6611376", "0.6589559", "0.6579609", "0.65628374", "0.6557401", "0.6526513", "0.6507764", "0.6463204", "0.64461094", "0.6418835", "0.6396228", "0.63865566", "0.63500816", "0.63385755", "0.632145", "0.63111657", "0.630474", "0.6293072", "0.6258402", "0.6253626", "0.62515867", "0.6240899", "0.62039125", "0.6191934", "0.61857593", "0.6181251", "0.6180045", "0.6179244", "0.6176103", "0.6157751", "0.61411566", "0.6135464", "0.6121682", "0.61194813", "0.61077094", "0.61037284", "0.6044246", "0.60377413", "0.6010176", "0.6001465", "0.5987188", "0.5983508", "0.59791994", "0.59737754", "0.5954908", "0.5952849", "0.59503883", "0.592985", "0.59241176", "0.5920426", "0.59147537", "0.59038824", "0.58867", "0.58814895", "0.5877724", "0.58754057", "0.5874776", "0.5874289", "0.58626044", "0.5861599", "0.58526766", "0.5847095", "0.5845524", "0.5845204", "0.58183306", "0.58136696", "0.5803028", "0.5794911", "0.57930887", "0.57815886", "0.578071", "0.5779148", "0.57617784", "0.57611716", "0.57585967", "0.57574755", "0.5755212", "0.5738832", "0.5736523", "0.57363105", "0.5730596", "0.57298714", "0.57271135", "0.56997365", "0.56948286", "0.56799984", "0.56787944", "0.5659002", "0.56562465", "0.5650715", "0.5639771", "0.5628609", "0.55975664", "0.5594133", "0.5589918", "0.5584484" ]
0.6263303
22
Delete value at the given position
func (l *LinkedList) Delete(pos int) bool {
	if pos > l.length {
		return false
	}
	node := l.head
	if pos == 1 {
		l.head = node.Next()
	} else {
		for i := 1; i < pos-1; i++ {
			node = node.Next()
		}
		node.SetNext(node.Next().Next())
	}
	l.length = l.length - 1
	return true
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e *ObservableEditableBuffer) DeleteAt(rp0, rp1 int) {\n\tp0 := e.f.RuneTuple(rp0)\n\tp1 := e.f.RuneTuple(rp1)\n\n\te.Delete(p0, p1)\n}", "func (this *Array) Delete(index uint) (int, error){\n\tif this.isIndexOutOfRange(index){\n\t\treturn 0, errors.New(\"index out of range\")\n\t}\n\tv := this.data[index]\n\tfor i := index; i < this.Len()-1; i++{\n\t\tthis.data[i] = this.data[i+1]\n\t}\n\tthis.length --\n\treturn v, nil\n}", "func (gc *GisCache) DelPosition(typ string, name string) {\n\tkey := gc.baseKey + posKey + typ + \":\" + name\n\terr := gc.rc.DelEntry(key)\n\tif err != nil {\n\t\tlog.Error(\"Failed to delete position for \", name, \" with err: \", err.Error())\n\t}\n}", "func (a *Array) Delete(index uint) (int, error) {\n\tif a.isIndexOutOfRange(index) {\n\t\treturn 0, errors.New(\"out of index range\")\n\t}\n\tv := a.data[index]\n\tfor i := index; i < a.Len()-1; i++ {\n\t\ta.data[i] = a.data[i+1]\n\t}\n\ta.length--\n\treturn v, nil\n}", "func (xym *XYMapStringInt) Delete(key string) (value int, existed bool) {\n\tif idx, ok := xym.mapping[key]; ok {\n\t\tslot := xym.storage[idx]\n\t\tif slot.Valid {\n\t\t\tvalue = slot.Value\n\t\t\texisted = true\n\t\t\tslot.Valid = false\n\t\t\txym.empty++\n\t\t\tif xym.empty > 10 && float32(xym.empty)/float32(len(xym.storage)) > 0.8 {\n\t\t\t\txym.Compress()\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\treturn\n}", "func (ll *LinkedList) DeleteAtPos(pos int) (util.Value, bool) {\n\tif ll.IsEmpty() {\n\t\treturn nil, false\n\t}\n\tif pos >= ll.size {\n\t\treturn nil, false\n\t}\n\n\tvar prev *node\n\tvar cur = ll.head\n\tvar deletedVal util.Value\n\n\tfor i := 0; i < pos; i++ {\n\t\tprev = cur\n\t\tcur = cur.next\n\t}\n\n\t// If prev is nil, then we need to remove head.\n\t// Else, we need to remove the cur node.\n\tdeletedVal = cur.val\n\tif prev == nil {\n\t\tll.head = cur.next\n\t\tcur.next = nil\n\t} else {\n\t\tprev.next = cur.next\n\t\tcur.next = nil\n\t}\n\tcur = nil // setting up for garbage collection.\n\tll.size--\n\treturn deletedVal, true\n}", "func (s *Set) Delete(val int) {\n\tdelete(s.set, val)\n}", "func (cll *CircularLinkedList) DeleteFromPosition(pos int) int {\n\tif !(cll.CheckIfEmpty()) {\n\t\thead := cll.Start\n\t\tdeletedEle := head.Data\n\t\tif cll.Len == 1 {\n\t\t\t// delete from beginning\n\t\t\tdeletedEle = cll.DeleteBeginning()\n\t\t\treturn deletedEle\n\t\t}\n\t\tif cll.Len == pos {\n\t\t\t// delete from end\n\t\t\tdeletedEle = cll.DeleteEnd()\n\t\t\treturn deletedEle\n\t\t}\n\t\t// delete from middle\n\t\t//traverse till you find position\n\t\tcount := 1\n\t\tfor {\n\t\t\tif count == pos-1 {\n\t\t\t\tdeletedEle = head.Next.Data\n\t\t\t\tbreak\n\t\t\t}\n\t\t\thead = head.Next\n\t\t}\n\t\thead.Next = head.Next.Next\n\t\tcll.Len--\n\t\treturn deletedEle\n\t}\n\treturn -1\n}", "func (xym *XYMapIntInt) Delete(key int) (value int, existed bool) {\n\tif idx, ok := xym.mapping[key]; ok {\n\t\tslot := xym.storage[idx]\n\t\tif slot.Valid {\n\t\t\tvalue = slot.Value\n\t\t\texisted = true\n\t\t\tslot.Valid = false\n\t\t\txym.empty++\n\t\t\tif xym.empty > 10 && float32(xym.empty)/float32(len(xym.storage)) > 0.8 {\n\t\t\t\txym.Compress()\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\treturn\n}", "func (a *Array) Delete(index int) interface{} {\n\tdefer func() {\n\t\ta.Data = append(a.Data[:index], a.Data[index+1:]...)\n\t\ta.Length--\n\t}()\n\treturn a.Data[index]\n}", "func (b *Bag) DeleteAt(index int) {\n\tb.items[index] = b.items[len(b.items)-1]\n\tb.items = b.items[:len(b.items)-1]\n}", "func (v *OrderedValues) Del(key []byte) {\n\tvar 
(\n\t\ti int\n\t\tok bool\n\t\tj [][]byte\n\t)\n\tfor i, j = range *v {\n\t\tif len(j) > 0 && bytes.Equal(j[0], key) {\n\t\t\tok = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !ok {\n\t\treturn\n\t}\n\tcopy((*v)[i:], (*v)[i+1:])\n\t(*v)[len(*v)-1] = nil\n\t*v = (*v)[:len(*v)-1]\n}", "func (s *items) removeAt(index int) Item {\n\titem := (*s)[index]\n\tcopy((*s)[index:], (*s)[index+1:])\n\t(*s)[len(*s)-1] = nil\n\t*s = (*s)[:len(*s)-1]\n\treturn item\n}", "func (nl *nodeList) delete(i int) *Node {\n\tend := len(nl.elements) - 1\n\tn := nl.elements[i]\n\t// Copy values from the deletion point to the left by one\n\tcopy(nl.elements[i:], nl.elements[i+1:])\n\t// Dereference the last value\n\tnl.elements[end] = nil\n\t// Truncate the slice\n\tnl.elements = nl.elements[:end]\n\n\treturn n\n}", "func (a *Array) Delete(v int) error {\n\tvar deleted bool\n\tfor i, d := range a.data {\n\t\tif d == v {\n\t\t\tdeleted = true\n\t\t\tfor ; i < int(a.length); i++ {\n\t\t\t\ta.data[i] = a.data[i+1]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\tif deleted {\n\t\ta.length--\n\t\treturn nil\n\t}\n\treturn errors.New(\"not found in array\")\n}", "func (s *children) removeAt(index int) *node {\n\tn := (*s)[index]\n\tcopy((**s)[index:], (*s)[index+1:])\n\t(*s)[len(*s)-1] = nil\n\t*s = (*s)[:len(*s)-1]\n\treturn n\n}", "func (dll *DoublyLinkedList) DeleteAtGivenPosition(position int32) {\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\tfmt.Println(\"panic occurred:\", err)\n\t\t}\n\t}()\n\tlengthOfdll := dll.Length()\n\tif lengthOfdll < position {\n\t\tpanic(\"Position out of bound\")\n\t} else {\n\t\tif dll.head == nil {\n\t\t\treturn\n\t\t}\n\n\t\ttemp := dll.head\n\t\tif position == 1 {\n\t\t\tdll.head = temp.next\n\t\t\tdll.head.prev = nil\n\t\t\treturn\n\t\t}\n\n\t\tvar currentPosition int32 = 1\n\t\tfor temp != nil {\n\t\t\tif currentPosition+1 == position {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tcurrentPosition += 1\n\t\t\ttemp = temp.next\n\t\t}\n\t\tif temp.next == nil {\n\t\t\treturn\n\t\t} else {\n\t\t\ttemp.next = temp.next.next\n\t\t\tif temp.next != nil {\n\t\t\t\ttemp.next.prev = temp\n\t\t\t}\n\t\t}\n\t}\n}", "func (l *List) Del(v interface{} /* val */) *El {\n\tcur := l.search(v, true, true)\n\n\tif cur == nil || l.less(v, cur.val) {\n\t\treturn nil\n\t}\n\n\tl.len--\n\n\th := cur.height()\n\tfor i := h - 1; i >= 0; i-- {\n\t\t*l.up[i] = cur.nexti(i)\n\t}\n\n\tif l.autoreuse {\n\t\tReuse(cur)\n\t}\n\n\treturn cur\n}", "func (list *MyArrayList) removeAtIndex(idx int) (int, error) {\n\tif idx >= list.currSize {\n\t\treturn 0, errors.New(\"index out of bounds\")\n\t}\n\toldVal := list.array[idx]\n\tfor i := idx; i< (list.currSize -1); i++ {\n\t\tlist.array[i] = list.array[i+1]\n\t}\n\tlist.currSize--\n\treturn oldVal, nil\n}", "func (ll *Doubly[T]) DelByPos(pos int) (T, bool) {\n\n\tswitch {\n\tcase ll.Head == nil:\n\t\tvar r T\n\t\treturn r, false\n\tcase pos-1 == 0:\n\t\treturn ll.DelAtBeg()\n\tcase pos-1 == ll.Count():\n\t\treturn ll.DelAtEnd()\n\tcase pos-1 > ll.Count():\n\t\tvar r T\n\t\treturn r, false\n\t}\n\tvar prev *Node[T]\n\tvar val T\n\tcur := ll.Head\n\tcount := 0\n\n\tfor count < pos-1 {\n\t\tprev = cur\n\t\tcur = cur.Next\n\t\tcount++\n\t}\n\n\tcur.Next.Prev = prev\n\tval = cur.Val\n\tprev.Next = cur.Next\n\treturn val, true\n}", "func (lst *List) RemoveAt(idx int) (ret Val_t){\n\tif lst.Len < 1{\n\t\tpanic(\"no item to pop\")\n\t}\n\tcur := lst.GetAt(idx)\n\tret = cur.Val\n\tif(idx == 0){\n\t\treturn lst.PopFront()\n\t}else if idx == lst.Len-1{\n\t\treturn 
lst.PopBack()\n\t}else{\n\t\tpre,next := lst.GetAt(idx-1),lst.GetAt(idx+1)\n\t\tpre.Next = next\n\t\tlst.Len--\n\t}\n\treturn\n}", "func (h *binaryHeap) removeIdx(idx int) {\n\tif h.invalidNode(idx) {\n\t\treturn\n\t}\n\th.swapIdx(idx, h.len)\n\th.tree[h.len] = 0\n\th.len--\n\th.bubbleDown(idx)\n}", "func (b *Bag) Delete(x byte) {\n\t_, ok := (*b)[x]\n\tif ok {\n\t\t(*b)[x]--\n\t}\n}", "func (v *Data) Remove(idx int) (val PicData) {\n\tvar nil PicData\n\n\tdv := *v\n\n\tval = dv[idx]\n\t*v = append(dv[:idx], dv[1+idx:]...)\n\n\tdv[len(dv)-1] = nil\n\n\treturn val\n}", "func RemoveItemAt(list *List, index uint64) *uint64 {\n var returnPtr *uint64;\n var returnAddr uint64;\n var newReturnPtr uint64;\n var actualReturnPtr *uint64;\n var i uint64;\n returnPtr = GetItemAt(list, index); //Get the correspondig value value from the list\n newReturnPtr = Alloc(list.itemSize); //Allocate memory to store the item to be removed\n returnAddr = ToUint64FromUint64Ptr(returnPtr);\n CopyMem(returnAddr, newReturnPtr, list.itemSize); //Save item to be removed in order to return it\n for i = index; i < list.itemCount - 1; i = i + 1 { //Remove item by moving the following ones \"backwards\" in order to fill the gap caused by the deleted item\n CopyMem(list.baseAddress + (i + 1) * list.itemSize, list.baseAddress + list.itemSize * i, list.itemSize); //Move item at position i + 1 to position i\n }\n list.itemCount = list.itemCount - 1; //Update (decrease) item count\n actualReturnPtr = ToUint64PtrFromUint64(newReturnPtr);\n return actualReturnPtr; //Return value removed from list\n}", "func (d *DirectAddress) Delete(key int) {\n\tif err := d.validateKey(key); err != nil {\n\t\treturn\n\t}\n\td.array[key-d.uMin] = nil\n}", "func DeleteValue(db storage.Store, cmd *fsm.Command) error {\n\tk := KeyFromCommand(cmd)\n\treturn db.Delete(k.ToBytes())\n}", "func (s *Set) Delete(val interface{}) {\n\tdelete(s.set, val)\n}", "func (self *QueuedSet) delete(item interface{}) {\n\tcount := self.set[item]\n\tif count <= 1 {\n\t\tdelete(self.set, item)\n\t} else {\n\t\tself.set[item] = count - 1\n\t}\n\treturn\n\n}", "func (l *LinkedList) DeleteAt(pos int) error {\n\t// validate the position\n\tif pos < 0 {\n\t\tfmt.Println(\"position can not be negative\")\n\t\treturn errors.New(\"position can not be negative\")\n\t} else if l.len == 0 {\n\t\tfmt.Println(\"No nodes in list\")\n\t\treturn errors.New(\"No nodes in list\")\n\t} else if pos > (l.len - 1) {\n\t\tfmt.Println(\"Position: \", pos, \" can not be greater than list size, hence returning\")\n\t\treturn errors.New(\"Position can not be greater than list size, hence returning\")\n\t}\n\t// if position is first call DeleteAtFirst()\n\tif pos == 0 {\n\t\treturn l.DeleteAtFirst()\n\t}\n\n\t// if position is last call DeleteAtEnd()\n\tif pos == (l.len - 1) {\n\t\treturn l.DeleteAtEnd()\n\t}\n\t// get node from given position\n\tnode := l.GetAt(pos)\n\tnode.next.prev = node.prev\n\tnode.prev.next = node.next\n\tl.len--\n\treturn nil\n}", "func (e *EditTree) deleteChildValue(hash int) {\n\tdelete(e.childScalarValues, hash)\n\tdelete(e.childCompositeValues, hash)\n}", "func (c *Cursor) Delete() error {\n\tif c.bucket.tx.db == nil {\n\t\treturn ErrTxClosed\n\t} else if !c.bucket.Writable() {\n\t\treturn ErrTxNotWritable\n\t}\n\n\tkey, _, flags := c.keyValue()\n\t// Return an error if current value is a bucket.\n\tif (flags & bucketLeafFlag) != 0 {\n\t\treturn ErrIncompatibleValue\n\t}\n\t// 从node中移除,本质上将inode数组进行移动\n\tc.node().del(key)\n\n\treturn nil\n}", "func (f * LinkedList) 
delIdx(idx int) (*Element){\n\t// will return deleted element\n\tif (f.length == 0){\n\t\treturn nil\n\t} else if (idx < f.length) {\n\t\tif (idx == f.length - 1){\n\t\t\treturn f.delLast()\n\t\t} else if (idx == 0){\n\t\t\treturn f.delFirst()\n\t\t} else {\n\t\t\tel := f.getElmt(idx)\n\t\t\tel.prev.next = el.next\n\t\t\tel.next.prev = el.prev\n\t\t\tf.length--\n\t\t\treturn el\n\t\t}\n\t} else {\n\t\treturn nil\n\t}\n}", "func (a *DynamicArray) Remove(val int) {\n\tfor index := 0; index < a.len; {\n\t\tgot, _ := a.IndexAt(index)\n\t\t// If the element is found, erase the element at the index\n\t\tif got == val {\n\t\t\ta.EraseAt(index)\n\t\t\tcontinue\n\t\t}\n\t\t// If the element does not found in current index, shift to the right\n\t\tindex++\n\t}\n}", "func (p *IntArray) Delete_at(del_index int) {\n\ttmp := *p\n\tvar new_array IntArray\n\tfor i := 0; i < len(tmp); i++ {\n\t\tif i != del_index {\n\t\t\tnew_array = append(new_array, tmp[i])\n\t\t}\n\t}\n\t*p = new_array\n}", "func (h *indexedHeap) remove(idx int) (string, uint) {\n\treturn h.removeInternal(h.indices[idx])\n}", "func (b *Buffer) Remove(offset int, size int) {\n if (offset + size) > len(b.data) {\n panic(\"invalid offset & size\")\n }\n\n copy(b.data[offset:], b.data[offset+size:])\n b.size -= size\n if b.offset >= (offset + size) {\n b.offset -= size\n } else if b.offset >= offset {\n b.offset -= b.offset - offset\n if b.offset > b.size {\n b.offset = b.size\n }\n }\n}", "func (h *Heap) Delete(nodeIndex int) int {\n\tif nodeIndex >= h.count || nodeIndex < 0 {\n\t\treturn -1\n\t}\n\t// Store removed value\n\tremovedValue := h.array[nodeIndex]\n\tlastNodeIndex := h.count - 1\n\t// Swap the root and the last node\n\tutils.SwapInt(&h.array[lastNodeIndex], &h.array[nodeIndex])\n\t// Remove the root value now corresponding at the last nodes\n\th.pop()\n\t// Heapify the heap with his new state\n\th.down(0)\n\treturn removedValue\n}", "func (t *TreeNode) Delete(value int) {\n\tt.remove(value)\n}", "func (o *KeyValueOrdered) RemoveIndex(idx int) (cell KeyValueCapsule) {\n\tcell = o.s[idx]\n\tdelete(o.m, o.s[idx].K)\n\to.shift(idx+1, len(o.s), -1)\n\to.s = append(o.s[:idx], o.s[idx+1:]...)\n\treturn\n}", "func (t *openAddressing) Delete(key string) {\n\tround := 0\n\tfor round != len(t.values) {\n\t\thash := t.hash(key, round)\n\t\tslot := t.values[hash]\n\t\tif slot != nil && slot.key == key {\n\t\t\tt.values[hash].deleted = true\n\t\t\tt.len--\n\t\t\treturn\n\t\t}\n\t\tround++\n\t}\n}", "func (sl *stringList) delete(i int) (string, bool) {\n\tend := len(sl.elements) - 1\n\tif i > end || i < 0 {\n\t\treturn \"\", false\n\t}\n\n\te := sl.elements[i]\n\t// Copy values from the deletion point to the left by one\n\tcopy(sl.elements[i:], sl.elements[i+1:])\n\t// Set last value to empty string\n\tsl.elements[end] = \"\"\n\t// Truncate the slice\n\tsl.elements = sl.elements[:end]\n\n\treturn e, true\n}", "func (r *runestring) Del(pos ...int) {\n\tfor _, i := range pos {\n\t\tif i >= 0 && i <= len(*r) {\n\t\t\t*r = append((*r)[:i], (*r)[i+1:]...)\n\t\t}\n\t}\n}", "func (a Slice[T]) DeleteAt(index int) Slice[T] {\n\treturn append(a[:index], a[index+1:]...)\n}", "func (vector *Vector) Delete(i int) {\n\t//a = append(a[:i], a[i+1:]...)\n\t// or\n\t//a = a[:i+copy(a[i:], a[i+1:])]\n\t// NOTE If the type of the element is a pointer or a struct with pointer fields,\n\t// which need to be garbage collected, the above implementation of Delete has a potential\n\t// memory leak problem: some elements with values are still referenced by slice a 
and\n\t// thus can not be collected. The following code can fix this problem:\n\n\tcopy((*vector)[i:], (*vector)[i+1:])\n\t(*vector)[len(*vector)-1] = nil // or the zero value of T\n\t*vector = (*vector)[:len(*vector)-1]\n}", "func decrementDigitInListAtPos( numberAtCursor *int ) {\n\n\tif *numberAtCursor == 0 { \t\t\t\t// Scenario 1: Value is 0\n\n\t\t*numberAtCursor = 9 \t\t\t\t// resets value at position to 9\n\n\t} else { \t\t\t\t// Scenario 2: value not 0\n\n\t\t*numberAtCursor-- \t\t\t\t// decrements value by one\n\t}\n}", "func DeleteKeyValueViaValue(iValue []byte) (err error) {\n\tvar has bool\n\tvar _KeyValue = &KeyValue{Value: iValue}\n\tif has, err = Engine.Get(_KeyValue); (has == true) && (err == nil) {\n\t\tif row, err := Engine.Where(\"value = ?\", iValue).Delete(new(KeyValue)); (err != nil) || (row <= 0) {\n\t\t\treturn err\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn\n}", "func (k Keeper) delValue(ctx sdk.Context, accessPath *vm_grpc.VMAccessPath) {\n\tstore := ctx.KVStore(k.storeKey)\n\tkey := common_vm.GetPathKey(accessPath)\n\n\tstore.Delete(key)\n}", "func delete(array []int8, pos int) []int8 {\r\n\tvar length = len(array)\r\n\tvar tempArray = make([]int8, length+1)\r\n\tfmt.Printf(\"\\n\")\r\n\tfor i := 0; i < length; i++ {\r\n\t\tif i < pos {\r\n\t\t\ttempArray[i] = array[i]\r\n\t\t} else {\r\n\t\t\ttempArray[i-1] = array[i]\r\n\t\t}\r\n\t}\r\n\treturn tempArray\r\n\r\n}", "func (d Data) Del(key uint32) {\n\td.mutex.Lock()\n\tcount := d.counts[key]\n\tcount -= 1\n\tif count < 1 {\n\t\tdelete(d.data, key)\n\t\tdelete(d.counts, key)\n\t} else {\n\t\td.counts[key] = count\n\t}\n\td.mutex.Unlock()\n}", "func decr_offset() {\n\toffset = offset - 1\n}", "func (elems *ElementsNR) delete(pt dvid.Point3d) (deleted *ElementNR, changed bool) {\n\t// Delete any elements at point.\n\tvar cut = -1\n\tfor i, elem := range *elems {\n\t\tif pt.Equals(elem.Pos) {\n\t\t\tcut = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif cut >= 0 {\n\t\tdeleted = (*elems)[cut].Copy()\n\t\tchanged = true\n\t\t(*elems)[cut] = (*elems)[len(*elems)-1] // Delete without preserving order.\n\t\t*elems = (*elems)[:len(*elems)-1]\n\t}\n\treturn\n}", "func (mdl *Model) Delete(key interface{}) error {\n\tmdl.mux.Lock()\n\tdefer mdl.mux.Unlock()\n\tif std.ModelTypeList == mdl.GetType() {\n\t\tk := key.(int)\n\t\tif k > len(mdl.data) {\n\t\t\treturn errors.New(InvalidIndex, \"index '%d' out of range\", k)\n\t\t}\n\t\tmdl.data = append(mdl.data[:key.(int)-1], mdl.data[key.(int):]...)\n\t\treturn nil\n\t}\n\n\tk := key.(string)\n\tif idx, ok := mdl.hashIdx[k]; ok {\n\t\tmdl.data = append(mdl.data[:idx-1], mdl.data[idx:]...)\n\t\tdelete(mdl.hashIdx, k)\n\t\tdelete(mdl.idxHash, idx)\n\t\treturn nil\n\t}\n\treturn errors.New(InvalidIndex, \"index '%s' out of range\", k)\n}", "func (s *SkipList) Delete(key interface{}) (value interface{}, ok bool) {\n\tif key == nil {\n\t\tpanic(\"goskiplist: nil keys are not supported\")\n\t}\n\n\tupdate := make([]*snode, s.level()+1, s.effectiveMaxLevel())\n\tcandidate := s.getPath(s.header, update, key)\n\tif candidate == nil || candidate.key != key {\n\t\treturn nil, false\n\t}\n\n\tprevious := candidate.backward\n\tif s.footer == candidate {\n\t\ts.footer = previous\n\t}\n\n\t// 设置节点的前指针\n\tnext := candidate.next()\n\tif next != nil {\n\t\tnext.backward = previous\n\t}\n\n\t// 设置节点levelN的后指针\n\tfor i := 0; i <= s.level() && update[i].forward[i] == candidate; i++ {\n\t\tupdate[i].forward[i] = candidate.forward[i]\n\t}\n\n\t// 删除节点后,levelN链表为空的情况\n\tfor s.level() > 0 && 
s.header.forward[s.level()] == nil {\n\t\ts.header.forward = s.header.forward[:s.level()]\n\t}\n\n\ts.length--\n\n\treturn candidate.value, true\n}", "func (c *OrderedMap) DeleteIndex(index int) (string, interface{}) {\n\tkey := c.Keys[index]\n\tvalue := c.Map[key]\n\tdelete(c.Map, key)\n\tc.Keys = append(c.Keys[:index], c.Keys[index+1:]...)\n\treturn key, value\n}", "func (m *Model) deleteAfterCursor() bool {\n\tm.value = m.value[:m.pos]\n\treturn m.setCursor(len(m.value))\n}", "func Delete(seq Sequence, offset, length int) Sequence {\n\tinfo := seq.Info()\n\tinfo = tryExpand(info, offset, -length)\n\tseq = WithInfo(seq, info)\n\n\tff := seq.Features()\n\tfor i, f := range ff {\n\t\tff[i].Loc = f.Loc.Expand(offset, -length)\n\t}\n\tseq = WithFeatures(seq, ff)\n\n\tq := seq.Bytes()\n\tp := make([]byte, len(q)-length)\n\tcopy(p[:offset], q[:offset])\n\tcopy(p[offset:], q[offset+length:])\n\tseq = WithBytes(seq, p)\n\n\treturn seq\n}", "func (s *Storage) RangeDelete(start, end []byte) error {\n\ts.kv.RangeDelete(start, end)\n\treturn nil\n}", "func (d data) deleteSliceElement(dataFieldName string, i int) error {\n\tslice := reflect.ValueOf(d.src).Elem().FieldByName(dataFieldName)\n\tif slice.Kind() != reflect.Slice {\n\t\treturn fmt.Errorf(\"property: delete %s[%d]; not a slice\", dataFieldName, i)\n\t}\n\tif i < 0 {\n\t\treturn fmt.Errorf(\"property: delete %s[%d]\", dataFieldName, i)\n\t}\n\tn := slice.Len()\n\tif i >= n {\n\t\treturn fmt.Errorf(\"property: delete %s[%d] >= len\", dataFieldName, i)\n\t}\n\n\t// notify DeleteID\n\te := slice.Index(i)\n\tif e.Kind() != reflect.Struct {\n\t\treturn fmt.Errorf(\"property: delete %s[%d]: not a struct\", dataFieldName, i)\n\t}\n\tvar zv reflect.Value\n\tnamefield := e.FieldByName(\"Name\")\n\tif namefield != zv {\n\t\tif err := d.src.DeleteID(namefield); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif i == n-1 {\n\t\tslice.SetLen(n - 1)\n\t} else {\n\t\tnewSlice := reflect.AppendSlice(slice.Slice(0, i), slice.Slice(i+1, n))\n\t\tslice.Set(newSlice)\n\t}\n\treturn nil\n}", "func (a *List) DelItem(i int) {\n\ta.Items = append(a.Items[:i], a.Items[i+1:]...)\n}", "func DelValue(chave string) bool {\n\tconn := getPool().Get()\n\tconn.Do(\"DEL\", chave)\n\tdefer conn.Close()\n\treturn true\n}", "func (a *attrVal) deleteAttrVal(hd *defs, td *defs, id string, codeName string, attrName string, hostname string, bflags attrVal, attrVals ...string) {\n idx := (*a).FindItemIndex(attrVals...)\n for _, i := range *idx {\n if strings.HasPrefix((*a)[i], \"^\"){\n if isSafeDeleteRegex(hd, td, (*a)[i], id, hostname){\n printDeletion(id, codeName, attrName, (*a)[i], \"val\", bflags)\n RemoveItemByIndex(a, i)\n }\n }else{\n printDeletion(id, codeName, attrName, (*a)[i], \"val\", bflags)\n RemoveItemByIndex(a, i)\n\n }\n }\n}", "func (s *Set) Del(value string) error {\n\tif !s.host.rawUTF8 {\n\t\tEncode(&value)\n\t}\n\t// Remove a value from the table\n\t_, err := s.host.db.Exec(fmt.Sprintf(\"DELETE FROM %s WHERE %s = '%s'\", s.table, setCol, value))\n\treturn err\n}", "func (arr *FloatArray) Delete(elem float64) {\n\t// original version got form https://yourbasic.org/golang/delete-element-slice/\n\tvar elemIndex *int\n\n\tfor i, e := range *arr {\n\t\tif e == elem {\n\t\t\telemIndex = &i\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif elemIndex != nil {\n\t\t// Remove the element at index i from a.\n\t\tcopy((*arr)[*elemIndex:], (*arr)[*elemIndex+1:]) // Shift a[i+1:] left one index.\n\t\t*arr = (*arr)[:len(*arr)-1] // Truncate slice.\n\n\t}\n}", "func (this *MyLinkedList) 
DeleteAtIndex(index int) {\n\n\tif index == 0 {\n\t\tif this.next != nil {\n\t\t\tthis.val = this.next.val\n\t\t\tthis.next = this.next.next\n\t\t} else {\n\t\t\tthis.val = -1\n\t\t\tthis.next = nil\n\t\t\tthis.use = false\n\t\t}\n\t\treturn\n\t}\n\ti := 1\n\tnode := this\n\tfor node.next != nil {\n\t\tif i == index {\n\t\t\tnode.next = node.next.next\n\t\t\tbreak\n\t\t}\n\t\ti++\n\t\tnode = node.next\n\t}\n}", "func (e *ObservableEditableBuffer) Delete(q0, q1 OffsetTuple) {\n\tbefore := e.getTagStatus()\n\tdefer e.notifyTagObservers(before)\n\n\te.f.Delete(q0, q1, e.seq)\n\tif e.seq < 1 {\n\t\te.f.FlattenHistory()\n\t}\n\te.deleted(q0, q1)\n}", "func (c ContractValue) Delete(rst itransaction.ReadOnlyStateTrie, inst itransaction.Instruction, trans itransaction.Transaction) (sc []byte, err error) {\n\t// cout = coins\n\n\t// Find the darcID for this instance.\n\t// var darcID darc.ID\n\t// _, _, _, darcID, err = rst.GetValues(inst.InstanceID.Slice())\n\t// if err != nil {\n\t// \treturn\n\t// }\n\n\t// sc = transaction.StateChanges{\n\t// \ttransaction.NewStateChange(transaction.Remove, inst.InstanceID, ContractValueID, nil, darcID),\n\t// }\n\treturn nil, nil\n}", "func (treeNode *TreeNode) Delete(value int) {\n\ttreeNode.remove(value)\n}", "func (client *NginxClient) deleteKeyValuePair(zone string, key string, stream bool) error {\n\tbase := \"http\"\n\tif stream {\n\t\tbase = \"stream\"\n\t}\n\tif zone == \"\" {\n\t\treturn fmt.Errorf(\"zone required\")\n\t}\n\n\t// map[string]string can't have a nil value so we use a different type here.\n\tkeyval := make(map[string]interface{})\n\tkeyval[key] = nil\n\n\tpath := fmt.Sprintf(\"%v/keyvals/%v\", base, zone)\n\terr := client.patch(path, &keyval, http.StatusNoContent)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to remove key values pair for %v/%v zone: %w\", base, zone, err)\n\t}\n\treturn nil\n}", "func (gdt *Array) Remove(idx Int) {\n\targ0 := gdt.getBase()\n\targ1 := idx.getBase()\n\n\tC.go_godot_array_remove(GDNative.api, arg0, arg1)\n}", "func (v *VEBTree) Delete(x int) {\n\t// base cases\n\tif v.min == v.max {\n\t\t// v contains only 1 element\n\t\tv.min = v.null\n\t\tv.max = v.null\n\t\treturn\n\t}\n\tif v.u == 2 {\n\t\tif x == 1 {\n\t\t\tv.max = 0\n\t\t} else {\n\t\t\tv.min = 1\n\t\t}\n\t\treturn\n\t}\n\n\t// recursive case: v has >1 element, and v.u>=4\n\tif x == v.min {\n\t\tfirstCluster := v.summary.Minimum() // first cluster containing an element (v.min is not an a cluster)\n\t\t// set x to the value of the lowest element in that cluser (the new v.min, since we're deleting x)\n\t\tx = v.index(firstCluster, v.cluster[firstCluster].Minimum())\n\t\tv.min = x\n\t\t// having reassigned the variable x to a different element in the cluster and made that element the new min,\n\t\t// we must now delete x from the cluster (since the v.min is not in a cluster)\n\t}\n\t// we must delete x from its cluster, whether the variable x has been reassigned or not\n\tv.cluster[v.high(x)].Delete(v.low(x))\n\n\t// x's cluster may now be empty. 
If it is, we must remove x's cluster from the summary\n\tif v.cluster[v.high(x)].Minimum() == v.null {\n\t\tv.summary.Delete(v.high(x))\n\t\tif x == v.max {\n\t\t\t// check whether x was the summary max, and if it was, we need to update the summary max to the highest remaining element in the summary\n\t\t\tsummaryMax := v.summary.Maximum() // the highest numbered nonempty cluster\n\t\t\tif summaryMax == v.null {\n\t\t\t\t// All clusters are empty, so only the v.min remains in V.\n\t\t\t\tv.max = v.min\n\t\t\t} else {\n\t\t\t\t// set max to the maximum element in the highest-numbered nonempty cluster\n\t\t\t\tv.max = v.index(summaryMax, v.cluster[summaryMax].Maximum())\n\t\t\t}\n\t\t}\n\t} else if x == v.max {\n\t\t// x's cluster did not become empty when x was deleted\n\t\tv.max = v.index(v.high(x), v.cluster[v.high(x)].Maximum())\n\t}\n}", "func (t *Treap) Delete(value string) {\n\tt.root = delete(t.root, value)\n}", "func (t *strideTable[T]) delete(addr uint8, prefixLen int) *T {\n\tidx := prefixIndex(addr, prefixLen)\n\trecordedIdx := t.entries[idx].prefixIndex\n\tif recordedIdx != idx {\n\t\t// Route entry doesn't exist\n\t\treturn nil\n\t}\n\tval := t.entries[idx].value\n\n\tparentIdx := idx >> 1\n\tt.allot(idx, idx, t.entries[parentIdx].prefixIndex, t.entries[parentIdx].value)\n\tt.refs--\n\treturn val\n}", "func (l *list) delete(i int) {\n\n\tif l.begin == nil {\n\t\tpanic(\"list empty\")\n\t}\n\n\t// List over/underflow\n\tif i > l.nodes || i < 0 {\n\t\tpanic(\"not exists\")\n\t}\n\n\t// Removing the last node\n\tif l.nodes == 1 && i == 0 {\n\t\tl.begin = nil\n\t\tl.nodes = 0\n\t\treturn\n\t}\n\n\t// Removing at the end of the list\n\tif i == l.nodes-1 {\n\t\tn := l.begin\n\n\t\tfor j := 0; j < l.nodes-1; j++ {\n\t\t\tn = n.right\n\t\t}\n\n\t\tn.left.right = nil\n\t\tn = nil\n\t\tl.nodes--\n\t\treturn\n\t}\n\n\t// Removing the first node\n\tif i == 0 {\n\t\tn := l.begin.right\n\t\tl.begin = n\n\t\tl.begin.left = nil\n\t\tl.nodes--\n\t\treturn\n\t}\n\n\n\t// Removing in somewhere between\n\tc := l.begin\n\n\tfor j := 0; j < i; j++ {\n\t\tc = c.right\n\t}\n\n\tc.left.right, c.right.left = c.right, c.left\n\tl.nodes--\n}", "func (h *binaryHeap) Remove(val int) {\n\tidx := h.Search(h.doNegative(val))\n\th.removeIdx(idx)\n}", "func (o *BoardsSectionsPosition) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) {\n\tif o == nil {\n\t\treturn 0, errors.New(\"rdb: no BoardsSectionsPosition provided for delete\")\n\t}\n\n\tif err := o.doBeforeDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\targs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), boardsSectionsPositionPrimaryKeyMapping)\n\tsql := \"DELETE FROM `boards_sections_positions` WHERE `id`=?\"\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, sql)\n\t\tfmt.Fprintln(boil.DebugWriter, args...)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, sql, args...)\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"rdb: unable to delete from boards_sections_positions\")\n\t}\n\n\trowsAff, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, errors.Wrap(err, \"rdb: failed to get rows affected by delete for boards_sections_positions\")\n\t}\n\n\tif err := o.doAfterDeleteHooks(ctx, exec); err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn rowsAff, nil\n}", "func (opts *ListOpts) Delete(key string) {\n for i, k := range *opts.values {\n if k == key {\n (*opts.values) = append((*opts.values)[:i], (*opts.values)[i+1:]...)\n return\n }\n }\n}", "func (c *index) Delete(sc 
*stmtctx.StatementContext, m kv.Mutator, indexedValues []types.Datum, h int64, ss kv.Transaction) error {\n\tkey, _, err := c.GenIndexKey(sc, indexedValues, h, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = m.Delete(key)\n\tif ss != nil {\n\t\tswitch c.idxInfo.State {\n\t\tcase model.StatePublic:\n\t\t\t// If the index is in public state, delete this index means it must exists.\n\t\t\tss.SetAssertion(key, kv.Exist)\n\t\tdefault:\n\t\t\tss.SetAssertion(key, kv.None)\n\t\t}\n\t}\n\treturn err\n}", "func delete(n *node, value string) *node {\n\tif n == nil {\n\t\treturn nil\n\t}\n\n\t// delete the node with value after it's been rotated down to a leaf\n\tif n.left == nil && n.right == nil && n.value == value {\n\t\treturn nil\n\t}\n\n\tif n.value == value {\n\t\tn.priority = deletePriority\n\n\t\tif n.right == nil && n.left != nil {\n\t\t\tpivot := rotateRight(n, n.left)\n\t\t\tpivot.right = delete(n, value)\n\t\t\treturn pivot\n\t\t} else if n.left == nil && n.right != nil {\n\t\t\tpivot := rotateLeft(n, n.right)\n\t\t\tpivot.left = delete(n, value)\n\t\t\treturn pivot\n\t\t} else if n.right.priority > n.left.priority {\n\t\t\tpivot := rotateLeft(n, n.right)\n\t\t\tpivot.left = delete(n, value)\n\t\t\treturn pivot\n\t\t} else {\n\t\t\tpivot := rotateRight(n, n.left)\n\t\t\tpivot.right = delete(n, value)\n\t\t\treturn pivot\n\t\t}\n\t}\n\n\tif value < n.value {\n\t\tn.left = delete(n.left, value)\n\t} else {\n\t\tn.right = delete(n.right, value)\n\t}\n\n\treturn n\n}", "func (t *chaining) Delete(key string) {\n\thash := t.hash(key)\n\tlist := t.values[hash]\n\tif list == nil {\n\t\treturn\n\t}\n\tfirst := list.Start().Prev\n\tfor first != list.End() {\n\t\tfirst = first.Next\n\t\tpair := first.Value.(*pair)\n\t\tif pair.key == key {\n\t\t\tlist.Delete(first)\n\t\t\tt.len--\n\t\t\treturn\n\t\t}\n\t}\n}", "func (b *BTree) removeInNodeAtIdx(n *memNode, idx int) *Item {\n\ts := n.node.Items\n\titem := s[idx]\n\tcopy(s[idx:], s[idx+1:])\n\ts[len(s)-1] = nil\n\ts = s[:len(s)-1]\n\treturn item\n}", "func DeleteElement(s []int, i int) []int {\n\tif i < 0 || len(s) <= i {\n\t\tpanic(errors.New(\"[index error]\"))\n\t}\n\t// appendのみの実装\n\tn := make([]int, 0, len(s)-1)\n\tn = append(n, s[:i]...)\n\tn = append(n, s[i+1:]...)\n\treturn n\n}", "func Delete(key string){\n n := keyValue[key]\n n.val = \"\"\n n.hash = \"\"\n keyValue[key] = n\n}", "func (r *Rope) EraseAt(point, n int) (err error) {\n\tif point > r.runes {\n\t\tpoint = r.runes\n\t}\n\tif n >= r.runes-point {\n\t\tn = r.runes - point\n\t}\n\tvar k *knot\n\ts := skiplist{r: r}\n\tif k, err = s.find2(point); err != nil {\n\t\treturn err\n\t}\n\ts.del(k, n)\n\treturn nil\n}", "func deleteRow(index int, settings string) {\n\tfmt.Println(\"# Deleting\")\n\n\tdb := connectDB(settings)\n\tstmt, _ := db.Prepare(\"delete from test where test_id=$1\")\n\n\tres, _ := stmt.Exec(index)\n\n\taffect, _ := res.RowsAffected()\n\n\tfmt.Println(affect, \"rows changed\")\n}", "func (h *Header) Del(key string) {\n\tfor i, ok := h.index(key); ok; i, ok = h.index(key) {\n\t\th.slice = append(h.slice[:i], h.slice[i+2:]...)\n\t}\n}", "func (elems *Elements) delete(pt dvid.Point3d) (deleted *Element, changed bool) {\n\t// Delete any elements at point.\n\tvar cut = -1\n\tfor i, elem := range *elems {\n\t\tif pt.Equals(elem.Pos) {\n\t\t\tcut = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif cut >= 0 {\n\t\tdeleted = (*elems)[cut].Copy()\n\t\tchanged = true\n\t\t(*elems)[cut] = (*elems)[len(*elems)-1] // Delete without preserving order.\n\t\t*elems = 
(*elems)[:len(*elems)-1]\n\t}\n\n\t// Delete any relationships with the point.\n\tif elems.deleteRel(pt) {\n\t\tchanged = true\n\t}\n\treturn\n}", "func (c *Scratch) Delete(key string) string {\n\tc.mu.Lock()\n\tdelete(c.values, key)\n\tc.mu.Unlock()\n\treturn \"\"\n}", "func (k Keeper) DelValue(ctx sdk.Context, accessPath *vm_grpc.VMAccessPath) {\n\tk.modulePerms.AutoCheck(types.PermStorageWrite)\n\n\tk.delValue(ctx, accessPath)\n}", "func (b *box) removeFromPossibleValues(index, val int) {\n\tif b.values[index] == 0 {\n\t\tif _, ok := b.possibleValues[index][val]; ok {\n\t\t\tdelete(b.possibleValues[index], val)\n\t\t\tb.checkAndSet(index)\n\t\t}\n\t}\n}", "func (sset *SSet) Remove(value interface{}) {\n\tkey := sset.f(value)\n\tif index, found := sset.m_index[key]; found {\n\t\tsset.list.Remove(index)\n\t\tsset.m.Remove(key)\n\t\tdelete(sset.m_index, key)\n\t\tsset.fixIndex()\n\t}\n}", "func (sll *SingleLinkedList) Delete(index int) {\n\tsll.Pop(index)\n}", "func (u *UdMap) Del(key string) { delete(u.Data, key) }", "func DeleteKeyValueExpireViaValue(iValue []byte) (err error) {\n\tvar has bool\n\tvar _KeyValueExpire = &KeyValueExpire{Value: iValue}\n\tif has, err = Engine.Get(_KeyValueExpire); (has == true) && (err == nil) {\n\t\tif row, err := Engine.Where(\"value = ?\", iValue).Delete(new(KeyValueExpire)); (err != nil) || (row <= 0) {\n\t\t\treturn err\n\t\t} else {\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn\n}", "func (s *Int64) Del(vals ...int64) {\n\tfor idx := range vals {\n\t\tdelete(s.m, vals[idx])\n\t}\n}", "func (t *Indexed) Remove(x int) {\n\tl, r := t.split(t.root, x-1)\n\t_, xr := t.split(r, x)\n\tt.root = t.merge(l, xr)\n}", "func (r *Range) Delete(args *DeleteRequest, reply *DeleteResponse) {\n\tif err := r.engine.del(args.Key); err != nil {\n\t\treply.Error = err\n\t}\n}", "func DeleteIndex(a interface{}, index int) interface{} {\n\tswitch a.(type) {\n\tcase []int:\n\t\treturn DeleteIndexInt(a.([]int), index)\n\tdefault:\n\t\tpanic(\"not support type\")\n\t}\n}", "func (l *LinkedList) Delete(val int) {\n\t// check if head is exist\n\tif l.Head == nil {\n\t\treturn\n\t}\n\n\t// if element that will be deleted is head, delegate head to next element\n\tif l.Head.Data == val {\n\t\tl.Head = l.Head.Next\n\t\treturn\n\t}\n\n\t// loop until element is found, then removes previos and next connection of deleted element\n\tcurrent := l.Head\n\tfor current.Next != nil {\n\t\tif current.Next.Data == val {\n\t\t\tcurrent.Next = current.Next.Next\n\t\t\treturn\n\t\t}\n\t\tcurrent = current.Next\n\t}\n\n}", "func (buf *ListBuffer) Delete(idx BufferIndex) *error.Error {\n\tinRange, initialized := buf.legalIndex(idx)\n\tif !inRange {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"idx, %d, is out of range for IndexBuffer of length %d.\",\n\t\t\tidx, len(buf.Buffer),\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t} else if !initialized {\n\t\tdesc := fmt.Sprintf(\n\t\t\t\"Item at idx, %d, has the Type value Uninitialized.\", idx,\n\t\t)\n\t\treturn error.New(error.Value, desc)\n\t}\n\n\tbuf.internalDelete(idx)\n\treturn nil\n}", "func del(p *vpoint, pred func(x, y float64, e interface{}) bool) {\n\tfor i := len(p.elems) - 1; i >= 0; i-- {\n\t\tif pred(p.x, p.y, p.elems[i]) {\n\t\t\t// Fast del from slice\n\t\t\tlast := len(p.elems) - 1\n\t\t\tp.elems[i] = p.elems[last]\n\t\t\tp.elems = p.elems[:last]\n\t\t}\n\t}\n\treturn\n}" ]
[ "0.659485", "0.65319824", "0.6467337", "0.64375603", "0.62637955", "0.624936", "0.62493145", "0.62259007", "0.6157027", "0.6154769", "0.61195713", "0.6114202", "0.61121774", "0.61063516", "0.6071629", "0.60097206", "0.60000145", "0.5996969", "0.5984112", "0.59835285", "0.59794474", "0.59761125", "0.5973974", "0.59733963", "0.59214264", "0.59071845", "0.58988786", "0.5896811", "0.5891373", "0.5890474", "0.58843565", "0.58373785", "0.5831718", "0.5813518", "0.58056283", "0.57999325", "0.5777054", "0.5766878", "0.5765745", "0.5756294", "0.5756158", "0.5747992", "0.5717904", "0.5712906", "0.57010823", "0.56922245", "0.5690675", "0.56890815", "0.5689037", "0.56824064", "0.5678969", "0.5677473", "0.5664714", "0.5654404", "0.56477314", "0.56457746", "0.5634834", "0.56312877", "0.5630331", "0.5625775", "0.5615228", "0.56133914", "0.5613257", "0.56127214", "0.56001186", "0.55992705", "0.5594609", "0.55913305", "0.5583381", "0.5579989", "0.5563361", "0.55603385", "0.5553763", "0.55507827", "0.5534888", "0.553446", "0.5531759", "0.55289817", "0.55235744", "0.5512765", "0.55105674", "0.5508213", "0.55077916", "0.5495515", "0.54914933", "0.54879683", "0.5482033", "0.5475835", "0.54693013", "0.54658127", "0.54557496", "0.5453977", "0.54537433", "0.5440388", "0.5437335", "0.54336196", "0.54302037", "0.54295796", "0.5426288", "0.54228705", "0.5421763" ]
0.0
-1
Each method to apply to each element in the linked list a function who receives an interface and don't return any value
func (l *LinkedList) Each(f func(val interface{})) { for n := l.head; n != nil; n = n.Next() { f(n.Value()) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Return(i interface{}) List {\n\treturn Returnf(func() interface{} { return i })\n}", "func (il *IntList) Do(f func(val int)) {\n last := il.last\n for it := il.first.next; it != last; it = it.next {\n f(it.value)\n }\n}", "func (ll *LinkedList) Map(fn func(interface{}, uint) interface{}) *LinkedList {\n\tll.RLock()\n\tdefer ll.RUnlock()\n\n\tnewList := NewLinkedList()\n\n\tcurrentNode := ll.head\n\tcurrentIndex := uint(0)\n\n\tfor currentNode != nil {\n\t\tnewList.PushBack(fn(currentNode.data, currentIndex))\n\t\tcurrentNode = currentNode.next\n\t\tcurrentIndex++\n\t}\n\n\treturn newList\n}", "func List(it Iterable) []interface{} {\n\tres := make([]interface{}, 0)\n\tfor {\n\t\tele, ok := it.Next()\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\tres = append(res, ele)\n\t}\n\treturn res\n}", "func (l *list) Map(sample interface{}, fn EachElementCallback) interface{} {\n\ttypeOf := reflect.TypeOf(sample)\n\n\tif typeOf.Kind() != reflect.Ptr {\n\t\tpanic(\"sample must be pointer\")\n\t}\n\n\tvalueOf := reflect.ValueOf(sample)\n\tvalueElem := valueOf.Elem()\n\n\tl.ForEach(func(index int, el interface{}) {\n\t\tres := fn(index, el)\n\t\tif res != nil {\n\t\t\tvalueElem.Set(reflect.Append(valueElem, reflect.ValueOf(res)))\n\t\t}\n\t})\n\n\treturn valueElem.Interface()\n}", "func (set *lalrSet) each(f func(lalrItem)) {\n\tfor _, items := range(set.items) {\n\t\tfor _, item := range(items) {\n\t\t\tf(item)\n\t\t}\n\t}\n}", "func Iterate(obj Object, fn func(Object) bool) error {\n\t// Some easy cases\n\tswitch x := obj.(type) {\n\tcase Tuple:\n\t\tfor _, item := range x {\n\t\t\tif fn(item) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\tcase *List:\n\t\tfor _, item := range x.Items {\n\t\t\tif fn(item) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\tcase String:\n\t\tfor _, item := range x {\n\t\t\tif fn(String(item)) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\tcase Bytes:\n\t\tfor _, item := range x {\n\t\t\tif fn(Int(item)) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\tdefault:\n\t\titerator, err := Iter(obj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor {\n\t\t\titem, err := Next(iterator)\n\t\t\tif err == StopIteration {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif fn(item) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (list *MapItem) Iterate(fn func(list, item *MapItem) bool) {\n\tfor item := list.next; list != item && fn(list, item); item = item.next {\n\t}\n}", "func (l List) Apply(h *HTML) {\n\tfor _, m := range l {\n\t\tapply(m, h)\n\t}\n}", "func (l *LinkedList) Find(value interface{}) {\n\n}", "func (l *HandoffList) Each(f func(item *Handoff) bool) {\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (l *list) ForEach(fn ForEachCallback) {\n\tfor k, v := range l.elements {\n\t\tfn(k, v)\n\t}\n}", "func (l *List) Apply(fn ApplyFn) *List {\n\tif fn == nil || l == nil {\n\t\treturn l\n\t}\n\tresult := List{}\n\tfor index := range *l {\n\t\tfnRes := fn(*l, index)\n\t\tif fnRes != nil {\n\t\t\tresult = append(result, *fnRes)\n\t\t}\n\t}\n\treturn &result\n}", "func Rule(a ...interface{}) walkerFn {\n\tlog.Println(\"Creating a rule func\")\n\t// Create walker arrayhere\n\n\tfnList := []walkerFn{}\n\tfor i, arg := range a {\n\t\tlog.Printf(\"Get walkerfn For Rule #%d - %s\", i, reflect.TypeOf(a).Elem().Name())\n\t\tfnList = append(fnList, fnFromType(arg))\n\t}\n\n\treturn func(it *Iter) walkerFn {\n\t\tlog.Println(\"Executing a Rule\")\n\t\tlog.Println(\"Cloning iterator\")\n\t\t// Sequencializer 
instead of for loop\n\t\tnit := it.Clone()\n\t\tfor i, fn := range fnList {\n\t\t\tlog.Printf(\"Rule Seq: #%d\", i)\n\t\t\tif fn == nil { // Ignore?\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tfn(nit)\n\t\t\tnit.Next()\n\t\t\t//Search case?\n\t\t}\n\t\treturn nil\n\t}\n}", "func (m *manager) Iter(f func(nh datapath.NodeHandler)) {\n\tm.nodeHandlersMu.RLock()\n\tdefer m.nodeHandlersMu.RUnlock()\n\n\tfor nh := range m.nodeHandlers {\n\t\tf(nh)\n\t}\n}", "func (l *AddOnList) Each(f func(item *AddOn) bool) {\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (r ResourceEventHandlerFuncs) OnList(obj []interface{}) {\n\tif r.ListFunc != nil {\n\t\tr.ListFunc(obj)\n\t}\n}", "func (l LinkedList) Traverse() (ret []interface{}) {\n\tp := l.Head\n\tif p == nil {\n\t\treturn\n\t}\n\n\tfor p != nil {\n\t\tret = append(ret, p.Val)\n\t\tp = p.Next\n\t}\n\n\treturn\n}", "func (lk *link) process(linkIndex int, args []Argument) ([]Argument, error) {\n\tvfn := reflect.ValueOf(lk.fn)\n\tif err := validateFunc(linkIndex, vfn); err != nil {\n\t\treturn nil, err\n\t}\n\n\tvfnType := vfn.Type()\n\tif err := validateArgs(linkIndex, vfnType, args); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// call the function\n\tout := []Argument{}\n\tfor _, o := range vfn.Call(reflectArgs(vfnType, args)) {\n\t\tout = append(out, o.Interface())\n\t}\n\n\t// if the last returned value for the function\n\t// is an error, cast the error and return it\n\t// along with the rest of values\n\tif lk.handleError && doesReturnError(vfnType) {\n\t\terr, _ := out[len(out)-1].(error)\n\t\treturn out[:len(out)-1], err\n\t}\n\n\treturn out, nil\n}", "func (this *LinkedList) Iterate() collection.Iterator {\n\treturn &LinkedListIterator{this, -1}\n}", "func (l *list) Filter(fn EachElementCallback) interface{} {\n\ttypeOf := reflect.TypeOf(l.t)\n\tsliceOf := reflect.SliceOf(typeOf)\n\tvar result = reflect.ValueOf(reflect.New(sliceOf).Interface()).Elem()\n\n\tl.ForEach(func(index int, el interface{}) {\n\t\tres := fn(index, el)\n\t\tif nil != res {\n\t\t\tresult.Set(reflect.Append(result, reflect.ValueOf(res)))\n\t\t}\n\t})\n\n\treturn result.Interface()\n}", "func Head(l List) interface{} {\n\tif l == nil {\n\t\tl = Mzero()\n\t}\n\tif _, ok := l.(unit); ok {\n\t\treturn unit{}\n\t}\n\tlf := l.([2]interface{})[0].(func() interface{})\n\treturn lf()\n}", "func (e Enumerable) Each(f func(item interface{})) {\n\tvar wg sync.WaitGroup\n\twg.Add(len(e))\n\tfor _, item := range e {\n\t\tgo func(item interface{}) {\n\t\t\tf(item)\n\t\t\twg.Done()\n\t\t}(item)\n\t}\n\twg.Wait()\n}", "func (s *SinglyLinkedList) Iterate() {\n\tfor node := s.Head; node != nil; node = node.Next {\n\t\tfmt.Println(node.Val)\n\t}\n\tfmt.Println(\"\")\n}", "func (hq HtmlQ) ForEach(f func(HtmlQ)) HtmlQ {\n\tfor _, node := range hq.nodes {\n\t\tf(HtmlQ{nodes:[]*html.Node{node}})\n\t}\n\n\treturn hq\n}", "func (l *LinkedNode) Traverse(nodeVisitorFn func(data interface{})) {\n\tfor curr := l.next; curr != l; curr = curr.next {\n\t\tnodeVisitorFn(curr.value)\n\t}\n}", "func (n Nodes) ForEach(f func(n *Node) bool) Nodes {\n\tfor _, n := range n.nodes {\n\t\tif !f(n) {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn n\n}", "func (s *Int64) Iterate(fn func(int64)) {\n\tfor val := range s.m {\n\t\tfn(val)\n\t}\n}", "func (l *LDAPIdentityProviderList) Each(f func(item *LDAPIdentityProvider) bool) {\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (ss 
*RoundRobinServerList) Each(f func(net.Addr) error) error {\n\tss.mu.Lock()\n\tdefer ss.mu.Unlock()\n\tfor _, a := range ss.addrs {\n\t\tif err := f(a); nil != err {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (ll *linkedList) display() {\n\tfor tip := ll.head; tip != nil; tip = tip.Next {\n\t\tfmt.Printf(\"NODE: %+v %p \\n\", tip, tip)\n\t}\n\tfmt.Println()\n}", "func MapFn[S ~[]E, E any](list S, fn func(E) E) S {\n\tif list == nil {\n\t\treturn nil\n\t}\n\tdest := make(S, len(list))\n\tfor i, s := range list {\n\t\tdest[i] = fn(s)\n\t}\n\treturn dest\n}", "func (f *LinkedList) printAllElmtForward() () {\n\tif (f.length == 0) {\n\t\tfmt.Printf(\"List empty\")\n\t} else {\n\t\tfmt.Printf(\"Forward (%d): \",f.length)\n\t\tcurrentElmt := f.start\n\t\tfor currentElmt != nil {\n\t\t\tfmt.Printf(\"%d \",currentElmt.body)\n\t\t\tcurrentElmt = currentElmt.next\n\t\t}\n\t\tfmt.Println()\n\t}\n}", "func Each(elements []Value, consumer Consumer) {\n\tfor _, elem := range elements {\n\t\tconsumer(elem)\n\t}\n}", "func (l *List) Visit(f func(n *Node)) {\n\tp := l.head\n\tfor p != nil {\n\t\tf(p)\n\t\tp = p.next\n\t}\n}", "func (collection *RemoteRepoCollection) ForEach(handler func(*RemoteRepo)) {\n\tfor _, r := range collection.list {\n\t\thandler(r)\n\t}\n}", "func (l *errList) Iterate() <-chan Node {\n\tc := make(chan Node)\n\tgo func() {\n\t\titem := l.head\n\t\tfor {\n\t\t\tif item == nil {\n\t\t\t\tclose(c)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tc <- *item\n\t\t\titem = item.next\n\t\t}\n\t}()\n\treturn c\n}", "func Each(be interface {\n\tLister\n\tGetter\n}, t Type, f func(id ID, data []byte, err error)) error {\n\tids, err := be.List(t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, id := range ids {\n\t\tdata, err := be.Get(t, id)\n\t\tif err != nil {\n\t\t\tf(id, nil, err)\n\t\t\tcontinue\n\t\t}\n\n\t\tf(id, data, nil)\n\t}\n\n\treturn nil\n}", "func (s *Uint64) Iterate(fn func(uint64)) {\n\tfor val := range s.m {\n\t\tfn(val)\n\t}\n}", "func Map(f func(interface{}) interface{}, l List) List {\n\tif IsEmpty(l) {\n\t\treturn Mzero()\n\t}\n\telem := l.([2]interface{})\n\tvalFunc := elem[0].(func() interface{})\n\tnext := elem[1].(func() List)\n\tmapperFunc := func() interface{} {\n\t\treturn f(valFunc())\n\t}\n\treturn Consf(mapperFunc, Map(f, next()))\n}", "func (n *matcherNode) apply(f func(c compilable)) {\n\tqueue := make([]*matcherNode, 1)\n\tqueue[0] = n\n\n\tfor len(queue) > 0 {\n\t\tn = queue[0]\n\t\tqueue = queue[1:]\n\t\tfor _, child := range n.children {\n\t\t\tqueue = append(queue, child)\n\t\t}\n\n\t\tif n.data != nil {\n\t\t\tf(n.data)\n\t\t}\n\t}\n}", "func (a *Action) ForEach( f func(int,string) error ) error {\n if a != nil {\n for i,e := range a.actions {\n err := f(i,e)\n if err != nil {\n return err\n }\n }\n }\n\n return nil\n}", "func (l TopicsList) Each(fn func(t string, p int32)) {\n\tfor _, t := range l {\n\t\tfor _, p := range t.Partitions {\n\t\t\tfn(t.Topic, p)\n\t\t}\n\t}\n}", "func (l IntList) Map(fn unaryFunc) IntList {\n\tfor i, v := range l {\n\t\tl[i] = fn(v)\n\t}\n\treturn l\n}", "func Map[T any, U any](items []T, f func(T) U) []U {\n\toutputItems := make([]U, len(items))\n\tfor index, item := range items {\n\t\toutputItems[index] = f(item)\n\t}\n\treturn outputItems\n}", "func (l *SubscriptionRegistrationList) Each(f func(item *SubscriptionRegistration) bool) {\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (l *list) Find(fn EachElementCallback) interface{} {\n\tvar result 
interface{}\n\n\tfor k, v := range l.elements {\n\t\tresult = fn(k, v)\n\t\tif nil != result {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn result\n}", "func (i *SliceIteration) ForEach(must OperationFn, others ...OperationFn) error {\n\tvar unFns []OperationFn\n\tunFns = append(unFns, must)\n\tunFns = append(unFns, others...)\n\tfor idx, item := range i.Items {\n\t\t// we must convert this item to an unstructured instance\n\t\tunItem := i.ItemToUnstruct(idx, item)\n\t\t// execute this item against all the callbacks\n\t\tfor _, fn := range unFns {\n\t\t\terr := fn(unItem)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (r *RemoteList) ForEach(preferredRanges []*net.IPNet, forEach forEachFunc) {\n\tr.Rebuild(preferredRanges)\n\tr.RLock()\n\tfor _, v := range r.addrs {\n\t\tforEach(v, isPreferred(v.IP, preferredRanges))\n\t}\n\tr.RUnlock()\n}", "func (list *DoublyLinkedList) Elements() []interface{} {\n\tvalues := make([]interface{}, list.size, list.size)\n\tfor e, element := 0, list.first; element != nil; e, element = e+1, element.next {\n\t\tvalues[e] = element.value\n\t}\n\treturn values\n}", "func (r *r) List() ([]*internal.Item, error) {\n\treturn nil, fmt.Errorf(\"not implemented yet\")\n}", "func (l *ModelList) Do(block func(name string, value Model)) {\n\tfor _, each := range l.List {\n\t\tblock(each.Name, each.Model)\n\t}\n}", "func eachHelper(bin , data HashElement) {\n var hand * Handle = bin.(*Handle)\n var pair * eachPair = data.(*eachPair)\n pair.fun(hand.obj, pair.data)\n}", "func Filter(nodeList *v1.NodeList, fn func(node v1.Node) bool) {\n\tvar l []v1.Node\n\n\tfor _, node := range nodeList.Items {\n\t\tif fn(node) {\n\t\t\tl = append(l, node)\n\t\t}\n\t}\n\tnodeList.Items = l\n}", "func FuncInterface(_ T1) {}", "func (e *Element) ForEachNext(f func(*Element)) {\n\tfor i := e.next; i != e; i = i.next {\n\t\tif i.list == nil || i != &e.list.root {\n\t\t\tf(i)\n\t\t}\n\t}\n}", "func (l *List) Append(i interface{}) {\n\tfor l.next != nil {\n\t\tl = l.next\n\t}\n\n\tl.next = &List{prev: l, value: i}\n}", "func (sl *ServerList) Each(f func(net.Addr) error) error {\n\tsl.mu.RLock()\n\tdefer sl.mu.RUnlock()\n\n\tfor _, a := range sl.servers {\n\t\tif err := f(a); nil != err {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (list *elemlist) Push(item interface{}) {\n\tlist.elements = append(list.elements, item)\n}", "func (list *LinkedList) remove(index int) interface{}{\n\tfor i:=0; i<index-1;i++{\n\t\tif(list==nil){\n\t\t\treturn errors.New(\"Out of Bounds\")\n\t\t}\n\t\tlist=list.next\n\t}\n\tpivot:=list.next\n\tif pivot==nil{\n\t\treturn nil\n\t}\n\tlist.next = pivot.next\n\tpivot.next=nil\n\treturn pivot.data\n\n\n}", "func (list *Linkedlist) Push(value interface{}) {\n\tlist.head = &Node{value, list.head}\n\tlist.size++\n}", "func (list IntList) Map(fn unaryFunc) IntList {\n\tr := []int{}\n\tfor _, e := range list {\n\t\tr = append(r, fn(e))\n\t}\n\treturn IntList(r)\n}", "func templateFunctionList(elements ...interface{}) []interface{} {\n\treturn elements\n}", "func (l *List) ForEachNext(f func(*Element)) {\n\tfor e := l.root.next; e != &l.root; e = e.next {\n\t\tf(e)\n\t}\n}", "func (s *IntSlicer) Each(fn func(int)) {\n\tfor _, elem := range s.slice {\n\t\tfn(elem)\n\t}\n}", "func (l *LinkedList) List() {\n\tcurrent := l.Head\n\tfor current != nil {\n\t\tfmt.Println(\"%+v\\n\", current)\n\t\tcurrent = current.Next\n\t}\n}", "func (l *AutoscalerResourceLimitsGPULimitList) Each(f func(item *AutoscalerResourceLimitsGPULimit) bool) 
{\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (q *memQueue) Iter(fn func(int, []byte) bool) error {\n\tq.mu.Lock()\n\tdefer q.mu.Unlock()\n\n\tfor i, item := range q.q {\n\t\tif fn(i, item) {\n\t\t\tbreak\n\t\t}\n\t}\n\n\treturn nil\n}", "func _[T interface{ ~func(string) int }](f T) int {\n\treturn f(\"hello\")\n}", "func (p *SliceOfMap) EachIE(action func(int, O) error) (ISlice, error) {\n\tvar err error\n\tif p == nil {\n\t\treturn p, err\n\t}\n\tfor i := range *p {\n\t\tif err = action(i, (*p)[i]); err != nil {\n\t\t\treturn p, err\n\t\t}\n\t}\n\treturn p, err\n}", "func (s *BaseTdatListener) EnterEnumerableFunc(ctx *EnumerableFuncContext) {}", "func ForEach[T any](seq Seq[T], f func(T) error) error {\n\tfor has := seq != nil; has; has = seq.Next() {\n\t\tif err := f(seq.Value()); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (p Peers) ForEach(f func(peer string) error) error {\n\tfor _, p := range p.p {\n\t\tif err := f(p); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func IterList(l []interface{}) Iterable {\n\tres := make(chan interface{})\n\tgo func() {\n\t\tfor _, ele := range l {\n\t\t\tres <- ele\n\t\t}\n\t\tclose(res)\n\t}()\n\treturn iterable(res)\n}", "func (d *Devices) Each(f func(Device)) {\n\tfor _, device := range *d {\n\t\tf(device)\n\t}\n}", "func (e Elements) Map(f MapFunc) Elements {\n\tfor i := range e {\n\t\te[i] = f(e[i])\n\t}\n\treturn e\n}", "func (self *Rectangle) FloorAllI(args ...interface{}) {\n self.Object.Call(\"floorAll\", args)\n}", "func mapi(f func(int) int, xs []int) []int {\n\tnxs := make([]int, len(xs))\n\tfor i, n := range xs {\n\t\tnxs[i] = f(n)\n\t}\n\treturn nxs\n}", "func EachID(be Lister, t Type, f func(ID)) error {\n\tids, err := be.List(t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, id := range ids {\n\t\tf(id)\n\t}\n\n\treturn nil\n}", "func (i *Inventory) List(omit ...thing.Interface) (list []thing.Interface) {\n\n\t// Don't modify passed slice's elements\n\tomitted := make([]thing.Interface, len(omit))\n\tcopy(omitted, omit)\n\nOMIT:\n\tfor _, thing := range i.contents {\n\t\tfor i, o := range omitted {\n\t\t\tif thing.IsAlso(o) {\n\t\t\t\t// Whittle down omitted so there is less to check each time\n\t\t\t\tomitted = append(omitted[:i], omitted[i+1:]...)\n\t\t\t\tcontinue OMIT\n\t\t\t}\n\t\t}\n\t\tlist = append(list, thing)\n\t}\n\n\treturn\n}", "func PrintList(head *Node) {\n\trunner := head\n\tfor runner != nil {\n\t\tfmt.Printf(\"%d\\t\", runner.Val)\n\t\trunner = runner.Next\n\t}\n\tfmt.Printf(\"\\n\")\n}", "func (f *Filter) Call(line int, i *Interpreter, arguments []interface{}) (interface{}, error) {\n\tfun, ok := arguments[0].(Function)\n\tif !ok {\n\t\treturn nil, &executionError{line, \"<filter> expects a function as first parameter\"}\n\t}\n\n\tif fun.Arity() != 1 {\n\t\treturn nil, &executionError{line, \"<filter> expects a function which accepts one argument\"}\n\t}\n\n\tlist, ok := arguments[1].(List)\n\tif !ok {\n\t\treturn nil, &executionError{line, \"<filter> expects a list as second parameter\"}\n\t}\n\n\tvar filteredElements []interface{}\n\n\trestOfList := list\n\n\tfor restOfList.Len() > 0 {\n\t\tvar args []interface{}\n\t\targs = append(args, restOfList.First())\n\n\t\tresponse, err := fun.Call(line, i, args)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif isTruthy(response) {\n\t\t\tfilteredElements = append(filteredElements, restOfList.First())\n\t\t}\n\n\t\trestOfList = 
restOfList.Rest()\n\t}\n\n\treturn NewArrayList(filteredElements), nil\n}", "func funcIrate(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\treturn instantValue(vals, enh.Out, true)\n}", "func Cons(i interface{}, l List) List {\n\treturn Consf(func() interface{} { return i }, l)\n}", "func _[T interface{ ~func() }](f T) {\n\tf()\n\tgo f()\n}", "func (sll *SingleLinkedList) Append(element interface{}) {\n\tsll.Insert(sll.length, element)\n}", "func (l *Tree) Iterate(absoluteLoser Elem) []Elem {\n\tvar res = make([]Elem, l.count)\n\tres[0] = l.p[l.r[0]]\n\tfor i := 1; i < l.count; i++ {\n\t\t_, e := l.Update(l.r[0], absoluteLoser)\n\t\tres[i] = e\n\t}\n\treturn res\n}", "func Walk(node Node, fn WalkFunc) Node {\r\n\trewritten, ok := fn(node)\r\n\tif !ok {\r\n\t\treturn rewritten\r\n\t}\r\n\r\n\tswitch n := node.(type) {\r\n\tcase *File:\r\n\t\tn.Node = Walk(n.Node, fn)\r\n\tcase *ObjectList:\r\n\t\tfor i, item := range n.Items {\r\n\t\t\tn.Items[i] = Walk(item, fn).(*ObjectItem)\r\n\t\t}\r\n\tcase *ObjectKey:\r\n\t\t// nothing to do\r\n\tcase *ObjectItem:\r\n\t\tfor i, k := range n.Keys {\r\n\t\t\tn.Keys[i] = Walk(k, fn).(*ObjectKey)\r\n\t\t}\r\n\r\n\t\tif n.Val != nil {\r\n\t\t\tn.Val = Walk(n.Val, fn)\r\n\t\t}\r\n\tcase *LiteralType:\r\n\t\t// nothing to do\r\n\tcase *ListType:\r\n\t\tfor i, l := range n.List {\r\n\t\t\tn.List[i] = Walk(l, fn)\r\n\t\t}\r\n\tcase *ObjectType:\r\n\t\tn.List = Walk(n.List, fn).(*ObjectList)\r\n\tdefault:\r\n\t\t// should we panic here?\r\n\t\tfmt.Printf(\"unknown type: %T\\n\", n)\r\n\t}\r\n\r\n\tfn(nil)\r\n\treturn rewritten\r\n}", "func Test_List(t *testing.T) {\n\n\tl := list.New()\n\tfor i := 0; i < 10; i++ {\n\t\tl.PushBack(fmt.Sprintf(\"data_%d\", i))\n\t}\n\n\tfor e := l.Front(); e != nil; e = e.Next() {\n\t\tt.Log(\"::::>>>>\", e)\n\t}\n\n\tfor e := l.Front(); e != nil; e = e.Next() {\n\t\tt.Log(\"---->>>>\", e)\n\t}\n\n}", "func enlist(a *apl.Apl, _, R apl.Value) (apl.Value, error) {\n\tr, ok := R.(apl.List)\n\tif ok == false {\n\t\treturn apl.List{R}, nil // TODO: copy\n\t}\n\n\tvar f func(l apl.List) apl.List\n\tf = func(l apl.List) apl.List {\n\t\tvar res apl.List\n\t\tfor _, e := range l {\n\t\t\tif v, ok := e.(apl.List); ok {\n\t\t\t\tv = f(v)\n\t\t\t\tres = append(res, v...) 
// TODO: copy\n\t\t\t} else {\n\t\t\t\tres = append(res, e) // TODO: copy\n\t\t\t}\n\t\t}\n\t\treturn res\n\t}\n\treturn f(r), nil\n}", "func walk(data *interface{}, callback scrubCallback) {\n\tswitch v := (*data).(type) {\n\tcase map[interface{}]interface{}:\n\t\twalkHash(v, callback)\n\tcase []interface{}:\n\t\twalkSlice(v, callback)\n\t}\n}", "func (ms Matrices) ForEachNoParameter(fn func(*Matrix)) {\n\tvar inner func(*Matrix, Matrix)\n\tinner = func(m1 *Matrix, _ Matrix) {\n\t\tfn(m1)\n\t}\n\tms.ForEach(inner, Matrix{})\n}", "func MapM(f func(interface{}), l List) {\n\tadapter := func(i interface{}) interface{} {\n\t\tf(i)\n\t\treturn nil\n\t}\n\tSeq(Map(adapter, l))\n}", "func (s *BasevhdlListener) EnterInterface_list(ctx *Interface_listContext) {}", "func (slist *SingleLinkedList) Iter() Iterator {\n\treturn Iterator{list: slist, index: 0, currentNode: nil}\n}", "func (a Slice[T]) Each(block func(T)) {\n\tfor _, o := range a {\n\t\tblock(o)\n\t}\n}", "func (l *AddonStatusList) Each(f func(item *AddonStatus) bool) {\n\tif l == nil {\n\t\treturn\n\t}\n\tfor _, item := range l.items {\n\t\tif !f(item) {\n\t\t\tbreak\n\t\t}\n\t}\n}", "func filterPlugged(links []netlink.Link) []netlink.Link {\n\tout := links[:0]\n\tfor _, l := range links {\n\n\t\tif err := netlink.LinkSetUp(l); err != nil {\n\t\t\tlog.Info().Str(\"interface\", l.Attrs().Name).Msg(\"failed to bring interface up\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif !ifaceutil.IsVirtEth(l.Attrs().Name) && !ifaceutil.IsPluggedTimeout(l.Attrs().Name, time.Second*5) {\n\t\t\tlog.Info().Str(\"interface\", l.Attrs().Name).Msg(\"interface is not plugged in, skipping\")\n\t\t\tcontinue\n\t\t}\n\n\t\tout = append(out, l)\n\t}\n\treturn out\n}", "func (list *DoublyLinkedList) Append(values ...interface{}) {\n\tlist.Add(values...)\n}", "func (v Int32Vec) ForEach(con func(i int, e int32) int32) {\n\tfor i, e := range v {\n\t\tv[i] = con(i, e)\n\t}\n}" ]
[ "0.58451504", "0.5723475", "0.5484381", "0.53215957", "0.5320025", "0.52799416", "0.5208401", "0.51634055", "0.51432884", "0.5054827", "0.5047837", "0.50365186", "0.5033343", "0.5012246", "0.49889705", "0.49309152", "0.49179965", "0.49121413", "0.4911077", "0.48988366", "0.48850003", "0.4881072", "0.4862533", "0.4860362", "0.4843344", "0.48414811", "0.48173034", "0.4815247", "0.4804103", "0.47927877", "0.47804406", "0.47712123", "0.47621378", "0.47620863", "0.47412002", "0.47295034", "0.47294214", "0.4729282", "0.47096473", "0.4696908", "0.46745732", "0.46733755", "0.46724454", "0.46641245", "0.46524164", "0.4652356", "0.46419597", "0.46329588", "0.46190608", "0.46105775", "0.46015882", "0.4597182", "0.45931754", "0.4589395", "0.458631", "0.45849654", "0.45835128", "0.45830435", "0.45816526", "0.45782122", "0.45638126", "0.455834", "0.4556797", "0.45564407", "0.45557135", "0.45507202", "0.45320946", "0.45304057", "0.45093918", "0.45074177", "0.4504535", "0.44961503", "0.4492143", "0.44873458", "0.44857833", "0.44775558", "0.4475185", "0.447139", "0.4459772", "0.44576877", "0.44573563", "0.4451301", "0.44506928", "0.44294867", "0.44292438", "0.44262365", "0.44247922", "0.44212165", "0.44177222", "0.44171232", "0.4415989", "0.441539", "0.44142982", "0.44142807", "0.44133943", "0.44115615", "0.43995345", "0.43979096", "0.43959987", "0.4383973" ]
0.58705163
0
NewDiffCmd creates a new DiffCmd instance.
func NewDiffCmd(releaseRepo pull.Release, release1, release2 string) *DiffCmd { return &DiffCmd{ releaseRepo: releaseRepo, release1: release1, release2: release2, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewCmdDiff(f util.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command {\n\toptions := diff.NewDiffOptions(ioStreams)\n\tcmd := &cobra.Command{\n\t\tUse: \"diff DIRECTORY\",\n\t\tDisableFlagsInUseLine: true,\n\t\tShort: i18n.T(\"Diff local config against cluster applied version\"),\n\t\tArgs: cobra.MaximumNArgs(1),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tutil.CheckErr(Initialize(options, f, args))\n\t\t\tutil.CheckErr(options.Run())\n\t\t},\n\t}\n\n\treturn cmd\n}", "func NewApplicationDiffCommand(clientOpts *argocdclient.ClientOptions) *cobra.Command {\n\tvar command = &cobra.Command{\n\t\tUse: \"diff\",\n\t\tShort: fmt.Sprintf(\"%s app diff APPNAME\", cliName),\n\t\tRun: func(c *cobra.Command, args []string) {\n\t\t\tif len(args) == 0 {\n\t\t\t\tc.HelpFunc()(c, args)\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tconn, appIf := argocdclient.NewClientOrDie(clientOpts).NewApplicationClientOrDie()\n\t\t\tdefer util.Close(conn)\n\t\t\tappName := args[0]\n\t\t\tapp, err := appIf.Get(context.Background(), &application.ApplicationQuery{Name: appName})\n\t\t\terrors.CheckError(err)\n\t\t\ttargetObjs, err := app.Status.ComparisonResult.TargetObjects()\n\t\t\terrors.CheckError(err)\n\t\t\tliveObjs, err := app.Status.ComparisonResult.LiveObjects()\n\t\t\terrors.CheckError(err)\n\t\t\tdiffResults, err := diff.DiffArray(targetObjs, liveObjs)\n\t\t\terrors.CheckError(err)\n\t\t\tfor i := 0; i < len(targetObjs); i++ {\n\t\t\t\ttargetObj := targetObjs[i]\n\t\t\t\tdiffRes := diffResults.Diffs[i]\n\t\t\t\tfmt.Printf(\"===== %s %s ======\\n\", targetObj.GetKind(), targetObj.GetName())\n\t\t\t\tif diffRes.Modified {\n\t\t\t\t\tformatOpts := formatter.AsciiFormatterConfig{\n\t\t\t\t\t\tColoring: terminal.IsTerminal(int(os.Stdout.Fd())),\n\t\t\t\t\t}\n\t\t\t\t\tout, err := diffResults.Diffs[i].ASCIIFormat(targetObj, formatOpts)\n\t\t\t\t\terrors.CheckError(err)\n\t\t\t\t\tfmt.Println(out)\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t}\n\treturn command\n}", "func NewMockDiff(ctrl *gomock.Controller) *MockDiff {\n\tmock := &MockDiff{ctrl: ctrl}\n\tmock.recorder = &MockDiffMockRecorder{mock}\n\treturn mock\n}", "func New(name string, pattern string, command string, args ...string) *Cmd {\n\treturn &Cmd{\n\t\tName: name,\n\t\tCommand: command,\n\t\tArgs: args,\n\t\tPattern: pattern,\n\t}\n}", "func newCreateCmd() *cobra.Command {\n\tcreateCmd := cobra.Command{\n\t\tUse: \"create\",\n\t\tShort: `Create a new verless object`,\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\treturn cmd.Help()\n\t\t},\n\t}\n\n\tcreateCmd.AddCommand(newCreateProjectCmd())\n\tcreateCmd.AddCommand(newCreateThemeCmd())\n\tcreateCmd.AddCommand(newCreateFile())\n\n\treturn &createCmd\n}", "func (z *zfsctl) Diff(ctx context.Context, name, options string, target string) *execute {\n\targs := []string{\"diff\"}\n\tif len(options) > 0 {\n\t\targs = append(args, options)\n\t}\n\targs = append(args, name)\n\tif len(target) > 0 {\n\t\targs = append(args, target)\n\t}\n\treturn &execute{ctx: ctx, name: z.cmd, args: args}\n}", "func NewCmd(cfg *CmdCfg) (Cmd, error) {\n\terr := validator.Validate(cfg)\n\tif err != nil {\n\t\treturn Cmd{}, errors.Wrap(err, \"error validating config\")\n\t}\n\tif cfg.Time.IsZero() {\n\t\tcfg.Time = time.Now().UTC()\n\t}\n\n\tid, err := uuid.NewRandom()\n\tif err != nil {\n\t\treturn Cmd{}, errors.Wrap(err, \"error generating event-id\")\n\t}\n\n\tvar dataBytes []byte\n\tswitch v := cfg.Data.(type) {\n\tcase []byte:\n\t\tdataBytes = v\n\tdefault:\n\t\tdataBytes, err = 
json.Marshal(cfg.Data)\n\t\tif err != nil {\n\t\t\treturn Cmd{}, errors.Wrap(err, \"error json-marshalling data\")\n\t\t}\n\t}\n\n\treturn Cmd{\n\t\tid: id.String(),\n\t\tcorrelationKey: cfg.CorrelationKey,\n\n\t\ttime: cfg.Time,\n\t\taction: cfg.Action,\n\t\tdata: dataBytes,\n\t}, nil\n}", "func NewDiffEnvCmd(out io.Writer) *cobra.Command {\n\te := &diffEnvCmd{out: out}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"env\",\n\t\tShort: \"Show differences in Helm releases between environments (Kubernetes namespace)\",\n\t\tLong: ``,\n\t\tArgs: func(cmd *cobra.Command, args []string) error {\n\t\t\tif e.nameLeft == \"\" {\n\t\t\t\treturn errors.New(\"name-left can not be empty\")\n\t\t\t}\n\t\t\tif e.nameRight == \"\" {\n\t\t\t\treturn errors.New(\"name-right can not be empty\")\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\treleasesLeft, err := utils.GetInstalledReleases(utils.GetInstalledReleasesOptions{\n\t\t\t\tKubeContext: e.kubeContextLeft,\n\t\t\t\tNamespace: e.nameLeft,\n\t\t\t\tIncludeFailed: false,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t\treleasesRight, err := utils.GetInstalledReleases(utils.GetInstalledReleasesOptions{\n\t\t\t\tKubeContext: e.kubeContextRight,\n\t\t\t\tNamespace: e.nameRight,\n\t\t\t\tIncludeFailed: false,\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\n\t\t\tdiffOptions := utils.DiffOptions{\n\t\t\t\tKubeContextLeft: e.kubeContextLeft,\n\t\t\t\tKubeContextRight: e.kubeContextRight,\n\t\t\t\tEnvNameLeft: e.nameLeft,\n\t\t\t\tEnvNameRight: e.nameRight,\n\t\t\t\tReleasesSpecLeft: releasesLeft,\n\t\t\t\tReleasesSpecRight: releasesRight,\n\t\t\t\tOutput: e.output,\n\t\t\t}\n\t\t\tutils.PrintDiff(diffOptions)\n\t\t},\n\t}\n\n\tf := cmd.Flags()\n\n\tf.StringVar(&e.nameLeft, \"name-left\", os.Getenv(\"ORCA_NAME_LEFT\"), \"name of left environment to compare. Overrides $ORCA_NAME_LEFT\")\n\tf.StringVar(&e.nameRight, \"name-right\", os.Getenv(\"ORCA_NAME_RIGHT\"), \"name of right environment to compare. Overrides $ORCA_NAME_RIGHT\")\n\tf.StringVar(&e.kubeContextLeft, \"kube-context-left\", os.Getenv(\"ORCA_KUBE_CONTEXT_LEFT\"), \"name of the left kubeconfig context to use. Overrides $ORCA_KUBE_CONTEXT_LEFT\")\n\tf.StringVar(&e.kubeContextRight, \"kube-context-right\", os.Getenv(\"ORCA_KUBE_CONTEXT_RIGHT\"), \"name of the right kubeconfig context to use. Overrides $ORCA_KUBE_CONTEXT_RIGHT\")\n\tf.StringVarP(&e.output, \"output\", \"o\", utils.GetStringEnvVar(\"ORCA_OUTPUT\", \"yaml\"), \"output format (yaml, table). Overrides $ORCA_OUTPUT\")\n\n\treturn cmd\n}", "func NewCmd(command string, args ...string) *Cmd {\n\treturn &Cmd{\n\t\tcommand: command,\n\t\targs: args,\n\t}\n}", "func NewCmd(db *bolt.DB) *cobra.Command {\n\topts := editOptions{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"edit <name>\",\n\t\tShort: \"Edit an entry\",\n\t\tLong: `Edit an entry. 
\n\t\t\nIf the name is edited, Kure will remove the entry with the old name and create one with the new name.`,\n\t\tExample: example,\n\t\tArgs: cmdutil.MustExist(db, cmdutil.Entry),\n\t\tPreRunE: auth.Login(db),\n\t\tRunE: runEdit(db, &opts),\n\t\tPostRun: func(cmd *cobra.Command, args []string) {\n\t\t\t// Reset variables (session)\n\t\t\topts = editOptions{}\n\t\t},\n\t}\n\n\tcmd.Flags().BoolVarP(&opts.interactive, \"it\", \"i\", false, \"use the text editor\")\n\n\treturn cmd\n}", "func New(cmd string) *Cmd {\n\tvar args []string\n\tcmds, err := shellquote.Split(cmd)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tname := cmds[0]\n\tfor _, arg := range cmds[1:] {\n\t\targs = append(args, arg)\n\t}\n\treturn &Cmd{Name: name, Args: args}\n}", "func NewCreateCmd(globalFlags *flags.GlobalFlags) *cobra.Command {\n\tcmd := &CreateCmd{\n\t\tGlobalFlags: globalFlags,\n\t\tlog: log.GetInstance(),\n\t}\n\n\tcobraCmd := &cobra.Command{\n\t\tUse: \"create\",\n\t\tShort: \"Create a new virtual cluster\",\n\t\tLong: `\n#######################################################\n################### vcluster create ###################\n#######################################################\nCreates a new virtual cluster\n\nExample:\nvcluster create test --namespace test\n#######################################################\n\t`,\n\t\tArgs: cobra.ExactArgs(1),\n\t\tRunE: func(cobraCmd *cobra.Command, args []string) error {\n\t\t\t// Check for newer version\n\t\t\tupgrade.PrintNewerVersionWarning()\n\n\t\t\treturn cmd.Run(cobraCmd, args)\n\t\t},\n\t}\n\n\tcobraCmd.Flags().StringVar(&cmd.ChartVersion, \"chart-version\", upgrade.GetVersion(), \"The virtual cluster chart version to use\")\n\tcobraCmd.Flags().StringVar(&cmd.ChartName, \"chart-name\", \"vcluster\", \"The virtual cluster chart name to use\")\n\tcobraCmd.Flags().StringVar(&cmd.ChartRepo, \"chart-repo\", \"https://charts.loft.sh\", \"The virtual cluster chart repo to use\")\n\tcobraCmd.Flags().StringVar(&cmd.ReleaseValues, \"release-values\", \"\", \"Path where to load the virtual cluster helm release values from\")\n\tcobraCmd.Flags().StringVar(&cmd.K3SImage, \"k3s-image\", \"\", \"If specified, use this k3s image version\")\n\tcobraCmd.Flags().StringSliceVarP(&cmd.ExtraValues, \"extra-values\", \"f\", []string{}, \"Path where to load extra helm values from\")\n\tcobraCmd.Flags().BoolVar(&cmd.CreateNamespace, \"create-namespace\", true, \"If true the namespace will be created if it does not exist\")\n\tcobraCmd.Flags().BoolVar(&cmd.DisableIngressSync, \"disable-ingress-sync\", false, \"If true the virtual cluster will not sync any ingresses\")\n\tcobraCmd.Flags().BoolVar(&cmd.CreateClusterRole, \"create-cluster-role\", false, \"If true a cluster role will be created to access nodes, storageclasses and priorityclasses\")\n\tcobraCmd.Flags().BoolVar(&cmd.Expose, \"expose\", false, \"If true will create a load balancer service to expose the vcluster endpoint\")\n\tcobraCmd.Flags().BoolVar(&cmd.Connect, \"connect\", false, \"If true will run vcluster connect directly after the vcluster was created\")\n\treturn cobraCmd\n}", "func NewCommit() (cli.Command, error) {\n\treturn CommitCmd{}, nil\n}", "func NewCmdCreate() (*cobra.Command, *Options) {\n\to := &Options{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"create\",\n\t\tShort: \"Create a new TestRun resource to record the test case resources\",\n\t\tLong: cmdLong,\n\t\tExample: fmt.Sprintf(cmdExample, root.BinaryName),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := 
o.Run()\n\t\t\thelper.CheckErr(err)\n\t\t},\n\t}\n\tcmd.Flags().StringVarP(&o.Namespace, \"ns\", \"n\", \"\", \"the namespace to filter the TestRun resources\")\n\tcmd.Flags().StringVarP(&o.TestGitURL, \"test-url\", \"u\", \"\", \"the git URL of the test case which is used to remove the resources\")\n\tcmd.Flags().StringVarP(&o.RemoveScript, \"remove-script\", \"s\", \"bin/destroy.sh\", \"the script in the test git url used to remove the resources\")\n\treturn cmd, o\n}", "func NewCmd(name, fullName string) *cobra.Command {\n\treturn &cobra.Command{\n\t\tUse: name,\n\t\tShort: \"Print the version information\",\n\t\tLong: \"Print the version information\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tfmt.Printf(\"kam version %s\\n\", Version)\n\t\t},\n\t}\n}", "func NewCmd() *Repl {\n\treturn &Repl{\n\t\tansi: NewANSI(false),\n\t\tconfig: cmdConfig(),\n\t\tevaluator: interpreter.New(),\n\t}\n}", "func NewCmd(cxt *command.Context) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"touch\",\n\t\tShort: \"Make the service catalog attempt to re-provision an instance\",\n\t\tExample: \"svcat touch instance wordpress-mysql-instance\",\n\t}\n\tcmd.AddCommand(newTouchInstanceCmd(cxt))\n\treturn cmd\n}", "func New(c *Cfg) Cmd {\n\tif c.New != nil {\n\t\treturn c.New()\n\t}\n\treturn (*nilCmd)(c)\n}", "func NewCreateCmd() *cobra.Command {\n\treturn &cobra.Command{\n\t\tUse: \"create SUBCOMMAND\",\n\t\tShort: \"Create objects\",\n\t\tLong: `Create objects like users, etc.`,\n\t}\n}", "func NewDeleteCmd(globalFlags *flags.GlobalFlags) *cobra.Command {\n\tdescription := `\n#######################################################\n##################### loft delete #####################\n#######################################################\n\t`\n\tif upgrade.IsPlugin == \"true\" {\n\t\tdescription = `\n#######################################################\n##################### loft delete #####################\n#######################################################\n\t`\n\t}\n\tc := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"Deletes loft resources\",\n\t\tLong: description,\n\t\tArgs: cobra.NoArgs,\n\t}\n\n\tc.AddCommand(NewSpaceCmd(globalFlags))\n\tc.AddCommand(NewVirtualClusterCmd(globalFlags))\n\treturn c\n}", "func NewCmd(use string) Builder {\n\treturn &builder{\n\t\tcmd: cobra.Command{\n\t\t\tUse: use,\n\t\t},\n\t}\n}", "func (c *CLI) NewCmd(command string, args ...string) icmd.Cmd {\n\treturn icmd.Cmd{\n\t\tCommand: append([]string{command}, args...),\n\t\tEnv: append(c.BaseEnvironment(), c.env...),\n\t}\n}", "func NewCmdDiagnostic(out io.Writer) *cobra.Command {\n\topts := &diagsOpts{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"diagnose\",\n\t\tShort: \"Collects diagnostics about the nodes in the cluster\",\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tif len(args) != 0 {\n\t\t\t\treturn fmt.Errorf(\"Unexpected args: %v\", args)\n\t\t\t}\n\n\t\t\treturn doDiagnostics(out, opts)\n\t\t},\n\t}\n\n\t// PersistentFlags\n\taddPlanFileFlag(cmd.PersistentFlags(), &opts.planFilename)\n\tcmd.Flags().BoolVar(&opts.verbose, \"verbose\", false, \"enable verbose logging from the installation\")\n\tcmd.Flags().StringVarP(&opts.outputFormat, \"output\", \"o\", \"simple\", \"installation output format (options \\\"simple\\\"|\\\"raw\\\")\")\n\n\treturn cmd\n}", "func NewDiff(ns string) Diff {\n\treturn &graph{\n\t\tns: ns,\n\t\tdependencies: make(map[*Container][]*dependency),\n\t}\n}", "func NewCmd(o *Options) *cobra.Command {\n\tc := command{\n\t\tCommand: 
cli.Command{Options: o.Options},\n\t\topts: o,\n\t}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"new-lambda\",\n\t\tShort: \"New local Lambda Function\",\n\t\tLong: `Creates a new local lambda function setup to start development`,\n\t\tRunE: func(_ *cobra.Command, args []string) error { return c.Run(args) },\n\t}\n\n\tcmd.Args = cobra.ExactArgs(1)\n\n\tcmd.Flags().StringVarP(&o.Namespace, \"namespace\", \"n\", \"default\", \"Namespace to bind\")\n\tcmd.Flags().BoolVar(&o.Expose, \"expose\", false, \"Create the namespace if not existing\")\n\tcmd.Flags().StringVar(&o.ClusterDomain, \"cluster-domain\", \"\", \"Cluster Domain of your cluster\")\n\n\treturn cmd\n}", "func NewDeploymentCmd(cli *client.Cli) *cobra.Command {\n\n\tvar DeploymentCmd = &cobra.Command{\n\t\tUse: \"deployment COMMAND\",\n\t\tShort: \"Manage deployments\",\n\t}\n\n\tDeploymentCmd.AddCommand(newGetCommand(cli))\n\tDeploymentCmd.AddCommand(newCreateCommand(cli))\n\tDeploymentCmd.AddCommand(newPromoteCommand(cli))\n\n\treturn DeploymentCmd\n}", "func NewCmd(id, command string, args ...string) *CmdMsg {\n\tmsg := &CmdMsg{}\n\tmsg.ID = id\n\tmsg.Type = \"cmd\"\n\tmsg.Command = command\n\tmsg.Args = args\n\treturn msg\n}", "func newFileTransferCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"transfer\",\n\t\tShort: \"run file Transfer gNOI RPC\",\n\t\tAliases: []string{\"trans\", \"tr\"},\n\t\tPreRun: func(cmd *cobra.Command, args []string) {\n\t\t\tgApp.Config.SetLocalFlagsFromFile(cmd)\n\t\t},\n\t\tRunE: gApp.RunEFileTransfer,\n\t\tSilenceUsage: true,\n\t}\n\tgApp.InitFileTransferFlags(cmd)\n\treturn cmd\n}", "func NewCmd(db *bolt.DB) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"card\",\n\t\tShort: \"Card operations\",\n\t\tExample: example,\n\t}\n\n\tcmd.AddCommand(cadd.NewCmd(db, os.Stdin), ccopy.NewCmd(db), cedit.NewCmd(db), cls.NewCmd(db), crm.NewCmd(db, os.Stdin))\n\n\treturn cmd\n}", "func NewCmdUpgrade() (*cobra.Command, *Options) {\n\to := &Options{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"upgrade\",\n\t\tAliases: []string{\"update\"},\n\t\tShort: \"Upgrades the GitOps git repository with the latest configuration and versions the Version Stream\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := o.Run()\n\t\t\thelper.CheckErr(err)\n\t\t},\n\t}\n\to.Options.AddFlags(cmd)\n\to.HelmfileResolve.AddFlags(cmd, \"\")\n\treturn cmd, o\n}", "func NewCmdCreate(out io.Writer) *cobra.Command {\n\tcf := &run.CreateFlags{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"create <image>\",\n\t\tShort: \"Create a new VM without starting it\",\n\t\tLong: dedent.Dedent(`\n\t\t\tCreate a new VM by combining the given image and kernel.\n\t\t\tVarious VM tunables can be set during creation by using\n\t\t\tthe flags for this command. The image and kernel are\n\t\t\tmatched by prefix based on their ID and name.\n\t\t\t\n\t\t\tIf the name flag (-n, --name) is not specified,\n\t\t\tthe VM is given a random name. 
Using the copy files\n\t\t\tflag (-f, --copy-files), additional files can be added to\n\t\t\tthe VM during creation with the syntax /host/path:/vm/path.\n\n\t\t\tExample usage:\n\t\t\t\t$ ignite create my-image my-kernel \\\n\t\t\t\t\t--name my-vm \\\n\t\t\t\t\t--cpus 2 \\\n\t\t\t\t\t--memory 2048 \\\n\t\t\t\t\t--size 6GB\n\t\t`),\n\t\tArgs: cobra.ExactArgs(1),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terrutils.Check(func() error {\n\t\t\t\tco, err := cf.NewCreateOptions(loader.NewResLoader(), args[0])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\treturn run.Create(co)\n\t\t\t}())\n\t\t},\n\t}\n\n\taddCreateFlags(cmd.Flags(), cf)\n\treturn cmd\n}", "func newCmd(cmd string, arg ...string) *exec.Cmd {\n\tc := exec.Command(cmd, arg...)\n\tc.Stdout = os.Stdout\n\tc.Stderr = os.Stderr\n\treturn c\n}", "func NewConnectCmd(f factory.Factory) *cobra.Command {\n\tconnectCmd := &cobra.Command{\n\t\tUse: \"connect\",\n\t\tShort: \"Connect an external cluster to devspace cloud\",\n\t\tLong: `\n#######################################################\n################# devspace connect ####################\n#######################################################\n\t`,\n\t\tArgs: cobra.NoArgs,\n\t}\n\n\tconnectCmd.AddCommand(newClusterCmd(f))\n\n\treturn connectCmd\n}", "func NewCmd(f cmdutil.Factory, clusteradmFlags *genericclioptionsclusteradm.ClusteradmFlags, cmFlags *genericclioptionscm.CMFlags, streams genericclioptions.IOStreams) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"get\",\n\t\tShort: \"get a resource\",\n\t}\n\n\tcmd.AddCommand(clusters.NewCmd(cmFlags, streams))\n\tcmd.AddCommand(credentials.NewCmd(cmFlags, streams))\n\tcmd.AddCommand(machinepools.NewCmd(cmFlags, streams))\n\tcmd.AddCommand(clusteradmgettoken.NewCmd(clusteradmFlags, streams))\n\tcmd.AddCommand(clusterpoolhosts.NewCmd(f, cmFlags, streams))\n\tcmd.AddCommand(clusterclaim.NewCmd(f, cmFlags, streams))\n\tcmd.AddCommand(clusterpools.NewCmd(f, cmFlags, streams))\n\tcmd.AddCommand(config.NewCmd(clusteradmFlags, cmFlags, streams))\n\tcmd.AddCommand(policies.NewCmd(f, cmFlags, streams))\n\n\treturn cmd\n}", "func NewCmdPreviewCreate() (*cobra.Command, *Options) {\n\to := &Options{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"create\",\n\t\tShort: \"Creates a preview\",\n\t\tLong: cmdLong,\n\t\tExample: fmt.Sprintf(cmdExample, rootcmd.BinaryName),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := o.Run()\n\t\t\thelper.CheckErr(err)\n\t\t},\n\t}\n\tcmd.Flags().IntVarP(&o.Number, \"pr\", \"\", 0, \"the Pull Request number. 
If not specified we will use $BRANCH_NAME\")\n\n\to.Options.AddFlags(cmd)\n\treturn cmd, o\n}", "func NewCmd(db *bolt.DB) *cobra.Command {\n\topts := tfaOptions{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"2fa <name>\",\n\t\tShort: \"List two-factor authentication codes\",\n\t\tLong: `List two-factor authentication codes.\n\nUse the [-i info] flag to display information about the setup key, it also generates a QR code with the key in URL format that can be scanned by any authenticator.`,\n\t\tExample: example,\n\t\tArgs: cmdutil.MustExistLs(db, cmdutil.TOTP),\n\t\tPreRunE: auth.Login(db),\n\t\tRunE: run2FA(db, &opts),\n\t\tPostRun: func(cmd *cobra.Command, args []string) {\n\t\t\t// Reset variables (session)\n\t\t\topts = tfaOptions{}\n\t\t},\n\t}\n\n\tcmd.AddCommand(add.NewCmd(db, os.Stdin), rm.NewCmd(db, os.Stdin))\n\n\tf := cmd.Flags()\n\tf.BoolVarP(&opts.copy, \"copy\", \"c\", false, \"copy code to clipboard\")\n\tf.BoolVarP(&opts.info, \"info\", \"i\", false, \"display information about the setup key\")\n\tf.DurationVarP(&opts.timeout, \"timeout\", \"t\", 0, \"clipboard clearing timeout\")\n\n\treturn cmd\n}", "func (c DefaultCommander) NewCmd(command string, stderr io.Writer, stdin io.Reader) *exec.Cmd {\n\tcmd := exec.Command(\"sh\", \"-c\", command)\n\tif stderr != nil {\n\t\tcmd.Stderr = stderr\n\t}\n\tif stdin != nil {\n\t\tcmd.Stdin = stdin\n\t}\n\treturn cmd\n}", "func NewCmd(name string, description string) *Cmd {\n\tcmd := new(Cmd)\n\tcmd.name = name\n\tcmd.description = description\n\tcmd.shouldRenderHelp = false\n\tcmd.parsed = false\n\tcmd.namedArgMap = make(map[string]*NamedArg)\n\tcmd.subCmds = make(map[string]*Cmd)\n\n\tcmd.AddBoolArg(\n\t\t\"help\", \"h\", &cmd.shouldRenderHelp, cmd.shouldRenderHelp,\n\t\tfalse, fmt.Sprintf(\"Print '%s' usage information.\", name))\n\n\treturn cmd\n}", "func New(cmd *cobra.Command, args []string) {\n\t// Create object for current working directory\n\tpwd, err := teflon.NewTeflonObject(\".\")\n\tif err != nil {\n\t\tlog.Fatalln(\"Couldn't create object for '.' 
:\", err)\n\t}\n\n\t// Create a show.\n\tif showFlag {\n\t\tnshws, err := pwd.CreateShow(args[0], newShowProtoFlag)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(\"ABORT: Couldnt create show:\", err)\n\t\t}\n\t\tfor _, shw := range nshws {\n\t\t\tfmt.Println(shw.Path)\n\t\t}\n\t\treturn\n\t}\n\n\t// If nothing else commands otherwise new will create an ordinary file-system\n\t// object.\n\tnobjs, err := pwd.CreateObject(args[0], newFileFlag)\n\tif err != nil {\n\t\tlog.Fatalln(\"ABORT: Couldn't create objects:\", err)\n\t}\n\tclose(teflon.Events)\n\tfor _, obj := range nobjs {\n\t\tfmt.Println(obj.Path)\n\t}\n}", "func NewCmdVersion() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"version\",\n\t\tShort: \"Show dmaas-operator version\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tshowVersion()\n\t\t},\n\t}\n\n\treturn cmd\n}", "func NewCmd(cmd ...*exec.Cmd) *CmdReal {\n\tif len(cmd) > 0 {\n\t\treturn &CmdReal{cmd: cmd[0]}\n\t}\n\treturn &CmdReal{cmd: new(exec.Cmd)}\n}", "func newCommand(devfileObj parser.DevfileObj, devfileCmd v1alpha2.Command) (command, error) {\n\tvar cmd command\n\n\tcommandType, err := common.GetCommandType(devfileCmd)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tswitch commandType {\n\n\tcase v1alpha2.ApplyCommandType:\n\t\tcmd = newApplyCommand(devfileObj, devfileCmd)\n\n\tcase v1alpha2.CompositeCommandType:\n\t\tif util.SafeGetBool(devfileCmd.Composite.Parallel) {\n\t\t\tcmd = newParallelCompositeCommand(devfileObj, devfileCmd)\n\t\t}\n\t\tcmd = newCompositeCommand(devfileObj, devfileCmd)\n\n\tcase v1alpha2.ExecCommandType:\n\t\tcmd = newExecCommand(devfileObj, devfileCmd)\n\t}\n\n\tif err = cmd.CheckValidity(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn cmd, nil\n}", "func NewCmd(name, fullName string) *cobra.Command {\n\n\taddCmd := newCmdAdd(addRecommendedCommandName, odoutil.GetFullName(fullName, addRecommendedCommandName))\n\n\tvar cmd = &cobra.Command{\n\t\tUse: name,\n\t\tShort: \"Manage services in an environment\",\n\t\tLong: \"Manage services in a GitOps environment where service source repositories are synchronized\",\n\t\tExample: fmt.Sprintf(\"%s\\n%s\\n\\n See sub-commands individually for more examples\",\n\t\t\tfullName, addRecommendedCommandName),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t},\n\t}\n\n\tcmd.Flags().AddFlagSet(addCmd.Flags())\n\tcmd.AddCommand(addCmd)\n\n\tcmd.Annotations = map[string]string{\"command\": \"main\"}\n\tcmd.SetUsageTemplate(odoutil.CmdUsageTemplate)\n\treturn cmd\n}", "func NewCmdCreateTerraform(f Factory, out io.Writer, errOut io.Writer) *cobra.Command {\n\toptions := &CreateTerraformOptions{\n\t\tCreateOptions: CreateOptions{\n\t\t\tCommonOptions: CommonOptions{\n\t\t\t\tFactory: f,\n\t\t\t\tOut: out,\n\t\t\t\tErr: errOut,\n\t\t\t},\n\t\t},\n\t}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"terraform\",\n\t\tShort: \"Creates a Jenkins X terraform plan\",\n\t\tExample: createTerraformExample,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\toptions.Cmd = cmd\n\t\t\toptions.Args = args\n\t\t\terr := options.Run()\n\t\t\tCheckErr(err)\n\t\t},\n\t}\n\n\toptions.addCommonFlags(cmd)\n\toptions.addFlags(cmd)\n\taddGitRepoOptionsArguments(cmd, &options.GitRepositoryOptions)\n\n\treturn cmd\n}", "func NewCmd(o *cli.Options) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"get\",\n\t\tShort: \"Gets Kyma-related resources.\",\n\t\tLong: \"Use this command to get Kyma-related resources.\",\n\t}\n\n\tcmd.AddCommand(schema.NewCmd(schema.NewOptions(o, os.Stdout, refMap)))\n\treturn 
cmd\n}", "func New(command string) *LocalCmd {\n\treturn &LocalCmd{\n\t\tcmd: command,\n\t}\n}", "func NewCmdGitMdget(cl *libcmdline.CommandLine, g *libkb.GlobalContext) cli.Command {\n\treturn cli.Command{\n\t\tName: \"mdget\",\n\t\tUsage: \"Fetch and decrypt repo metadata, printing the result as JSON\",\n\t\tArgumentHelp: \"[<folder>]\",\n\t\tAction: func(c *cli.Context) {\n\t\t\tcl.ChooseCommand(&CmdGitMdget{Contextified: libkb.NewContextified(g)}, \"mdget\", c)\n\t\t},\n\t}\n}", "func NewModeDiff(m *modeKinds) ModeDiff {\n\treturn ModeDiff{\n\t\tmodeKinds: m,\n\t\tpos: NewChannelModes(m),\n\t\tneg: NewChannelModes(m),\n\t}\n}", "func New(title, ipswOld, ipswNew string, kdks []string) *Diff {\n\tif len(kdks) == 0 {\n\t\treturn &Diff{\n\t\t\tTitle: title,\n\t\t\tOld: Context{\n\t\t\t\tIPSWPath: ipswOld,\n\t\t\t\tMount: make(map[string]mount),\n\t\t\t},\n\t\t\tNew: Context{\n\t\t\t\tIPSWPath: ipswNew,\n\t\t\t\tMount: make(map[string]mount),\n\t\t\t},\n\t\t}\n\t}\n\treturn &Diff{\n\t\tTitle: title,\n\t\tOld: Context{\n\t\t\tIPSWPath: ipswOld,\n\t\t\tMount: make(map[string]mount),\n\t\t\tKDK: kdks[0],\n\t\t},\n\t\tNew: Context{\n\t\t\tIPSWPath: ipswNew,\n\t\t\tMount: make(map[string]mount),\n\t\t\tKDK: kdks[1],\n\t\t},\n\t}\n}", "func New() *Command {\n\treturn &Command{}\n}", "func New(b BuildInfo) *Command {\n\tfs := flag.NewFlagSet(\"changelog\", flag.ExitOnError)\n\n\treturn &Command{\n\t\tfs: fs,\n\t\tb: b,\n\t\tfile: fs.String(fileOptName, dfltChangelogFile, \"changelog file name\"),\n\t\tdebug: fs.Bool(debugOptName, false, \"log debug information\"),\n\t\ttoStdOut: fs.Bool(stdOutOptName, false, \"output changelog to stdout instead to file\"),\n\t\thistory: fs.Bool(historyOptName, false, \"create history of old versions tags (output is always stdout)\"),\n\t\tignore: fs.Bool(ignoreOptName, false, \"ignore parsing errors of invalid (not conventional) commit messages\"),\n\t\tsinceTag: fs.String(sinceTagOptName, \"\", fmt.Sprintf(\"in combination with -%s: if a tag is specified, the changelog will be created from that tag on\", historyOptName)),\n\t\tinitConfig: fs.Bool(initDfltConfigOptName, false, fmt.Sprintf(\"initialize a default changelog configuration '%s'\", config.FileName)),\n\t\tnoPrompt: fs.Bool(noPromptOptName, false, \"do not prompt for next version\"),\n\t\tversion: fs.Bool(versionOptName, false, \"show program version information\"),\n\t\tnum: fs.Int(numOptName, 0, fmt.Sprintf(\"in combination with -%s: the number of tags to go back\", historyOptName)),\n\t}\n}", "func NewSyncCmd(f factory.Factory, globalFlags *flags.GlobalFlags) *cobra.Command {\n\tcmd := &SyncCmd{GlobalFlags: globalFlags}\n\n\tsyncCmd := &cobra.Command{\n\t\tUse: \"sync\",\n\t\tShort: \"Starts a bi-directional sync between the target container and the local path\",\n\t\tLong: `\n#############################################################################\n################### devspace sync ###########################################\n#############################################################################\nStarts a bi-directional(default) sync between the target container path\nand local path:\n\ndevspace sync --path=.:/app # localPath is current dir and remotePath is /app\ndevspace sync --path=.:/app --image-selector nginx:latest\ndevspace sync --path=.:/app --exclude=node_modules,test\ndevspace sync --path=.:/app --pod=my-pod --container=my-container\n#############################################################################`,\n\t\tRunE: func(cobraCmd *cobra.Command, args []string) error 
{\n\t\t\t// Print upgrade message if new version available\n\t\t\tupgrade.PrintUpgradeMessage(f.GetLog())\n\t\t\tplugin.SetPluginCommand(cobraCmd, args)\n\t\t\treturn cmd.Run(f)\n\t\t},\n\t}\n\n\tsyncCmd.Flags().StringVarP(&cmd.Container, \"container\", \"c\", \"\", \"Container name within pod where to sync to\")\n\tsyncCmd.Flags().StringVar(&cmd.Pod, \"pod\", \"\", \"Pod to sync to\")\n\tsyncCmd.Flags().StringVarP(&cmd.LabelSelector, \"label-selector\", \"l\", \"\", \"Comma separated key=value selector list (e.g. release=test)\")\n\tsyncCmd.Flags().StringVar(&cmd.ImageSelector, \"image-selector\", \"\", \"The image to search a pod for (e.g. nginx, nginx:latest, ${runtime.images.app}, nginx:${runtime.images.app.tag})\")\n\tsyncCmd.Flags().BoolVar(&cmd.Pick, \"pick\", true, \"Select a pod\")\n\n\tsyncCmd.Flags().StringSliceVarP(&cmd.Exclude, \"exclude\", \"e\", []string{}, \"Exclude directory from sync\")\n\tsyncCmd.Flags().StringVar(&cmd.Path, \"path\", \"\", \"Path to use (Default is current directory). Example: ./local-path:/remote-path or local-path:.\")\n\n\tsyncCmd.Flags().BoolVar(&cmd.DownloadOnInitialSync, \"download-on-initial-sync\", true, \"DEPRECATED: Downloads all locally non existing remote files in the beginning\")\n\tsyncCmd.Flags().StringVar(&cmd.InitialSync, \"initial-sync\", \"\", \"The initial sync strategy to use (mirrorLocal, mirrorRemote, preferLocal, preferRemote, preferNewest, keepAll)\")\n\n\tsyncCmd.Flags().BoolVar(&cmd.NoWatch, \"no-watch\", false, \"Synchronizes local and remote and then stops\")\n\n\tsyncCmd.Flags().BoolVar(&cmd.UploadOnly, \"upload-only\", false, \"If set DevSpace will only upload files\")\n\tsyncCmd.Flags().BoolVar(&cmd.DownloadOnly, \"download-only\", false, \"If set DevSpace will only download files\")\n\n\tsyncCmd.Flags().BoolVar(&cmd.Wait, \"wait\", true, \"Wait for the pod(s) to start if they are not running\")\n\tsyncCmd.Flags().BoolVar(&cmd.Polling, \"polling\", false, \"If polling should be used to detect file changes in the container\")\n\n\treturn syncCmd\n}", "func NewCmd(db *bolt.DB, r io.Reader) *cobra.Command {\n\tvar opts addOptions\n\n\tcmd := &cobra.Command{\n\t\tUse: \"add <name>\",\n\t\tShort: \"Add a two-factor authentication code\",\n\t\tLong: `Add a two-factor authentication code. The name must be one already used by an entry.\n\nServices tipically show an hyperlinked \"Enter manually\", \"Enter this text code\" or similar messages, copy the hexadecimal code given and submit it when requested by Kure. 
After this, your entry will have a synchronized token with the service.`,\n\t\tExample: example,\n\t\tArgs: cmdutil.MustExist(db, cmdutil.Entry), // There must exist an entry with the same name\n\t\tPreRunE: auth.Login(db),\n\t\tRunE: opts.runAdd(db, r),\n\t\tPostRun: func(cmd *cobra.Command, args []string) {\n\t\t\topts = addOptions{}\n\t\t},\n\t}\n\n\tcmd.Flags().IntVarP(&opts.digits, \"digits\", \"d\", 6, \"TOTP length {6|7|8}\")\n\n\treturn cmd\n}", "func NewDeleteCmd(globalCfg *config.GlobalImpl) *cobra.Command {\n\tdeleteOptions := config.NewDeleteOptions()\n\n\tcmd := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"DEPRECATED: delete releases from state file (helm delete)\",\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tdeleteImpl := config.NewDeleteImpl(globalCfg, deleteOptions)\n\t\t\terr := config.NewCLIConfigImpl(deleteImpl.GlobalImpl)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif err := deleteImpl.ValidateConfig(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\ta := app.New(deleteImpl)\n\t\t\treturn toCLIError(deleteImpl.GlobalImpl, a.Delete(deleteImpl))\n\t\t},\n\t}\n\n\tf := cmd.Flags()\n\tf.StringVar(&globalCfg.GlobalOptions.Args, \"args\", \"\", \"pass args to helm exec\")\n\tf.StringVar(&deleteOptions.Cascade, \"cascade\", \"\", \"pass cascade to helm exec, default: background\")\n\tf.IntVar(&deleteOptions.Concurrency, \"concurrency\", 0, \"maximum number of concurrent helm processes to run, 0 is unlimited\")\n\tf.BoolVar(&deleteOptions.Purge, \"purge\", false, \"purge releases i.e. free release names and histories\")\n\tf.BoolVar(&deleteOptions.SkipCharts, \"skip-charts\", false, \"don't prepare charts when deleting releases\")\n\n\treturn cmd\n}", "func NewCmd(db *bolt.DB, r io.Reader) *cobra.Command {\n\topts := phraseOptions{}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"phrase <name>\",\n\t\tShort: \"Create an entry using a passphrase\",\n\t\tAliases: []string{\"passphrase\", \"p\"},\n\t\tExample: example,\n\t\tArgs: cmdutil.MustNotExist(db, cmdutil.Entry),\n\t\tPreRunE: auth.Login(db),\n\t\tRunE: runPhrase(db, r, &opts),\n\t\tPostRun: func(cmd *cobra.Command, args []string) {\n\t\t\t// Reset variables (session)\n\t\t\topts = phraseOptions{\n\t\t\t\tseparator: \" \",\n\t\t\t}\n\t\t},\n\t}\n\n\tf := cmd.Flags()\n\tf.Uint64VarP(&opts.length, \"length\", \"l\", 0, \"number of words\")\n\tf.StringVarP(&opts.separator, \"separator\", \"s\", \" \", \"character that separates each word\")\n\tf.StringSliceVarP(&opts.incl, \"include\", \"i\", nil, \"words to include in the passphrase\")\n\tf.StringSliceVarP(&opts.excl, \"exclude\", \"e\", nil, \"words to exclude from the passphrase\")\n\tf.StringVarP(&opts.list, \"list\", \"L\", \"WordList\", \"passphrase list used {NoList|WordList|SyllableList}\")\n\n\treturn cmd\n}", "func NewGetBlockCmd(hash string, verbose *int) *GetBlockCmd {\n\treturn &GetBlockCmd{\n\t\tHash: hash,\n\t\tVerbose: verbose,\n\t}\n}", "func newSyncCommand() cli.Command {\n\treturn new(syncCommand).getCommand()\n}", "func NewGenerateCmd(client *registry.HTTPClient) *cobra.Command {\n\tgenerateCmd := &cobra.Command{\n\t\tUse: \"generate\",\n\t\tShort: \"Generate a Lockfile to track image digests\",\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tif err := validateGenerateCmdFlags(cmd); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tenvPath, err := cmd.Flags().GetString(\"env-file\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tenvPath = filepath.ToSlash(envPath)\n\t\t\t_ = 
godotenv.Load(envPath)\n\t\t\twm, err := getDefaultWrapperManager(cmd, client)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tgenerator, err := generate.NewGenerator(cmd)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tlFile, err := os.Create(generator.LockfileName)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer lFile.Close()\n\t\t\tif err := generator.GenerateLockfile(wm, lFile); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t}\n\tgenerateCmd.Flags().String(\n\t\t\"base-dir\", \".\", \"Top level directory to collect files from\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"dockerfiles\", []string{}, \"Path to Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"compose-files\", []string{}, \"Path to docker-compose files\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"dockerfile-globs\", []string{}, \"Glob pattern to select Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"compose-file-globs\", []string{},\n\t\t\"Glob pattern to select docker-compose files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"dockerfile-recursive\", false, \"Recursively collect Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"compose-file-recursive\", false,\n\t\t\"Recursively collect docker-compose files\",\n\t)\n\tgenerateCmd.Flags().String(\n\t\t\"lockfile-name\", \"docker-lock.json\",\n\t\t\"Lockfile name to be output in the current working directory\",\n\t)\n\tgenerateCmd.Flags().String(\n\t\t\"config-file\", getDefaultConfigPath(),\n\t\t\"Path to config file for auth credentials\",\n\t)\n\tgenerateCmd.Flags().String(\n\t\t\"env-file\", \".env\", \"Path to .env file\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"dockerfile-env-build-args\", false,\n\t\t\"Use environment vars as build args for Dockerfiles\",\n\t)\n\treturn generateCmd\n}", "func NewGenerateCmd() (*cobra.Command, error) {\n\tgenerateCmd := &cobra.Command{\n\t\tUse: \"generate\",\n\t\tShort: \"Generate a Lockfile to track image digests\",\n\t\tPreRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\treturn bindPFlags(cmd, []string{\n\t\t\t\t\"base-dir\",\n\t\t\t\t\"dockerfiles\",\n\t\t\t\t\"composefiles\",\n\t\t\t\t\"kubernetesfiles\",\n\t\t\t\t\"lockfile-name\",\n\t\t\t\t\"dockerfile-globs\",\n\t\t\t\t\"composefile-globs\",\n\t\t\t\t\"kubernetesfile-globs\",\n\t\t\t\t\"dockerfile-recursive\",\n\t\t\t\t\"composefile-recursive\",\n\t\t\t\t\"kubernetesfile-recursive\",\n\t\t\t\t\"exclude-all-dockerfiles\",\n\t\t\t\t\"exclude-all-composefiles\",\n\t\t\t\t\"exclude-all-kubernetesfiles\",\n\t\t\t\t\"ignore-missing-digests\",\n\t\t\t\t\"update-existing-digests\",\n\t\t\t})\n\t\t},\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tflags, err := parseFlags()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tgenerator, err := SetupGenerator(flags)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tvar lockfileByt bytes.Buffer\n\n\t\t\terr = generator.GenerateLockfile(&lockfileByt)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tlockfileContents := lockfileByt.Bytes()\n\n\t\t\tif len(lockfileContents) == 0 {\n\t\t\t\treturn errors.New(\"no images found\")\n\t\t\t}\n\n\t\t\twriter, err := os.Create(\n\t\t\t\tflags.FlagsWithSharedValues.LockfileName,\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer writer.Close()\n\n\t\t\t_, err = writer.Write(lockfileContents)\n\t\t\tif err == nil {\n\t\t\t\tfmt.Println(\"successfully generated lockfile!\")\n\t\t\t}\n\n\t\t\treturn 
err\n\t\t},\n\t}\n\tgenerateCmd.Flags().String(\n\t\t\"base-dir\", \".\", \"Top level directory to collect files from\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"dockerfiles\", []string{}, \"Paths to Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"composefiles\", []string{}, \"Paths to docker-compose files\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"kubernetesfiles\", []string{}, \"Paths to kubernetes files\",\n\t)\n\tgenerateCmd.Flags().String(\n\t\t\"lockfile-name\", \"docker-lock.json\",\n\t\t\"Lockfile name to be output in the current working directory\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"dockerfile-globs\", []string{}, \"Glob pattern to select Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"composefile-globs\", []string{},\n\t\t\"Glob pattern to select docker-compose files\",\n\t)\n\tgenerateCmd.Flags().StringSlice(\n\t\t\"kubernetesfile-globs\", []string{},\n\t\t\"Glob pattern to select kubernetes files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"dockerfile-recursive\", false, \"Recursively collect Dockerfiles\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"composefile-recursive\", false,\n\t\t\"Recursively collect docker-compose files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"kubernetesfile-recursive\", false,\n\t\t\"Recursively collect kubernetes files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"exclude-all-dockerfiles\", false,\n\t\t\"Do not collect Dockerfiles unless referenced by docker-compose files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"exclude-all-composefiles\", false,\n\t\t\"Do not collect docker-compose files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"exclude-all-kubernetesfiles\", false,\n\t\t\"Do not collect kubernetes files\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"ignore-missing-digests\", false,\n\t\t\"Do not fail if unable to find digests\",\n\t)\n\tgenerateCmd.Flags().Bool(\n\t\t\"update-existing-digests\", false,\n\t\t\"Query registries for new digests even if they are hardcoded in files\",\n\t)\n\n\treturn generateCmd, nil\n}", "func NewLockCmd() *cobra.Command {\n\tlockCmd := &cobra.Command{\n\t\tUse: \"lock\",\n\t\tShort: \"Manage image digests with Lockfiles\",\n\t}\n\n\treturn lockCmd\n}", "func New(dir, name string, args ...string) Command {\n\tc := Command{}\n\n\tc.cmd = exec.Command(name, args...)\n\tc.cmd.Dir = dir\n\n\treturn c\n}", "func New() *cobra.Command {\n\tcommand := cobra.Command{\n\t\tUse: \"%template%\",\n\t\tShort: \"%template%\",\n\t\tLong: \"%template%\",\n\n\t\tArgs: cobra.NoArgs,\n\n\t\tSilenceErrors: false,\n\t\tSilenceUsage: true,\n\t}\n\n\t/* configure instance */\n\tcommand.AddCommand(\n\t\tdemo.Panic(),\n\t\tdemo.Stderr(),\n\t\tdemo.Stdout(),\n\t)\n\n\treturn &command\n}", "func NewCommand(name, usage string, call CallMethod) *Command {\n\tref := reflect.ValueOf(call)\n\tif ref.Kind() != reflect.Func {\n\t\tpanic(fmt.Sprintf(\"Call must be method, but is %s\", ref.Kind()))\n\t}\n\treturn &Command{\n\t\tName: name,\n\t\tUsage: usage,\n\t\tOptions: DefaultOptions,\n\t\tArguments: make([]*Argument, 0),\n\t\tCall: ref,\n\t}\n}", "func NewCommand(name, usage string, call CallMethod) *Command {\n\tref := reflect.ValueOf(call)\n\tif ref.Kind() != reflect.Func {\n\t\tpanic(fmt.Sprintf(\"Call must be method, but is %s\", ref.Kind()))\n\t}\n\treturn &Command{\n\t\tName: name,\n\t\tUsage: usage,\n\t\tOptions: DefaultOptions,\n\t\tArguments: make([]*Argument, 0),\n\t\tCall: ref,\n\t}\n}", "func NewCommand(io ui.IO, newClient newClientFunc) *Command {\n\treturn &Command{\n\t\tio: 
io,\n\t\tnewClient: newClient,\n\t}\n}", "func NewAddCmd() *cobra.Command {\n\taddCmd := &cobra.Command{\n\t\tUse: \"add\",\n\t\tShort: \"Add password to your vault\",\n\t\tLong: `Add password to email/username. Password can be randomly generated.`,\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tu, err := EnterSession()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\terr = addPassword(u)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Fprintln(cmd.OutOrStdout(), \"successfully added\")\n\n\t\t\treturn nil\n\t\t},\n\t}\n\treturn addCmd\n}", "func NewAttachCmd(f factory.Factory, globalFlags *flags.GlobalFlags) *cobra.Command {\n\tcmd := &AttachCmd{GlobalFlags: globalFlags}\n\n\tattachCmd := &cobra.Command{\n\t\tUse: \"attach\",\n\t\tShort: \"Attaches to a container\",\n\t\tLong: `\n#######################################################\n################# devspace attach #####################\n#######################################################\nAttaches to a running container\n\ndevspace attach\ndevspace attach --pick # Select pod to enter\ndevspace attach -c my-container\ndevspace attach -n my-namespace\n#######################################################`,\n\t\tRunE: func(cobraCmd *cobra.Command, args []string) error {\n\t\t\tplugin.SetPluginCommand(cobraCmd, args)\n\t\t\treturn cmd.Run(f, cobraCmd, args)\n\t\t},\n\t}\n\n\tattachCmd.Flags().StringVarP(&cmd.Container, \"container\", \"c\", \"\", \"Container name within pod where to execute command\")\n\tattachCmd.Flags().StringVar(&cmd.Pod, \"pod\", \"\", \"Pod to open a shell to\")\n\tattachCmd.Flags().StringVar(&cmd.ImageSelector, \"image-selector\", \"\", \"The image to search a pod for (e.g. nginx, nginx:latest, image(app), nginx:tag(app))\")\n\tattachCmd.Flags().StringVar(&cmd.Image, \"image\", \"\", \"Image is the config name of an image to select in the devspace config (e.g. 'default'), it is NOT a docker image like myuser/myimage\")\n\tattachCmd.Flags().StringVarP(&cmd.LabelSelector, \"label-selector\", \"l\", \"\", \"Comma separated key=value selector list (e.g. 
release=test)\")\n\n\tattachCmd.Flags().BoolVar(&cmd.Pick, \"pick\", true, \"Select a pod\")\n\n\treturn attachCmd\n}", "func NewDeleteCmd(ui terminal.UI, context plugin.PluginContext) *DeleteCmd {\n\treturn &DeleteCmd{\n\t\tui: ui,\n\t\tcontext: context,\n\t}\n}", "func NewCmd(traefikConfiguration *cmd.TraefikConfiguration, traefikPointersConfiguration *cmd.TraefikConfiguration) *flaeg.Command {\n\treturn &flaeg.Command{\n\t\tName: \"healthcheck\",\n\t\tDescription: `Calls traefik /ping to check health (web provider must be enabled)`,\n\t\tConfig: traefikConfiguration,\n\t\tDefaultPointersConfig: traefikPointersConfiguration,\n\t\tRun: runCmd(traefikConfiguration),\n\t\tMetadata: map[string]string{\n\t\t\t\"parseAllSources\": \"true\",\n\t\t},\n\t}\n}", "func NewCmd(args ...interface{}) *redis.Cmd {\n\treturn redis.NewCmd(args...)\n}", "func GetCmdCreateCdp(cdc *codec.Codec) *cobra.Command {\n\treturn &cobra.Command{\n\t\tUse: \"create [collateral] [debt]\",\n\t\tShort: \"create a new cdp\",\n\t\tLong: strings.TrimSpace(\n\t\t\tfmt.Sprintf(`Create a new cdp, depositing some collateral and drawing some debt.\n\nExample:\n$ %s tx %s create 10000000uatom 1000jpyx --from myKeyName\n`, version.ClientName, types.ModuleName)),\n\t\tArgs: cobra.ExactArgs(2),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tinBuf := bufio.NewReader(cmd.InOrStdin())\n\t\t\tcliCtx := context.NewCLIContext().WithCodec(cdc)\n\t\t\ttxBldr := auth.NewTxBuilderFromCLI(inBuf).WithTxEncoder(utils.GetTxEncoder(cdc))\n\n\t\t\tcollateral, err := sdk.ParseCoin(args[0])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdebt, err := sdk.ParseCoin(args[1])\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tmsg := types.NewMsgCreateCDP(cliCtx.GetFromAddress(), collateral, debt)\n\t\t\terr = msg.ValidateBasic()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn utils.GenerateOrBroadcastMsgs(cliCtx, txBldr, []sdk.Msg{msg})\n\t\t},\n\t}\n}", "func NewCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"archive-images\",\n\t\tShort: \"Export Capact Docker images to a tar archive\",\n\t\tLong: \"Subcommand for various manifest generation operations\",\n\t}\n\n\tcmd.AddCommand(NewFromHelmCharts())\n\n\treturn cmd\n}", "func New(c string, args ...string) *Cmd {\n\tcmd := &Cmd{\n\t\tCmd: exec.Command(c, args...),\n\t}\n\tcmd.Run = func() error {\n\t\treturn cmd.Cmd.Run()\n\t}\n\treturn cmd\n}", "func NewFmtCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"fmt\",\n\t\tShort: \"fmt helps to deal with data format operations\",\n\t\tPersistentPreRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tvar err error\n\t\t\tif inputBytes, err = getInput(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err = getOutput(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn nil\n\t\t},\n\t\tPersistentPostRun: func(cmd *cobra.Command, args []string) {\n\t\t\toutput.Close()\n\t\t},\n\t}\n\n\tcmd.PersistentFlags().StringVarP(&inputFile, \"input\", \"i\", \"\", \"Read input from file\")\n\tcmd.PersistentFlags().StringVarP(&outputFile, \"output\", \"o\", \"\", \"Write output to file\")\n\n\treturn cmd\n}", "func NewGetBlockCmd(hash string, verbose *bool) *btcjson.GetBlockCmd {\n\treturn &btcjson.GetBlockCmd{\n\t\tHash: hash,\n\t\tVerbose: verbose,\n\t}\n}", "func NewCmd(o *Options) *cobra.Command {\n\n\tc := command{\n\t\tCommand: cli.Command{Options: o.Options},\n\t\topts: o,\n\t}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"module <MODULE_IMAGE>\",\n\t\tShort: \"Unpacks an 
OCI container image module bundled as an from the contents of the given path\",\n\t\tLong: `Use this command to unpack a Kyma module.\n\n### Detailed description\n\nKyma modules are individual components that can be deployed into a Kyma runtime. Modules are built and distributed as OCI container images. \nWith this command, you can unpack the contents of an image so that they can be deployed into a cluster or inspected by developers.\n`,\n\n\t\tRunE: func(_ *cobra.Command, args []string) error { return c.Run(args) },\n\t\tAliases: []string{\"mod\"},\n\t}\n\n\treturn cmd\n}", "func NewManifestCmd() *cobra.Command {\n\tmanifestCmd := &cobra.Command{\n\t\tUse: \"manifest\",\n\t\tShort: \"Manipulate manifest lists\",\n\t}\n\tmanifestCmd.AddCommand(\n\t\tNewManifestCreateCmd(),\n\t\tNewManifestAnnotateCmd(),\n\t\tNewManifestInspectCmd(),\n\t)\n\n\treturn manifestCmd\n}", "func NewCommand(src, dst ID, cmdData interface{}) (*Command, error) {\n\tvar t = getType(cmdData)\n\n\tif t == nil {\n\t\treturn nil, errors.Errorf(\"attempt to submit a nil command\")\n\t}\n\n\tswitch t.Kind() {\n\tcase reflect.Struct:\n\t\tif t.Name() == \"\" {\n\t\t\treturn nil, errors.Errorf(\"attempt to submit anonymous struct as command\")\n\t\t}\n\tcase reflect.String:\n\t\tif fmt.Sprint(cmdData) == \"\" {\n\t\t\treturn nil, errors.Errorf(\"attempt to submit an empty string as command\")\n\t\t}\n\tdefault:\n\t\treturn nil, errors.Errorf(\"attempt to submit %s %#+v as command\", t.Kind(), cmdData)\n\t}\n\n\treturn &Command{src: src, dst: dst, args: cmdData, ctx: context.Background()}, nil\n\n}", "func NewDiffFormat(source string) formatter.Format {\n\tswitch source {\n\tcase formatter.TableFormatKey:\n\t\treturn defaultDiffTableFormat\n\t}\n\treturn formatter.Format(source)\n}", "func (c *Command) New() *cobra.Command {\n\tnewCommand := &cobra.Command{\n\t\tUse: \"annactl\",\n\t\tShort: \"Manage the API of the anna project. For more information see https://github.com/the-anna-project/annactl.\",\n\t\tLong: \"Manage the API of the anna project. 
For more information see https://github.com/the-anna-project/annactl.\",\n\t\tRun: c.Execute,\n\t}\n\n\tnewCommand.AddCommand(c.endpointCommand.New())\n\tnewCommand.AddCommand(c.versionCommand.New())\n\n\treturn newCommand\n}", "func newCreateProjectCmd() *cobra.Command {\n\tvar (\n\t\toptions core.CreateProjectOptions\n\t)\n\n\tcreateProjectCmd := cobra.Command{\n\t\tUse: \"project NAME\",\n\t\tShort: `Create a new verless project`,\n\t\tArgs: cobra.ExactArgs(1),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tpath := args[0]\n\t\t\treturn core.CreateProject(path, options)\n\t\t},\n\t}\n\n\tcreateProjectCmd.Flags().BoolVar(&options.Overwrite, \"overwrite\",\n\t\tfalse, `overwrite the directory if it already exists`)\n\n\treturn &createProjectCmd\n}", "func NewCmdDelete(name, fullName string) *cobra.Command {\n\n\tdo := NewDeleteOptions()\n\n\tvar componentDeleteCmd = &cobra.Command{\n\t\tUse: fmt.Sprintf(\"%s <component_name>\", name),\n\t\tShort: \"Delete an existing component\",\n\t\tLong: \"Delete an existing component.\",\n\t\tExample: fmt.Sprintf(deleteExample, fullName),\n\t\tArgs: cobra.MaximumNArgs(1),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tgenericclioptions.GenericRun(do, cmd, args)\n\t\t},\n\t}\n\n\tcomponentDeleteCmd.Flags().BoolVarP(&do.componentForceDeleteFlag, \"force\", \"f\", false, \"Delete component without prompting\")\n\tcomponentDeleteCmd.Flags().BoolVarP(&do.componentDeleteAllFlag, \"all\", \"a\", false, \"Delete component and local config\")\n\n\t// Add a defined annotation in order to appear in the help menu\n\tcomponentDeleteCmd.Annotations = map[string]string{\"command\": \"component\"}\n\tcomponentDeleteCmd.SetUsageTemplate(odoutil.CmdUsageTemplate)\n\tcompletion.RegisterCommandHandler(componentDeleteCmd, completion.ComponentNameCompletionHandler)\n\t//Adding `--context` flag\n\tgenericclioptions.AddContextFlag(componentDeleteCmd, &do.componentContext)\n\n\t//Adding `--project` flag\n\tprojectCmd.AddProjectFlag(componentDeleteCmd)\n\t//Adding `--application` flag\n\tappCmd.AddApplicationFlag(componentDeleteCmd)\n\n\treturn componentDeleteCmd\n}", "func (r Describe) NewCommand(ctx context.Context, name string) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: name[strings.LastIndex(name, \".\")+1:] + \" [NAME[:VERSION]] [RESOURCE] [FILTER]\",\n\t\tShort: \"Describe registered releases with states information.\",\n\t\tLong: \"Describe registered releases with states information\",\n\t\tArgs: require.Args(r.validation),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tvar name string\n\t\t\tvar version semver.Version\n\t\t\tif len(args) > 0 {\n\t\t\t\tparts := strings.Split(args[0], \":\")\n\t\t\t\tparts = append(parts, \"\")\n\t\t\t\tname = parts[0]\n\n\t\t\t\tif parts[1] != \"\" {\n\t\t\t\t\tvar err error\n\t\t\t\t\tversion, err = semver.Parse(parts[1])\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tvar resource string\n\t\t\tif len(args) > 1 {\n\t\t\t\tresource = args[1]\n\t\t\t}\n\n\t\t\tvar filter string\n\t\t\tif len(args) > 2 {\n\t\t\t\tfilter = args[2]\n\t\t\t}\n\n\t\t\topts := describeOptions{\n\t\t\t\tName: name,\n\t\t\t\tVersion: version,\n\t\t\t\tResource: resource,\n\t\t\t\tFilter: filter,\n\t\t\t}\n\t\t\treturn r.run(ctx, cmd.OutOrStdout(), opts)\n\t\t},\n\t\tSilenceErrors: true,\n\t\tSilenceUsage: true,\n\t}\n\n\tcmd.SetUsageTemplate(global.DefaultUsageTemplate)\n\tcmd.SetHelpCommand(&cobra.Command{Hidden: true})\n\n\tflags := 
cmd.Flags()\n\tflags.SortFlags = false\n\tflags.Bool(\"help\", false, \"Show help information.\")\n\treturn cmd\n}", "func newCommand(name, argsSpec, description string, init func(*flag.FlagSet), f func(*flag.FlagSet) error) command {\n\tfs := flag.NewFlagSet(name, flag.ExitOnError)\n\tinit(fs)\n\tif fs.Usage == nil {\n\t\tfs.Usage = func() {\n\t\t\tfmt.Fprintln(os.Stderr, \"#\", description)\n\t\t\tfmt.Fprintln(os.Stderr, name, argsSpec)\n\t\t\tfs.PrintDefaults()\n\t\t}\n\t}\n\treturn command{fs, f}\n}", "func NewCmdCreateDeployment(f cmdutil.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command {\n\toptions := &DeploymentOpts{\n\t\tPrintFlags: genericclioptions.NewPrintFlags(\"created\").WithTypeSetter(scheme.Scheme),\n\t\tIOStreams: ioStreams,\n\t}\n\n\tcmd := &cobra.Command{\n\t\tUse: \"deployment NAME --image=image [--dry-run=server|client|none]\",\n\t\tDisableFlagsInUseLine: true,\n\t\tAliases: []string{\"deploy\"},\n\t\tShort: deploymentLong,\n\t\tLong: deploymentLong,\n\t\tExample: deploymentExample,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tcmdutil.CheckErr(options.Complete(f, cmd, args))\n\t\t\tcmdutil.CheckErr(options.Run())\n\t\t},\n\t}\n\n\toptions.PrintFlags.AddFlags(cmd)\n\n\tcmdutil.AddApplyAnnotationFlags(cmd)\n\tcmdutil.AddValidateFlags(cmd)\n\tcmdutil.AddGeneratorFlags(cmd, \"\")\n\tcmd.Flags().StringSliceVar(&options.Images, \"image\", []string{}, \"Image name to run.\")\n\t_ = cmd.MarkFlagRequired(\"image\")\n\tcmdutil.AddFieldManagerFlagVar(cmd, &options.FieldManager, \"kubectl-create\")\n\n\treturn cmd\n}", "func NewCommand(commandName string, p1 int64, p2 int64) Command {\n\tcommandCode := commandCodes[commandName]\n\n\treturn Command{commandCode, p1, p2, false, false, \"\"}\n}", "func New(appName string) (*ffcli.Command, *Config) {\n\tcfg := Config{\n\t\tAppName: appName,\n\t}\n\n\tfs := flag.NewFlagSet(appName, flag.ExitOnError)\n\tcfg.RegisterFlags(fs)\n\n\treturn &ffcli.Command{\n\t\tShortUsage: appName + \" [flags] <subcommand> [flags] [<arg>...]\",\n\t\tFlagSet: fs,\n\t\tExec: cfg.Exec,\n\t}, &cfg\n}", "func Cmd(params base.Parameters) *subcommands.Command {\n\treturn &subcommands.Command{\n\t\tUsageLine: \"semantic-diff SCRIPT CONFIG [CONFIG CONFIG ...]\",\n\t\tShortDesc: \"interprets a high-level config, compares the result to existing configs\",\n\t\tLongDesc: `Interprets a high-level config, compares the result to existing configs.\n\nTHIS SUBCOMMAND WILL BE DELETED AFTER IT IS NO LONGER USEFUL. DO NOT DEPEND ON\nIT IN ANY AUTOMATIC SCRIPTS. FOR MANUAL USE ONLY. IF YOU REALLY-REALLY NEED TO\nUSE IT FROM AUTOMATION, PLEASE FILE A BUG.\n\nUses semantic comparison. Normalizes all protos before comparing them via\n'git diff'. Intended to be used manually when switching existing *.cfg to be\ngenerated from *.star.\n\nAccepts a path to the entry-point *.star script and paths to existing configs\nto diff against. 
Their filenames (not full paths) will be used to find\ncorresponding generated files, and also to figure out the proto schema to use.\n\nExample:\n\n $ lucicfg semantic-diff main.star configs/cr-buildbucket.cfg configs/luci-milo.cfg\n`,\n\t\tCommandRun: func() subcommands.CommandRun {\n\t\t\tdr := &diffRun{}\n\t\t\tdr.Init(params)\n\t\t\tdr.AddMetaFlags()\n\t\t\tdr.Flags.StringVar(&dr.outputDir, \"output-dir\", \"\", \"Where to put normalized configs if you want them preserved after the command completes.\")\n\t\t\treturn dr\n\t\t},\n\t}\n}", "func New(p provider) (*Command, error) {\n\tverifiableStore, err := verifiablestore.New(p)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"new vc store : %w\", err)\n\t}\n\n\tdidStore, err := didstore.New(p)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"new did store : %w\", err)\n\t}\n\n\treturn &Command{\n\t\tverifiableStore: verifiableStore,\n\t\tdidStore: didStore,\n\t\tresolver: verifiable.NewVDRKeyResolver(p.VDRegistry()),\n\t\tctx: p,\n\t\tdocumentLoader: p.JSONLDDocumentLoader(),\n\t}, nil\n}", "func NewCommand() *cobra.Command {\n\tcmd, _ := newCommandHooks()\n\treturn cmd\n}", "func NewCmdVersion() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"version\",\n\t\tShort: \"Print Maya version information\",\n\t\tLong: `Print Maya version information for the current context\n\nExample:\nmaya version\n\t`,\n\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tfmt.Printf(\"Version: %s\\n\", getVersion())\n\t\t\tfmt.Printf(\"Git commit: %s\", getGitCommit())\n\t\t\t//fmt.Printf(\"Git tree state: %s\\n\", GitTreeState)\n\t\t\tfmt.Printf(\"Go-Version: %s\\n\", runtime.Version())\n\t\t\tfmt.Printf(\"GOARCH: %s\\n\", runtime.GOARCH)\n\t\t\tfmt.Printf(\"GOOS: %s\\n\", runtime.GOOS)\n\n\t\t},\n\t}\n\n\treturn cmd\n}", "func NewCmdView(name, fullName string) *cobra.Command {\n\to := NewViewOptions()\n\tpreferenceViewCmd := &cobra.Command{\n\t\tUse: name,\n\t\tShort: \"View current preference values\",\n\t\tLong: \"View current preference values\",\n\t\tExample: fmt.Sprintf(fmt.Sprint(\"\\n\", viewExample), fullName),\n\n\t\tArgs: cobra.ExactArgs(0),\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tgenericclioptions.GenericRun(o, cmd, args)\n\t\t},\n\t}\n\tclientset.Add(preferenceViewCmd, clientset.PREFERENCE)\n\treturn preferenceViewCmd\n}", "func newCmdRevert(streams genericclioptions.IOStreams) *cobra.Command {\n\to := newDryrunOptions(streams)\n\to.enabledDryRun = false\n\n\tcmd := &cobra.Command{\n\t\tUse: \"revert -f [saved_state_csv_file]\",\n\t\tShort: \"revert all WPA instance dry-run configuration from a csv backup file\",\n\t\tExample: fmt.Sprintf(dryrunRevertExample, \"dry-run revert\"),\n\t\tSilenceUsage: true,\n\t\tRunE: func(c *cobra.Command, args []string) error {\n\t\t\tif o.csvFile == \"\" {\n\t\t\t\treturn fmt.Errorf(\"the revert command requires a file as input use `--help` for an example\")\n\t\t\t}\n\t\t\tif err := o.complete(c, args); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn o.runRevert()\n\t\t},\n\t}\n\n\tcmd.Flags().StringVarP(&o.csvFile, \"csv-file\", \"f\", \"\", \"read WPA dry-run configuration from csv file\")\n\to.configFlags.AddFlags(cmd.Flags())\n\n\treturn cmd\n}", "func NewCmdDelete(f cmdutil.Factory) *cobra.Command {\n\tva := cmdcompl.ValidArgsSlide{timeentryhlp.AliasCurrent, timeentryhlp.AliasLast}\n\tcmd := &cobra.Command{\n\t\tUse: \"delete { <time-entry-id> | \" +\n\t\t\tva.IntoUseOptions() + \" }...\",\n\t\tAliases: []string{\"del\", \"rm\", \"remove\"},\n\t\tArgs: 
cmdutil.RequiredNamedArgs(\"time entry id\"),\n\t\tValidArgs: va.IntoValidArgs(),\n\t\tShort: `Delete time entry(ies), use id \"` +\n\t\t\ttimeentryhlp.AliasCurrent + `\" to apply to time entry in progress`,\n\t\tLong: heredoc.Docf(`\n\t\t\tDelete time entries\n\n\t\t\tIf you want to delete the current (running) time entry you can use \"%s\" instead of its ID.\n\n\t\t\t**Important**: this action can't be reverted, once the time entry is deleted its ID is lost.\n\t\t`,\n\t\t\ttimeentryhlp.AliasCurrent,\n\t\t),\n\t\tExample: heredoc.Docf(`\n\t\t\t# trying to delete a time entry that does not exist, or from other workspace\n\t\t\t$ %[1]s 62af70d849445270d7c09fbc\n\t\t\tdelete time entry \"62af70d849445270d7c09fbc\": TIMEENTRY with id 62af70d849445270d7c09fbc doesn't belong to WORKSPACE with id cccccccccccccccccccccccc (code: 501)\n\n\t\t\t# deleting the running time entry\n\t\t\t$ %[1]s current\n\t\t\t# no output\n\n\t\t\t# deleting the last time entry\n\t\t\t$ %[1]s last\n\t\t\t# no output\n\n\t\t\t# deleting multiple time entries\n\t\t\t$ %[1]s 62b5b51085815e619d7ae18d 62b5d55185815e619d7af928\n\t\t\t# no output\n\t\t`, \"clockify-cli delete\"),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tvar err error\n\t\t\tvar w, u string\n\n\t\t\tif w, err = f.GetWorkspaceID(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif u, err = f.GetUserID(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tc, err := f.Client()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tfor i := range args {\n\t\t\t\tp := api.DeleteTimeEntryParam{\n\t\t\t\t\tWorkspace: w,\n\t\t\t\t\tTimeEntryID: args[i],\n\t\t\t\t}\n\n\t\t\t\tif p.TimeEntryID == timeentryhlp.AliasCurrent {\n\t\t\t\t\tte, err := c.GetTimeEntryInProgress(\n\t\t\t\t\t\tapi.GetTimeEntryInProgressParam{\n\t\t\t\t\t\t\tWorkspace: p.Workspace,\n\t\t\t\t\t\t\tUserID: u,\n\t\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\n\t\t\t\t\tif te == nil {\n\t\t\t\t\t\treturn errors.New(\"there is no time entry in progress\")\n\t\t\t\t\t}\n\n\t\t\t\t\tp.TimeEntryID = te.ID\n\t\t\t\t}\n\n\t\t\t\tif p.TimeEntryID == timeentryhlp.AliasLast {\n\t\t\t\t\tte, err := timeentryhlp.GetLatestEntryEntry(c, p.Workspace, u)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\n\t\t\t\t\tp.TimeEntryID = te.ID\n\t\t\t\t}\n\n\t\t\t\tif err := c.DeleteTimeEntry(p); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn nil\n\t\t},\n\t}\n\n\treturn cmd\n}", "func NewCommand(name string) Command {\n\treturn Command{name: name}\n}", "func NewDeleteCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"delete\",\n\t\tShort: \"delete\",\n\t\tLong: \"delete resource\",\n\t}\n\tcmd.PersistentFlags().BoolVarP(&flagErrIfNotExist, \"errIfNotExist\", \"\", false,\n\t\t\"If true, return error when result is empty\")\n\n\tcmd.AddCommand(NewDeleteStrategyCmd())\n\treturn cmd\n}", "func NewMountCommand(errOut io.Writer) *cobra.Command {\n\tmountSecretOpts := &mountSecretOpts{\n\t\terrOut: errOut,\n\t}\n\tcmd := &cobra.Command{\n\t\tUse: \"mount --FLAGS\",\n\t\tShort: \"Mount a secret with certs\",\n\t\tLong: \"This command mounts the secret with valid certs signed by etcd-cert-signer-controller\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tif err := mountSecretOpts.validateMountSecretOpts(); err != nil {\n\t\t\t\tif cmd.HasParent() {\n\t\t\t\t\tklog.Fatal(err)\n\t\t\t\t}\n\t\t\t\tfmt.Fprint(mountSecretOpts.errOut, err.Error())\n\t\t\t}\n\t\t\tif err := 
mountSecretOpts.Run(context.TODO()); err != nil {\n\t\t\t\tif cmd.HasParent() {\n\t\t\t\t\tklog.Fatal(err)\n\t\t\t\t}\n\t\t\t\tfmt.Fprint(mountSecretOpts.errOut, err.Error())\n\t\t\t}\n\t\t},\n\t}\n\n\tmountSecretOpts.AddFlags(cmd.Flags())\n\treturn cmd\n}", "func newPlanCmd() *cobra.Command {\n\tpc := &planCmd{}\n\n\tplanCmd := &cobra.Command{\n\t\tUse: \"plan [-f SORUCE]\",\n\t\tShort: \"Generate and show an execution plan\",\n\t\tLong: planLong,\n\t\tExample: planExample,\n\t\tDisableFlagsInUseLine: true,\n\t\tSilenceUsage: true,\n\t\tSilenceErrors: true,\n\t\tArgs: cobra.MaximumNArgs(0),\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tif err := pc.meta.init(args); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn pc.run(args)\n\t\t},\n\t}\n\n\tf := planCmd.Flags()\n\tf.BoolVar(&pc.destroy, \"destroy\", false, \"create plan to destroy resources\")\n\n\tpc.addMetaFlags(planCmd)\n\n\treturn planCmd\n}", "func newImportCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"import [moniker] [keyfile]\",\n\t\tShort: \"import a private key to import a new keyfile\",\n\t\tLong: `\nImport keys to [moniker] from private key file [keyfile].\n`,\n\t\tArgs: cobra.ExactArgs(2),\n\t\tRunE: importKey,\n\t}\n\n\treturn cmd\n}", "func newDbCmd(f cmdutil.Factory, out io.Writer, errOut io.Writer) *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"db\",\n\t\tShort: \"Database management operations for the Ziti Edge Controller\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\terr := cmd.Help()\n\t\t\tcmdhelper.CheckErr(err)\n\t\t},\n\t}\n\n\tcmd.AddCommand(newDbSnapshotCmd(f, out, errOut))\n\tcmd.AddCommand(newDbCheckIntegrityCmd(f, out, errOut))\n\tcmd.AddCommand(newDbCheckIntegrityStatusCmd(f, out, errOut))\n\n\treturn cmd\n}" ]
[ "0.8430281", "0.6510477", "0.6467907", "0.6434617", "0.6412178", "0.63949656", "0.63839775", "0.6368971", "0.6363146", "0.6276115", "0.6205063", "0.6188749", "0.61736095", "0.61705375", "0.61700404", "0.6160562", "0.61471707", "0.6146327", "0.61446273", "0.61357003", "0.61230785", "0.6086823", "0.6081004", "0.6075808", "0.60664505", "0.6060681", "0.6060281", "0.60295117", "0.60248595", "0.6022728", "0.6019889", "0.5987733", "0.59723246", "0.595493", "0.5949625", "0.59481484", "0.594434", "0.5937676", "0.59369296", "0.592952", "0.5921986", "0.5914157", "0.5912143", "0.5893418", "0.5889729", "0.58814424", "0.5872636", "0.5862762", "0.5853941", "0.5832242", "0.58239585", "0.5776857", "0.57765865", "0.5775495", "0.57707036", "0.5765222", "0.5762686", "0.5751707", "0.57452244", "0.5744665", "0.5737693", "0.57358724", "0.57295483", "0.57295483", "0.57266116", "0.57188034", "0.5707531", "0.5701514", "0.5700319", "0.5691009", "0.5684331", "0.56840754", "0.5680532", "0.5676973", "0.56743366", "0.56598866", "0.5657755", "0.56513506", "0.5650293", "0.56487", "0.56352293", "0.5629376", "0.56262016", "0.5614487", "0.5609327", "0.5608916", "0.556921", "0.5568948", "0.5560946", "0.5558395", "0.55525655", "0.55520165", "0.5549922", "0.5546315", "0.55423206", "0.5540108", "0.5538695", "0.55384964", "0.5529275", "0.5528178" ]
0.78918034
1
All writes out all the differences between the specified releases
func (s *DiffCmd) All(w io.Writer) error {
	differ, err := diff.New(s.releaseRepo, s.release1, s.release2)
	if err != nil {
		return err
	}

	d, err := differ.Diff()
	if err != nil {
		return err
	}

	s.printDiffResult(w, d)

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func copyVersions(srcStore, dstStore dvid.Store, d1, d2 dvid.Data, uuids []dvid.UUID) error {\n\tif len(uuids) == 0 {\n\t\tdvid.Infof(\"no versions given for copy... aborting\\n\")\n\t\treturn nil\n\t}\n\tsrcDB, ok := srcStore.(rawQueryDB)\n\tif !ok {\n\t\treturn fmt.Errorf(\"source store %q doesn't have required raw range query\", srcStore)\n\t}\n\tdstDB, ok := dstStore.(rawPutDB)\n\tif !ok {\n\t\treturn fmt.Errorf(\"destination store %q doesn't have raw Put query\", dstStore)\n\t}\n\tvar dataInstanceChanged bool\n\tif d2 == nil {\n\t\td2 = d1\n\t} else {\n\t\tdataInstanceChanged = true\n\t}\n\tversionsOnPath, versionsToStore, err := calcVersionPath(uuids)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\n\tstatsTotal := new(txStats)\n\tstatsTotal.lastTime = time.Now()\n\tstatsTotal.name = fmt.Sprintf(\"%q total\", d1.DataName())\n\tstatsStored := new(txStats)\n\tstatsStored.lastTime = time.Now()\n\tstatsStored.name = fmt.Sprintf(\"stored into %q\", d2.DataName())\n\tvar kvTotal, kvSent int\n\tvar bytesTotal, bytesSent uint64\n\n\t// Start goroutine to receive all key-value pairs, process, and store them.\n\trawCh := make(chan *storage.KeyValue, 5000)\n\tgo func() {\n\t\tvar maxVersionKey storage.Key\n\t\tvar numStoredKV int\n\t\tkvsToStore := make(map[dvid.VersionID]*storage.KeyValue, len(versionsToStore))\n\t\tfor _, v := range versionsToStore {\n\t\t\tkvsToStore[v] = nil\n\t\t}\n\t\tfor {\n\t\t\tkv := <-rawCh\n\t\t\tif kv != nil && !storage.Key(kv.K).IsDataKey() {\n\t\t\t\tdvid.Infof(\"Skipping non-data key-value %x ...\\n\", []byte(kv.K))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif kv == nil || maxVersionKey == nil || bytes.Compare(kv.K, maxVersionKey) > 0 {\n\t\t\t\tif numStoredKV > 0 {\n\t\t\t\t\tvar lastKV *storage.KeyValue\n\t\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\t\tcurKV := kvsToStore[v]\n\t\t\t\t\t\tif lastKV == nil || (curKV != nil && bytes.Compare(lastKV.V, curKV.V) != 0) {\n\t\t\t\t\t\t\tif curKV != nil {\n\t\t\t\t\t\t\t\tkeybuf := make(storage.Key, len(curKV.K))\n\t\t\t\t\t\t\t\tcopy(keybuf, curKV.K)\n\t\t\t\t\t\t\t\tif dataInstanceChanged {\n\t\t\t\t\t\t\t\t\terr = storage.ChangeDataKeyInstance(keybuf, d2.InstanceID())\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\tdvid.Errorf(\"could not change instance ID of key to %d: %v\\n\", d2.InstanceID(), err)\n\t\t\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstorage.ChangeDataKeyVersion(keybuf, v)\n\t\t\t\t\t\t\t\tkvSent++\n\t\t\t\t\t\t\t\tbytesSent += uint64(len(curKV.V) + len(keybuf))\n\t\t\t\t\t\t\t\tif err := dstDB.RawPut(keybuf, curKV.V); err != nil {\n\t\t\t\t\t\t\t\t\tdvid.Errorf(\"can't put k/v pair to destination instance %q: %v\\n\", d2.DataName(), err)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstatsStored.addKV(keybuf, curKV.V)\n\t\t\t\t\t\t\t\tlastKV = curKV\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif kv == nil {\n\t\t\t\t\twg.Done()\n\t\t\t\t\tdvid.Infof(\"Sent %d %q key-value pairs (%s, out of %d kv pairs, %s)\\n\",\n\t\t\t\t\t\tkvSent, d1.DataName(), humanize.Bytes(bytesSent), kvTotal, humanize.Bytes(bytesTotal))\n\t\t\t\t\tdvid.Infof(\"Total KV Stats for %q:\\n\", d1.DataName())\n\t\t\t\t\tstatsTotal.printStats()\n\t\t\t\t\tdvid.Infof(\"Total KV Stats for newly stored %q:\\n\", d2.DataName())\n\t\t\t\t\tstatsStored.printStats()\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\ttk, err := storage.TKeyFromKey(kv.K)\n\t\t\t\tif err != nil {\n\t\t\t\t\tdvid.Errorf(\"couldn't get %q TKey from Key %v: %v\\n\", d1.DataName(), 
kv.K, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tmaxVersionKey, err = storage.MaxVersionDataKey(d1.InstanceID(), tk)\n\t\t\t\tif err != nil {\n\t\t\t\t\tdvid.Errorf(\"couldn't get max version key from Key %v: %v\\n\", kv.K, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\tkvsToStore[v] = nil\n\t\t\t\t}\n\t\t\t\tnumStoredKV = 0\n\t\t\t}\n\t\t\tcurV, err := storage.VersionFromDataKey(kv.K)\n\t\t\tif err != nil {\n\t\t\t\tdvid.Errorf(\"unable to get version from key-value: %v\\n\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcurBytes := uint64(len(kv.V) + len(kv.K))\n\t\t\tif _, onPath := versionsOnPath[curV]; onPath {\n\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\tif curV <= v {\n\t\t\t\t\t\tkvsToStore[v] = kv\n\t\t\t\t\t\tnumStoredKV++\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tkvTotal++\n\t\t\tbytesTotal += curBytes\n\t\t\tstatsTotal.addKV(kv.K, kv.V)\n\t\t}\n\t}()\n\n\t// Send all kv pairs for the source data instance down the channel.\n\tbegKey, endKey := storage.DataInstanceKeyRange(d1.InstanceID())\n\tkeysOnly := false\n\tif err := srcDB.RawRangeQuery(begKey, endKey, keysOnly, rawCh, nil); err != nil {\n\t\treturn fmt.Errorf(\"push voxels %q range query: %v\", d1.DataName(), err)\n\t}\n\twg.Wait()\n\treturn nil\n}", "func minorRelease(f *os.File, release, draftURL, changelogURL string) {\n\t// Check for draft and use it if available\n\tlog.Printf(\"Checking if draft release notes exist for %s...\", release)\n\n\tresp, err := http.Get(draftURL)\n\tif err == nil {\n\t\tdefer resp.Body.Close()\n\t}\n\n\tif err == nil && resp.StatusCode == 200 {\n\t\tlog.Print(\"Draft found - using for release notes...\")\n\t\t_, err = io.Copy(f, resp.Body)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"error during copy to file: %v\", err)\n\t\t\treturn\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tlog.Print(\"Failed to find draft - creating generic template... (error message/status code printed below)\")\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error message: %v\", err)\n\t\t} else {\n\t\t\tlog.Printf(\"Response status code: %d\", resp.StatusCode)\n\t\t}\n\t\tf.WriteString(\"## Major Themes\\n\\n* TBD\\n\\n## Other notable improvements\\n\\n* TBD\\n\\n## Known Issues\\n\\n* TBD\\n\\n## Provider-specific Notes\\n\\n* TBD\\n\\n\")\n\t}\n\n\t// Aggregate all previous release in series\n\tf.WriteString(fmt.Sprintf(\"### Previous Release Included in %s\\n\\n\", release))\n\n\t// Regexp Example:\n\t// Assume the release tag is v1.7.0, this regexp matches \"- [v1.7.0-\" in\n\t// \"- [v1.7.0-rc.1](#v170-rc1)\"\n\t// \"- [v1.7.0-beta.2](#v170-beta2)\"\n\t// \"- [v1.7.0-alpha.3](#v170-alpha3)\"\n\treAnchor, _ := regexp.Compile(fmt.Sprintf(\"- \\\\[%s-\", release))\n\n\tresp, err = http.Get(changelogURL)\n\tif err == nil {\n\t\tdefer resp.Body.Close()\n\t}\n\n\tif err == nil && resp.StatusCode == 200 {\n\t\tbuf := new(bytes.Buffer)\n\t\tbuf.ReadFrom(resp.Body)\n\t\tfor _, line := range strings.Split(buf.String(), \"\\n\") {\n\t\t\tif anchor := reAnchor.FindStringSubmatch(line); anchor != nil {\n\t\t\t\tf.WriteString(line + \"\\n\")\n\t\t\t}\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tlog.Print(\"Failed to fetch past changelog for minor release - continuing... 
(error message/status code printed below)\")\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error message: %v\", err)\n\t\t} else {\n\t\t\tlog.Printf(\"Response status code: %d\", resp.StatusCode)\n\t\t}\n\t}\n}", "func writeRevisions(dw printers.PrefixWriter, revisions []*revisionDesc, printDetails bool) {\n\trevSection := dw.WriteAttribute(\"Revisions\", \"\")\n\tdw.Flush()\n\tfor _, revisionDesc := range revisions {\n\t\tready := apis.Condition{\n\t\t\tType: apis.ConditionReady,\n\t\t\tStatus: corev1.ConditionUnknown,\n\t\t}\n\t\tfor _, cond := range revisionDesc.revision.Status.Conditions {\n\t\t\tif cond.Type == apis.ConditionReady {\n\t\t\t\tready = cond\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tsection := revSection.WriteColsLn(formatBullet(revisionDesc.percent, ready.Status), revisionHeader(revisionDesc))\n\t\tif ready.Status == corev1.ConditionFalse {\n\t\t\tsection.WriteAttribute(\"Error\", ready.Reason)\n\t\t}\n\t\trevision.WriteImage(section, revisionDesc.revision)\n\t\trevision.WriteReplicas(section, revisionDesc.revision)\n\t\tif printDetails {\n\t\t\trevision.WritePort(section, revisionDesc.revision)\n\t\t\trevision.WriteEnv(section, revisionDesc.revision, printDetails)\n\t\t\trevision.WriteEnvFrom(section, revisionDesc.revision, printDetails)\n\t\t\trevision.WriteScale(section, revisionDesc.revision)\n\t\t\trevision.WriteConcurrencyOptions(section, revisionDesc.revision)\n\t\t\trevision.WriteResources(section, revisionDesc.revision)\n\t\t}\n\t}\n}", "func (ghp *GithubProvider) GenerateReleaseText(mainRepo []string, coreRepos [][]string, otherRepos [][]string, major *bool, minor *bool, patch *bool) ([]string, string) {\n\n\tchanges, lastRelease, GhpErr := ghp.getChangesSinceLastRelease(mainRepo[0], mainRepo[1])\n\ttu.CheckExit(GhpErr)\n\n\tcurrentVersion := lastRelease.TagName\n\tnextVersion := ghp.calculateNextVersion(currentVersion, major, minor, patch)\n\n\tvar releaseText []string\n\n\tfmt.Println(\"nextVersion :: \" + nextVersion)\n\tfmt.Println(\"currentVersion :: \" + *currentVersion)\n\treleaseText = append(releaseText, \"# Changes since last version:\\n\")\n\treleaseText = append(releaseText, \"## Changes to \"+mainRepo[0]+\"/\"+mainRepo[1]+\":\\n\")\n\tfor _, change := range changes {\n\t\treleaseText = append(releaseText, fmt.Sprintf(\" - %s see %s/%s/pull/%d \\n\", change.Title, mainRepo[0], mainRepo[1], change.PrNum))\n\t}\n\treleaseText = append(releaseText, \"# Changes to core repositories:\\n\")\n\tfor _, coreRepo := range coreRepos {\n\t\tcoreChanges, changeErr := ghp.getChangesSinceRelease(lastRelease, coreRepo[0], coreRepo[1])\n\t\ttu.CheckWarn(changeErr)\n\t\treleaseText = append(releaseText, \"## Changes to \"+coreRepo[0]+\"/\"+coreRepo[1]+\":\\n\")\n\t\tif len(coreChanges) == 0 {\n\t\t\treleaseText = append(releaseText, \" - No changes in this version\")\n\t\t}\n\t\tfor _, coreChange := range coreChanges {\n\t\t\treleaseText = append(releaseText, fmt.Sprintf(\" - %s see %s/%s/pull/%d \\n\", coreChange.Title, coreRepo[0], coreRepo[1], coreChange.PrNum))\n\t\t}\n\t}\n\tfmt.Println(\"--------------------------------------------------\")\n\tfmt.Println(\"The Folowing repos will also be tagged with \" + nextVersion)\n\tfmt.Println(\"--------------------------------------------------\")\n\tfmt.Println(mainRepo[0] + \"/\" + mainRepo[1])\n\tfor _, repo := range coreRepos {\n\t\tfmt.Println(repo[0] + \"/\" + repo[1])\n\t}\n\tfor _, repo := range otherRepos {\n\t\tfmt.Println(repo[0] + \"/\" + repo[1])\n\t}\n\n\treturn releaseText, nextVersion\n}", "func exchangeVersions(rw 
io.ReadWriter, versionLine []byte) (them []byte, err error) {\n\t// Contrary to the RFC, we do not ignore lines that don't\n\t// start with \"SSH-2.0-\" to make the library usable with\n\t// nonconforming servers.\n\tfor _, c := range versionLine {\n\t\t// The spec disallows non US-ASCII chars, and\n\t\t// specifically forbids null chars.\n\t\tif c < 32 {\n\t\t\treturn nil, errors.New(\"ssh1: junk character in version line\")\n\t\t}\n\t}\n\tif _, err = rw.Write(append(versionLine, '\\r', '\\n')); err != nil {\n\t\treturn\n\t}\n\n\tthem, err = readVersion(rw)\n\treturn them, err\n}", "func StoreReleases(dbOwner, dbFolder, dbName string, releases map[string]ReleaseEntry) error {\n\tdbQuery := `\n\t\tUPDATE sqlite_databases\n\t\tSET release_list = $4, release_count = $5\n\t\tWHERE user_id = (\n\t\t\t\tSELECT user_id\n\t\t\t\tFROM users\n\t\t\t\tWHERE lower(user_name) = lower($1)\n\t\t\t)\n\t\t\tAND folder = $2\n\t\t\tAND db_name = $3`\n\tcommandTag, err := pdb.Exec(dbQuery, dbOwner, dbFolder, dbName, releases, len(releases))\n\tif err != nil {\n\t\tlog.Printf(\"Storing releases for database '%s%s%s' failed: %v\\n\", dbOwner, dbFolder, dbName, err)\n\t\treturn err\n\t}\n\tif numRows := commandTag.RowsAffected(); numRows != 1 {\n\t\tlog.Printf(\"Wrong number of rows (%v) affected when storing releases for database: '%s%s%s'\\n\", numRows,\n\t\t\tdbOwner, dbFolder, dbName)\n\t}\n\treturn nil\n}", "func templateHTML(releases []models.HelmRelease, w io.Writer) error {\n\n\tsum := internalSummery{\n\t\tOutdatedReleases: make(map[string][]uiHelmRelease),\n\t\tDeprecatedReleases: make(map[string][]uiHelmRelease),\n\t\tGoodReleases: make(map[string][]uiHelmRelease),\n\t}\n\n\tfor _, c := range releases {\n\t\tuiC := uiHelmRelease{\n\t\t\tName: c.Name,\n\t\t\tNamespace: c.Namespace,\n\t\t\tDeprecated: c.Deprecated,\n\t\t\tInstalledVersion: c.InstalledVersion,\n\t\t\tLatestVersion: c.LatestVersion,\n\t\t\tOutdated: c.InstalledVersion != c.LatestVersion,\n\t\t}\n\n\t\tif uiC.Deprecated {\n\t\t\tsum.DeprecatedReleases[uiC.Namespace] = append(sum.DeprecatedReleases[uiC.Namespace], uiC)\n\t\t} else if uiC.Outdated {\n\t\t\tsum.OutdatedReleases[uiC.Namespace] = append(sum.OutdatedReleases[uiC.Namespace], uiC)\n\t\t} else {\n\t\t\tsum.GoodReleases[uiC.Namespace] = append(sum.GoodReleases[uiC.Namespace], uiC)\n\t\t}\n\t}\n\n\tfor i, v := range sum.DeprecatedReleases {\n\t\tsort.Sort(ByName(v))\n\t\tsum.DeprecatedReleases[i] = v\n\t}\n\n\tfor i, v := range sum.OutdatedReleases {\n\t\tsort.Sort(ByName(v))\n\t\tsum.OutdatedReleases[i] = v\n\t}\n\n\tfor i, v := range sum.GoodReleases {\n\t\tsort.Sort(ByName(v))\n\t\tsum.GoodReleases[i] = v\n\t}\n\n\tt := template.Must(template.New(\"index.html\").Funcs(getFunctions()).ParseFS(views, \"views/*\"))\n\terr := t.Execute(w, sum)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func releases(ctx context.Context, c *github.Client, org string, project string) ([]*release, error) {\n\tvar result []*release\n\n\topts := &github.ListOptions{PerPage: 100}\n\n\tklog.Infof(\"Downloading releases for %s/%s ...\", org, project)\n\n\tfor page := 1; page != 0; {\n\t\topts.Page = page\n\t\trs, resp, err := c.Repositories.ListReleases(ctx, org, project, opts)\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\n\t\tpage = resp.NextPage\n\t\tuntil := time.Now()\n\n\t\tfor _, r := range rs {\n\t\t\tname := r.GetName()\n\t\t\tif name == \"\" {\n\t\t\t\tname = r.GetTagName()\n\t\t\t}\n\n\t\t\trel := &release{\n\t\t\t\tName: name,\n\t\t\t\tDraft: 
r.GetDraft(),\n\t\t\t\tPrerelease: r.GetPrerelease(),\n\t\t\t\tPublishedAt: r.GetPublishedAt().Time,\n\t\t\t\tActiveUntil: until,\n\t\t\t\tDownloads: map[string]int{},\n\t\t\t\tDownloadRatios: map[string]float64{},\n\t\t\t}\n\n\t\t\tfor _, a := range r.Assets {\n\t\t\t\tif ignoreAssetRe.MatchString(a.GetName()) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\trel.Downloads[a.GetName()] = a.GetDownloadCount()\n\t\t\t\trel.DownloadsTotal += int64(a.GetDownloadCount())\n\t\t\t}\n\n\t\t\tif !rel.Draft && !rel.Prerelease {\n\t\t\t\tuntil = rel.PublishedAt\n\t\t\t}\n\n\t\t\tresult = append(result, rel)\n\t\t}\n\t}\n\n\tfor _, r := range result {\n\t\tr.DaysActive = r.ActiveUntil.Sub(r.PublishedAt).Hours() / 24\n\t\tr.DownloadsPerDay = float64(r.DownloadsTotal) / r.DaysActive\n\n\t\tfor k, v := range r.Downloads {\n\t\t\tr.DownloadRatios[k] = float64(v) / float64(r.DownloadsTotal)\n\t\t}\n\t}\n\n\treturn result, nil\n}", "func (c Provider) Releases(params []string) (rs *results.ResultSet, err error) {\n\tname := params[0]\n\tclient, err := ftp.Dial(MirrorsFTP)\n\tif err != nil {\n\t\tlog.Debugf(\"Failed to connect to FTP server: %s\\n\", err)\n\t\terr = results.Unavailable\n\t\treturn\n\t}\n\tif err = client.Login(\"anonymous\", \"anonymous\"); err != nil {\n\t\tlog.Debugf(\"Failed to login to FTP server: %s\\n\", err)\n\t\terr = results.Unavailable\n\t\treturn\n\t}\n\tentries, err := client.List(\"gnu\" + \"/\" + name)\n\tif err != nil {\n\t\tlog.Debugf(\"FTP Error: %s\\n\", err.Error())\n\t\terr = results.NotFound\n\t\treturn\n\t}\n\trs = results.NewResultSet(name)\n\tfor _, entry := range entries {\n\t\tif entry.Type != ftp.EntryTypeFile {\n\t\t\tcontinue\n\t\t}\n\t\tif sm := TarballRegex.FindStringSubmatch(entry.Name); len(sm) > 2 {\n\t\t\tr := results.NewResult(sm[1], sm[2], fmt.Sprintf(GNUFormat, name, entry.Name), entry.Time)\n\t\t\trs.AddResult(r)\n\t\t}\n\t}\n\tif rs.Len() == 0 {\n\t\terr = results.NotFound\n\t}\n\tsort.Sort(rs)\n\treturn\n}", "func runReleaseCases(t *testing.T, tests []releaseCase, rcmd releaseCmd) {\n\tvar buf bytes.Buffer\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tc := &helm.FakeClient{Rels: tt.rels}\n\t\t\tcmd := rcmd(c, &buf)\n\t\t\tcmd.ParseFlags(tt.flags)\n\t\t\terr := cmd.RunE(cmd, tt.args)\n\t\t\tif (err != nil) != tt.err {\n\t\t\t\tt.Errorf(\"expected error, got '%v'\", err)\n\t\t\t}\n\t\t\tre := regexp.MustCompile(tt.expected)\n\t\t\tif !re.Match(buf.Bytes()) {\n\t\t\t\tt.Errorf(\"expected\\n%q\\ngot\\n%q\", tt.expected, buf.String())\n\t\t\t}\n\t\t\tbuf.Reset()\n\t\t})\n\t}\n}", "func (db *db) takeNewVersions() {\n\tfor version := range db.newVersions {\n\t\t// This is just to make functional tests easier to write.\n\t\tdelay := db.sequins.config.Test.UpgradeDelay.Duration\n\t\tif delay != 0 {\n\t\t\ttime.Sleep(delay)\n\t\t}\n\n\t\tdb.upgrade(version)\n\t}\n}", "func TestCreateProcessHistoryReport(t *testing.T) {\n\n timeSlice := float64(TIMESLICE)\n\n testtable := []struct {\n tname string\n }{\n {\n tname: \"ok\",\n },\n }\n\n CreateDummyProcessMetricsHistory(&testProcessMetricsArray)\n\n for _, tc := range testtable {\n\n t.Run(tc.tname, func(t *testing.T) {\n\n var buffer bytes.Buffer\n writer := bufio.NewWriter(&buffer)\n\n err := json.NewEncoder(writer).Encode(CreateProcessHistoryReport(testProcessMetricsArray, timeSlice))\n if err != nil {\n t.Fatalf(\"failed writing json: %s\", err)\n }\n writer.Flush()\n\n goldenPath := filepath.Join(\"testdata\", filepath.FromSlash(t.Name()) + \".golden\")\n\n\n if *update {\n\n 
t.Log(\"update golden file\")\n if err := ioutil.WriteFile(goldenPath, buffer.Bytes(), 0644); err != nil {\n t.Fatalf(\"failed to update golden file %s: %s\", goldenPath, err)\n }\n\n }\n\n\n goldenData, err := ioutil.ReadFile(goldenPath)\n\n if err != nil {\n t.Fatalf(\"failed reading .golden file %s: %s\", goldenPath, err)\n }\n\n t.Log(string(buffer.Bytes()))\n\n if !bytes.Equal(buffer.Bytes(), goldenData) {\n t.Errorf(\"bytes do not match .golden file %s\", goldenPath)\n }\n\n })\n }\n\n}", "func TestNewRevisionFundChecks(t *testing.T) {\n\t// helper func for revisions\n\trevWithValues := func(renterFunds, hostCollateralAvailable uint64) types.FileContractRevision {\n\t\tvalidOuts := make([]types.SiacoinOutput, 2)\n\t\tmissedOuts := make([]types.SiacoinOutput, 3)\n\n\t\t// funds remaining for renter, and payout to host.\n\t\tvalidOuts[0].Value = types.NewCurrency64(renterFunds)\n\t\tvalidOuts[1].Value = types.NewCurrency64(0)\n\n\t\t// Void payout from renter\n\t\tmissedOuts[0].Value = types.NewCurrency64(renterFunds)\n\n\t\t// Collateral\n\t\tmissedOuts[1].Value = types.NewCurrency64(hostCollateralAvailable)\n\n\t\treturn types.FileContractRevision{\n\t\t\tNewValidProofOutputs: validOuts,\n\t\t\tNewMissedProofOutputs: missedOuts,\n\t\t}\n\t}\n\n\t// Cost is less than renter funds should be okay.\n\t_, err := newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(99))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Cost equal to renter funds should be okay.\n\t_, err = newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(100))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Cost is more than renter funds should fail.\n\t_, err = newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(101))\n\tif !errors.Contains(err, types.ErrRevisionCostTooHigh) {\n\t\tt.Fatal(err)\n\t}\n\n\t// Collateral checks (in each, renter funds <= cost)\n\t//\n\t// Cost less than collateral should be okay.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(99))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Using up all collateral should be okay.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(100))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Not enough collateral should cause an error.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(100))\n\tif errors.Contains(err, types.ErrRevisionCollateralTooLow) {\n\t\tt.Fatal(err)\n\t}\n}", "func list_versions(w rest.ResponseWriter, r *rest.Request) {\n\tarch := r.PathParam(\"arch\")\n\tversions := []string{\"nightly\", \"beta\", \"stable\"}\n\t// get the numbered versions available\n\tdb_directories := get_directories(cache_instance, db, arch)\n\tfor _, dir := range db_directories {\n\t\tversion_path := strings.Split(dir.Path, \"/\")\n\t\tversion := version_path[len(version_path)-1]\n\t\tif version != \"snapshots\" {\n\t\t\tversions = append(versions, version)\n\t\t}\n\t}\n\t// Filter things folders we don't want in the versions out\n\n\tw.WriteJson(versions)\n}", "func (bs *Builders) CreateReleaseArchives() {\n\tfor _, b := range *bs {\n\t\tb.ReleaseArchive()\n\t}\n}", "func getFilesFromRelease(p string, r io.Reader) ([]*FileInfo, Paragraph, error) {\n\tdir := path.Dir(p)\n\n\td, err := NewParser(r).Read()\n\tif err != nil {\n\t\treturn nil, nil, errors.Wrap(err, \"NewParser(r).Read()\")\n\t}\n\n\tmd5sums := d[\"MD5Sum\"]\n\tsha1sums := 
d[\"SHA1\"]\n\tsha256sums := d[\"SHA256\"]\n\n\tif len(md5sums) == 0 && len(sha1sums) == 0 && len(sha256sums) == 0 {\n\t\treturn nil, d, nil\n\t}\n\n\tm := make(map[string]*FileInfo)\n\n\tfor _, l := range md5sums {\n\t\tp, size, csum, err := parseChecksum(l)\n\t\tp = path.Join(dir, path.Clean(p))\n\t\tif err != nil {\n\t\t\treturn nil, nil, errors.Wrap(err, \"parseChecksum for md5sums\")\n\t\t}\n\n\t\tfi := &FileInfo{\n\t\t\tpath: p,\n\t\t\tsize: size,\n\t\t\tmd5sum: csum,\n\t\t}\n\t\tm[p] = fi\n\t}\n\n\tfor _, l := range sha1sums {\n\t\tp, size, csum, err := parseChecksum(l)\n\t\tp = path.Join(dir, path.Clean(p))\n\t\tif err != nil {\n\t\t\treturn nil, nil, errors.Wrap(err, \"parseChecksum for sha1sums\")\n\t\t}\n\n\t\tfi, ok := m[p]\n\t\tif ok {\n\t\t\tfi.sha1sum = csum\n\t\t} else {\n\t\t\tfi := &FileInfo{\n\t\t\t\tpath: p,\n\t\t\t\tsize: size,\n\t\t\t\tsha1sum: csum,\n\t\t\t}\n\t\t\tm[p] = fi\n\t\t}\n\t}\n\n\tfor _, l := range sha256sums {\n\t\tp, size, csum, err := parseChecksum(l)\n\t\tp = path.Join(dir, path.Clean(p))\n\t\tif err != nil {\n\t\t\treturn nil, nil, errors.Wrap(err, \"parseChecksum for sha256sums\")\n\t\t}\n\n\t\tfi, ok := m[p]\n\t\tif ok {\n\t\t\tfi.sha256sum = csum\n\t\t} else {\n\t\t\tfi := &FileInfo{\n\t\t\t\tpath: p,\n\t\t\t\tsize: size,\n\t\t\t\tsha256sum: csum,\n\t\t\t}\n\t\t\tm[p] = fi\n\t\t}\n\t}\n\n\t// WORKAROUND: some (e.g. dell) repositories have invalid Release\n\t// that contains wrong checksum for Release itself. Ignore them.\n\tdelete(m, path.Join(dir, \"Release\"))\n\tdelete(m, path.Join(dir, \"Release.gpg\"))\n\tdelete(m, path.Join(dir, \"InRelease\"))\n\n\tl := make([]*FileInfo, 0, len(m))\n\tfor _, fi := range m {\n\t\tl = append(l, fi)\n\t}\n\treturn l, d, nil\n}", "func patchRelease(f *os.File, info *ReleaseInfo) {\n\t// Release note for different labels\n\tf.WriteString(fmt.Sprintf(\"## Changelog since %s\\n\\n\", info.startTag))\n\n\tif len(info.releaseActionRequiredPRs) > 0 {\n\t\tf.WriteString(\"### Action Required\\n\\n\")\n\t\tfor _, pr := range info.releaseActionRequiredPRs {\n\t\t\tf.WriteString(fmt.Sprintf(\"* %s (#%d, @%s)\\n\", extractReleaseNoteFromPR(info.prMap[pr]), pr, *info.prMap[pr].User.Login))\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t}\n\n\tif len(info.releasePRs) > 0 {\n\t\tf.WriteString(\"### Other notable changes\\n\\n\")\n\t\tfor _, pr := range info.releasePRs {\n\t\t\tf.WriteString(fmt.Sprintf(\"* %s (#%d, @%s)\\n\", extractReleaseNoteFromPR(info.prMap[pr]), pr, *info.prMap[pr].User.Login))\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tf.WriteString(\"**No notable changes for this release**\\n\\n\")\n\t}\n}", "func Test12(t *testing.T) {\n\tcustommutatingBaseCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tcustommutatingBaseCollection.Spec.Version = \"1.2.3\"\n\tnewCollection.Spec.Version = \"1.2.4\"\n\tcustommutatingBaseCollection.Spec.Versions[0].Version = \"1.2.4\"\n\tnewCollection.Spec.Versions[0].Version = \"1.2.4\"\n\n\terr := processUpdate(custommutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tDesiredState: \"active\",\n\t\tRepositoryUrl: \"https://github.com/some/collection/kabanero-index.yaml\",\n\t\tVersion: \"1.2.4\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. 
New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n\n\tif newCollection.Spec.RepositoryUrl != \"https://github.com/some/collection/kabanero-index.yaml\" {\n\t\tt.Fatal(\"New collection.Spec.RepositoryUrl values do not match expected value of https://github.com/some/collection/kabanero-index.yaml. RepositoryUrl found: \", newCollection.Spec.RepositoryUrl)\n\t}\n\tif newCollection.Spec.Version != \"1.2.4\" {\n\t\tt.Fatal(\"New collection.Spec.Version values do not match expected value of 1.2.3. Version found: \", newCollection.Spec.Version)\n\t}\n\tif newCollection.Spec.DesiredState != \"active\" {\n\t\tt.Fatal(\"New collection.Spec.DesiredState values do not match expected value of active. DesiredStateme found: \", newCollection.Spec.DesiredState)\n\t}\n}", "func LatestVersions(releases []*semver.Version, minVersion *semver.Version) []*semver.Version {\n\tvar versionSlice []*semver.Version\n\t// This is just an example structure of the code, if you implement this interface, the test cases in main_test.go are very easy to run\n\t// by bruce\n\tif len(releases) == 0{\n\t\treturn versionSlice\n\t}\n\n\t// sort O(n * log(n))\n\tsemver.Sort(releases)\n\n\t// iterate releases, find target versions\n\t// O(n)\n\tindex := len(releases) - 1\n\ttarget := index\n\tfor index >=0 && releases[index].Compare(*minVersion) >= 0 {\n\t\tif index == len(releases) - 1{\n\t\t\tversionSlice = append(versionSlice, releases[index])\n\t\t}else if releases[target].Major != releases[index].Major || releases[target].Minor != releases[index].Minor {\n\t\t\tversionSlice = append(versionSlice, releases[index])\n\t\t\ttarget = index\n\t\t}\n\t\tindex--\n\t}\n\n\treturn versionSlice\n}", "func Test4(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"4.5.6\"\n\tnewCollection.Spec.DesiredState = \"inactive\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tRepositoryUrl: \"https://github.com/some/collection/alternate-kabanero-index.yaml\",\n\t\tVersion: \"4.5.6\",\n\t\tDesiredState: \"inactive\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. 
New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func printReleases(m map[string]string) string {\n\trelease := parseUrlforStu(m[\"release\"])\n\tvar output string\n\tfor _, thing := range release.Items {\n\t\toutput += strFormatOut(thing)\n\t\toutput += \"\\n\"\n\t}\n\treturn output\n}", "func (u *Updater) UpdateTo(release Release) error {\n\tif release == nil {\n\t\tvar err error\n\t\trelease, err = u.Check()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif release == nil {\n\t\t\treturn errors.New(\"The application is already up to date.\")\n\t\t}\n\t}\n\n\twriters := make([]AbortWriter, 0)\n\tvar abort = func() {\n\t\tfor _, w := range writers {\n\t\t\tw.Abort()\n\t\t}\n\t}\n\n\tfor _, a := range release.Assets() {\n\t\tw, err := u.WriterForAsset(a)\n\t\tif err != nil {\n\t\t\tabort()\n\t\t\treturn err\n\t\t}\n\n\t\twriters = append(writers, w)\n\n\t\tif w != nil {\n\t\t\terr := a.Write(w)\n\t\t\tif err != nil {\n\t\t\t\tabort()\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c Provider) Releases(name string) (rs *results.ResultSet, s results.Status) {\n\tr, s := c.Latest(name)\n\tif s != results.OK {\n\t\treturn\n\t}\n\trs = results.NewResultSet(name)\n\trs.AddResult(r)\n\treturn\n}", "func ZipVerifyRefs(releases, refs io.Reader, w io.Writer) error {\n\t// Define a grouper, working on one set of refs and releases with the same\n\t// key at a time. Here, we do verification and write out the generated\n\t// biblioref.\n\tenc := json.NewEncoder(w)\n\tkeyer := func(s string) (string, error) {\n\t\tif k := lineColumn(s, \"\\t\", 2); k == \"\" {\n\t\t\treturn k, fmt.Errorf(\"cannot get key: %s\", s)\n\t\t} else {\n\t\t\treturn k, nil\n\t\t}\n\t}\n\tgrouper := func(g *zipkey.Group) error {\n\t\tif len(g.G0) == 0 || len(g.G1) == 0 {\n\t\t\treturn nil\n\t\t}\n\t\tpivot, err := stringToRelease(lineColumn(g.G0[0], \"\\t\", 3))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, line := range g.G1 {\n\t\t\tre, err := stringToRelease(lineColumn(line, \"\\t\", 3))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tresult := Verify(pivot, re, 5)\n\t\t\tswitch result.Status {\n\t\t\tcase StatusExact, StatusStrong:\n\t\t\t\tif result.Reason == ReasonDOI {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tbr := generateBiblioRef(re, pivot, result.Status, result.Reason, \"fuzzy\")\n\t\t\t\tif err := enc.Encode(br); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\tzipper := zipkey.New(releases, refs, keyer, grouper)\n\treturn zipper.Run()\n}", "func releasedToolsOnly(tools []string) []string {\n\n var released []string\n\n for _,v:= range tools {\n if validateRelease(v,3) { released=append(released,v)}\n }\n return released\n\n}", "func TestBuildv16(t *testing.T) {\n\tdir := t.TempDir()\n\n\topts := Options{\n\t\tIndexDir: dir,\n\t\tRepositoryDescription: zoekt.Repository{\n\t\t\tName: \"repo\",\n\t\t\tSource: \"./testdata/repo/\",\n\t\t},\n\t\tDisableCTags: true,\n\t}\n\topts.SetDefaults()\n\n\tb, err := NewBuilder(opts)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tfor _, p := range []string{\"main.go\"} {\n\t\tblob, err := os.ReadFile(filepath.Join(\"../testdata/repo\", p))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\tb.AddFile(p, blob)\n\t}\n\n\twantP := filepath.Join(\"../testdata/shards\", \"repo_v16.00000.zoekt\")\n\n\t// fields indexTime and id depend on time. 
For this test, we copy the fields from\n\t// the old shard.\n\t_, wantMetadata, err := zoekt.ReadMetadataPath(wantP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tb.indexTime = wantMetadata.IndexTime\n\tb.id = wantMetadata.ID\n\n\tif err := b.Finish(); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tgotP := filepath.Join(dir, \"repo_v16.00000.zoekt\")\n\n\tif *update {\n\t\tdata, err := os.ReadFile(gotP)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\terr = os.WriteFile(wantP, data, 0644)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\treturn\n\t}\n\n\tgot, err := os.ReadFile(gotP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\twant, err := os.ReadFile(wantP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif d := cmp.Diff(want, got); d != \"\" {\n\t\tt.Errorf(\"mismatch (-want +got):\\n%s\", d)\n\t}\n}", "func compareRelease(owner, repo, tagName string) (*github.CommitsComparison, error) {\n\tclient, ctx := defaultGithubClient(), context.Background()\n\tdefer timeTrack(time.Now(), \"API call to client.Repositories.CompareCommits()\")\n\tcc, _, err := client.Repositories.CompareCommits(ctx, owner, repo, tagName, \"HEAD\")\n\tif cc != nil {\n\t\treverseCommitOrder(cc)\n\t}\n\treturn cc, err\n}", "func changesInternal(old, new *types.Package, oldRootPackagePath, newRootPackagePath string) Report {\n\td := newDiffer(old, new)\n\td.checkPackage(oldRootPackagePath)\n\tr := Report{}\n\tfor _, m := range d.incompatibles.collect(oldRootPackagePath, newRootPackagePath) {\n\t\tr.Changes = append(r.Changes, Change{Message: m, Compatible: false})\n\t}\n\tfor _, m := range d.compatibles.collect(oldRootPackagePath, newRootPackagePath) {\n\t\tr.Changes = append(r.Changes, Change{Message: m, Compatible: true})\n\t}\n\treturn r\n}", "func writeToRevisionFile(strSlice []string, filename string) error {\n\tfile, err := os.Create(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tfor idx, str := range strSlice {\n\t\tif idx == len(strSlice)-1 {\n\t\t\t// do not add new line to last line\n\t\t\tfile.WriteString(str)\n\t\t} else {\n\t\t\tfile.WriteString(str + \"\\n\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func Test11(t *testing.T) {\n\tcustommutatingBaseCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tcustommutatingBaseCollection.Spec.Version = \"1.2.4\"\n\tnewCollection.Spec.Version = \"1.2.4\"\n\tcustommutatingBaseCollection.Spec.Versions[0].Version = \"2.0.0\"\n\tnewCollection.Spec.Versions[0].Version = \"2.0.1\"\n\n\terr := processUpdate(custommutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\tif newCollection.Spec.RepositoryUrl != \"https://github.com/some/collection/kabanero-index.yaml\" {\n\t\tt.Fatal(\"New collection.Spec.RepositoryUrl values do not match expected value of https://github.com/some/collection/kabanero-index.yaml. RepositoryUrl found: \", newCollection.Spec.RepositoryUrl)\n\t}\n\tif newCollection.Spec.Version != \"2.0.1\" {\n\t\tt.Fatal(\"New collection.Spec.Version values do not match expected value of 1.2.3. Version found: \", newCollection.Spec.Version)\n\t}\n\tif newCollection.Spec.DesiredState != \"active\" {\n\t\tt.Fatal(\"New collection.Spec.DesiredState values do not match expected value of active. 
DesiredStateme found: \", newCollection.Spec.DesiredState)\n\t}\n}", "func Test6(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"https://github.com/some/collection/other-alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"7.8.9\"\n\tnewCollection.Spec.DesiredState = \"active\"\n\tnewCollection.Spec.Versions[0].RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Versions[0].Version = \"4.5.6\"\n\tnewCollection.Spec.Versions[0].DesiredState = \"inactive\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err == nil {\n\t\tt.Fatal(\"An error condition should have been reported. New collection.Spec and new collection.Spec.versions[0] contain conflicting data.\", err)\n\t}\n}", "func (c Provider) Releases(name string) (rs *results.ResultSet, s results.Status) {\n\tclient, err := ftp.Dial(MirrorsFTP)\n\tif err != nil {\n\t\ts = results.Unavailable\n\t\treturn\n\t}\n\terr = client.Login(\"anonymous\", \"anonymous\")\n\tif err != nil {\n\t\ts = results.Unavailable\n\t\treturn\n\t}\n\tentries, err := client.List(\"gnu\" + \"/\" + name)\n\tif err != nil {\n\t\tfmt.Printf(\"FTP Error: %s\\n\", err.Error())\n\t\ts = results.NotFound\n\t\treturn\n\t}\n\trs = results.NewResultSet(name)\n\tfor _, entry := range entries {\n\t\tif entry.Type != ftp.EntryTypeFile {\n\t\t\tcontinue\n\t\t}\n\t\tsm := TarballRegex.FindStringSubmatch(entry.Name)\n\t\tif len(sm) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tr := results.NewResult(sm[1], sm[2], fmt.Sprintf(GNUFormat, name, entry.Name), entry.Time)\n\t\trs.AddResult(r)\n\t\ts = results.OK\n\t}\n\treturn\n}", "func getAllRevisions(\n\tctx context.Context,\n\tdb *client.DB,\n\tstartKey, endKey roachpb.Key,\n\tstartTime, endTime hlc.Timestamp,\n) ([]versionedValues, error) {\n\t// TODO(dt): version check.\n\theader := roachpb.Header{Timestamp: endTime}\n\treq := &roachpb.ExportRequest{\n\t\tSpan: roachpb.Span{Key: startKey, EndKey: endKey},\n\t\tStartTime: startTime,\n\t\tMVCCFilter: roachpb.MVCCFilter_All,\n\t\tReturnSST: true,\n\t}\n\tresp, pErr := client.SendWrappedWith(ctx, db.GetSender(), header, req)\n\tif pErr != nil {\n\t\treturn nil, pErr.GoError()\n\t}\n\n\tvar res []versionedValues\n\tfor _, file := range resp.(*roachpb.ExportResponse).Files {\n\t\tsst := engine.MakeRocksDBSstFileReader()\n\t\tdefer sst.Close()\n\n\t\tif err := sst.IngestExternalFile(file.SST); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tstart, end := engine.MVCCKey{Key: startKey}, engine.MVCCKey{Key: endKey}\n\t\tif err := sst.Iterate(start, end, func(kv engine.MVCCKeyValue) (bool, error) {\n\t\t\tif len(res) == 0 || !res[len(res)-1].Key.Equal(kv.Key.Key) {\n\t\t\t\tres = append(res, versionedValues{Key: kv.Key.Key})\n\t\t\t}\n\t\t\tres[len(res)-1].Values = append(res[len(res)-1].Values, roachpb.Value{Timestamp: kv.Key.Timestamp, RawBytes: kv.Value})\n\t\t\treturn false, nil\n\t\t}); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn res, nil\n}", "func Updates(oldEntrys []Entry, newEntrys []Entry) []Entry {\n\tresult := []Entry{}\n\tfor _, oldE := range oldEntrys {\n\t\tfor _, newE := range newEntrys {\n\t\t\tif oldE.DeviceFile == newE.DeviceFile {\n\t\t\t\tif !oldE.Equal(newE) {\n\t\t\t\t\tresult = append(result, newE) //For some reason not equal\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn result\n}", "func cmdGetReleases(ccmd *cobra.Command, args []string) {\n\taplSvc := apl.NewClient()\n\toutput := runGetCommand(args, 
aplSvc.Releases.Get)\n\tif output != nil {\n\t\tfields := []string{\"ID\", \"StackID\", \"Version\", \"CreatedTime\"}\n\t\tprintTableResultsCustom(output.(apl.Release), fields)\n\t}\n}", "func Test2(t *testing.T) {\n\tnewCollection := mutatingBaseCollection.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"4.5.6\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tRepositoryUrl: \"https://github.com/some/collection/alternate-kabanero-index.yaml\",\n\t\tVersion: \"4.5.6\",\n\t\tDesiredState: \"active\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"Mutated versions[0] does not match expected versions[0] values. Mutated versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func generateDownloadHTML() (err error) {\n\tfds := []*fileDesc{}\n\n\t// It happens we want releases in reverse order in the HTML table:\n\tsort.Sort(sort.Reverse(sort.StringSlice(targetFiles)))\n\twebFolder := filepath.Join(srtgearsRoot, relativeWebFldr)\n\n\tparams := map[string]interface{}{\n\t\t\"ReleaseDate\": time.Now().Format(\"2006-01-02\"),\n\t}\n\n\t// Fill fds scice\n\tfor i, targetFile := range targetFiles {\n\t\tfd := fileDesc{Name: filepath.Base(targetFile)}\n\t\tfds = append(fds, &fd)\n\n\t\t// the regexp patter is for folder name (without extension)\n\t\tnameNoExt := fd.Name[:len(fd.Name)-len(filepath.Ext(fd.Name))]\n\t\tif parts := rlsFldrPttrn.FindStringSubmatch(nameNoExt); len(parts) > 0 {\n\t\t\t// [full string, version, os, arch]\n\t\t\tparams[\"Version\"] = parts[1]\n\t\t\tfd.OS = osNameMap[parts[2]]\n\t\t\tfd.Arch = archNameMap[parts[3]]\n\t\t} else {\n\t\t\t// Never to happen, file name was already matched earlier\n\t\t\treturn fmt.Errorf(\"Target name does not match pattern: %s\", targetFile)\n\t\t}\n\t\tif i%2 != 0 {\n\t\t\tfd.Class = \"alt\"\n\t\t}\n\t\tif fd.URL, err = filepath.Rel(webFolder, targetFile); err != nil {\n\t\t\treturn\n\t\t}\n\t\t// We need forward slashes \"/\" in urls:\n\t\tfd.URL = \"/\" + filepath.ToSlash(fd.URL)\n\t\tvar fi os.FileInfo\n\t\tif fi, err = os.Stat(targetFile); err != nil {\n\t\t\treturn\n\t\t}\n\t\tfd.Size = fmt.Sprintf(\"%.2f MB\", float64(fi.Size())/(1<<20))\n\n\t\t// Hash and include checksum\n\t\tvar content []byte\n\t\tif content, err = ioutil.ReadFile(targetFile); err != nil {\n\t\t\treturn\n\t\t}\n\t\tfd.SHA256 = fmt.Sprintf(\"%x\", sha256.Sum256(content))\n\t}\n\tparams[\"Fds\"] = fds\n\n\t// Now generate download table:\n\tt := template.Must(template.New(\"\").Parse(dltable))\n\tbuf := &bytes.Buffer{}\n\tif err = t.Execute(buf, params); err != nil {\n\t\treturn\n\t}\n\toutf := \"download-table.html\"\n\tif err = ioutil.WriteFile(outf, buf.Bytes(), 0); err != nil {\n\t\treturn\n\t}\n\tlog.Println(\"Download table written to:\", outf)\n\t// Also print to console:\n\tos.Stdout.Write(buf.Bytes())\n\treturn\n}", "func TestAudit(t *testing.T) {\n\tfor version := range versions {\n\t\trunTestWithVersion(t, version)\n\t}\n}", "func (c Provider) Releases(params []string) (rs *results.ResultSet, err error) {\n\tname := params[0]\n\t// Query the API\n\turl := fmt.Sprintf(SeriesAPI, name)\n\tvar seriesList SeriesList\n\tif err = util.FetchJSON(url, \"series\", &seriesList); err != nil {\n\t\treturn\n\t}\n\t// Proccess 
Releases\n\tvar lrs Releases\n\tfor _, s := range seriesList.Entries {\n\t\t// Only Active Series\n\t\tif !s.Active {\n\t\t\tcontinue\n\t\t}\n\t\t// Only stable or supported\n\t\tswitch s.Status {\n\t\tcase \"Active Development\":\n\t\tcase \"Current Stable Release\":\n\t\tcase \"Supported\":\n\t\tdefault:\n\t\t\tcontinue\n\t\t}\n\t\turl := fmt.Sprintf(ReleasesAPI, name, s.Name)\n\t\tvar vl VersionList\n\t\tif err = util.FetchJSON(url, \"releases\", &vl); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tfor i := len(vl.Versions) - 1; i >= 0; i-- {\n\t\t\tr := vl.Versions[i]\n\t\t\turl := fmt.Sprintf(FilesAPI, name, s.Name, r.Number)\n\t\t\tvar fl FileList\n\t\t\tif err = util.FetchJSON(url, \"files\", &fl); err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvar lr Release\n\t\t\tfor _, f := range fl.Files {\n\t\t\t\tif f.Type != \"Code Release Tarball\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tlr.name = name\n\t\t\t\tlr.series = s.Name\n\t\t\t\tlr.release = r.Number\n\t\t\t\tlr.uploaded = f.Uploaded\n\t\t\t}\n\t\t\tlrs = append(lrs, lr)\n\t\t}\n\t}\n\tif len(lrs) == 0 {\n\t\terr = results.NotFound\n\t\treturn\n\t}\n\trs = lrs.Convert(name)\n\terr = nil\n\treturn\n}", "func Test7(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tDesiredState: \"active\",\n\t\tRepositoryUrl: \"https://github.com/some/collection/kabanero-index.yaml\",\n\t\tVersion: \"1.2.3\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n\n\tif newCollection.Spec.RepositoryUrl != \"https://github.com/some/collection/kabanero-index.yaml\" {\n\t\tt.Fatal(\"New collection.Spec.RepositoryUrl values do not match expected value of https://github.com/some/collection/kabanero-index.yaml. RepositoryUrl found: \", newCollection.Spec.RepositoryUrl)\n\t}\n\tif newCollection.Spec.Version != \"1.2.3\" {\n\t\tt.Fatal(\"New collection.Spec.Version values do not match expected value of 1.2.3. Version found: \", newCollection.Spec.Version)\n\t}\n\tif newCollection.Spec.DesiredState != \"active\" {\n\t\tt.Fatal(\"New collection.Spec.DesiredState values do not match expected value of active. DesiredStateme found: \", newCollection.Spec.DesiredState)\n\t}\n}", "func Test10(t *testing.T) {\n\tcustommutatingBaseCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tcustommutatingBaseCollection.Spec.Version = \"1.2.4\"\n\tnewCollection.Spec.Version = \"1.2.5\"\n\tcustommutatingBaseCollection.Spec.Versions[0].Version = \"2.0.0\"\n\tnewCollection.Spec.Versions[0].Version = \"2.0.0\"\n\n\terr := processUpdate(custommutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tDesiredState: \"active\",\n\t\tRepositoryUrl: \"https://github.com/some/collection/kabanero-index.yaml\",\n\t\tVersion: \"1.2.5\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. 
New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func (a ASCIITableWriter) Write(out io.Writer, p *api.Project) error {\n\ttable := tablewriter.NewWriter(out)\n\ttable.SetHeader([]string{\"RELEASE\", \"Downloads\"})\n\n\tfor _, r := range p.Releases() {\n\t\ttable.Append([]string{r.Name, strconv.Itoa(r.DownloadCount())})\n\t}\n\ttable.Render()\n\treturn nil\n}", "func Test13(t *testing.T) {\n\tcustommutatingBaseCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tcustommutatingBaseCollection.Spec.Version = \"1.2.4\"\n\tnewCollection.Spec.Version = \"1.2.5\"\n\tcustommutatingBaseCollection.Spec.Versions[0].Version = \"2.0.0\"\n\tnewCollection.Spec.Versions[0].Version = \"2.0.1\"\n\terr := processUpdate(custommutatingBaseCollection, newCollection)\n\tif err == nil {\n\t\tt.Fatal(\"An error condition should have been reported. Current and new collection.Spec and current and new collection.Spec.versions[0] have different values.\", err)\n\t}\n}", "func (tc *TableCollection) releaseTables(ctx context.Context, opt releaseOpt) error {\n\ttc.timestamp = hlc.Timestamp{}\n\tif len(tc.leasedTables) > 0 {\n\t\tlog.VEventf(ctx, 2, \"releasing %d tables\", len(tc.leasedTables))\n\t\tfor _, table := range tc.leasedTables {\n\t\t\tif err := tc.leaseMgr.Release(table); err != nil {\n\t\t\t\tlog.Warning(ctx, err)\n\t\t\t}\n\t\t}\n\t\ttc.leasedTables = tc.leasedTables[:0]\n\t}\n\ttc.uncommittedTables = nil\n\n\tif opt == blockForDBCacheUpdate {\n\t\tfor _, uc := range tc.uncommittedDatabases {\n\t\t\tif !uc.dropped {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\terr := tc.dbCacheSubscriber.waitForCacheState(\n\t\t\t\tfunc(dc *databaseCache) (bool, error) {\n\t\t\t\t\tdesc, err := dc.getCachedDatabaseDesc(uc.name, false /*required*/)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t\tif desc == nil {\n\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t}\n\t\t\t\t\t// If the database name still exists but it now references another\n\t\t\t\t\t// db, we're good - it means that the database name has been reused\n\t\t\t\t\t// within the same transaction.\n\t\t\t\t\treturn desc.ID != uc.id, nil\n\t\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\ttc.uncommittedDatabases = nil\n\ttc.releaseAllDescriptors()\n\treturn nil\n}", "func (d *Release) Diff() map[string]*Changes {\n\tm := d.merge()\n\tif len(m) == 0 {\n\t\treturn nil\n\t}\n\t// Builds the list of available prefixes of deploy keys\n\t// with project name and env values.\n\tprefix := make([]string, 0)\n\tfor _, ev1 := range d.env1 {\n\t\tfor _, ev2 := range d.env2 {\n\t\t\tpid := string(d.ref.Key())\n\t\t\tprefix = append(prefix, Key(pid, ev1, ev2)+\"_\")\n\t\t}\n\t}\n\t// Returns only the name of the variable from the deploy key.\n\tname := func(s string) string {\n\t\tfor _, v := range prefix {\n\t\t\tif strings.HasPrefix(s, v) {\n\t\t\t\treturn strings.TrimPrefix(s, v)\n\t\t\t}\n\t\t}\n\t\tpanic(\"deploy: fails to find prefix in deploy key named: \" + s)\n\t}\n\tc := make(map[string]*Changes)\n\tfor k, v := range d.dep {\n\t\tn := name(k)\n\t\tif _, ok := c[n]; !ok {\n\t\t\tc[n] = &Changes{Var: n, Log: make(map[string][2]interface{})}\n\t\t}\n\t\tcv := d.dst[k]\n\t\tc[n].Log[k] = change(cv, v)\n\t}\n\treturn c\n}", "func (c *Config) CheckReleases(readOnly bool) ([]ReleaseList, error) {\n\tif c == nil || c.client == nil {\n\t\treturn nil, errors.New(\"uninitialized client\")\n\t}\n\n\tif err := 
c.loadStateFile(); err != nil {\n\t\treturn nil, errors.Wrap(err, \"cannot load state file\")\n\t}\n\n\tnewReleases := make(chan ReleaseList)\n\trepoQ := make(chan RepoConfig)\n\tctx := context.Background()\n\n\t// Launch workers\n\tfor i := 0; i < releaseWorkerCount; i++ {\n\t\tgo c.checkReleaseWorker(ctx, i+1, repoQ, newReleases)\n\t}\n\n\t// Queue jobs\n\tgo func() {\n\t\tfor _, r := range c.Repositories {\n\t\t\trepoQ <- r\n\t\t}\n\t\tclose(repoQ)\n\t}()\n\n\t// Collect results\n\tvar newReleaseList []ReleaseList\n\tfor resultCount := len(c.Repositories); resultCount > 0; {\n\t\trel := <-newReleases\n\t\tresultCount--\n\n\t\tif len(rel) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Queue the release for states updates\n\t\tnewReleaseList = append(newReleaseList, rel)\n\t}\n\n\t// Leave now if the result list is empty or if we don't need to save them\n\tif len(newReleaseList) == 0 || readOnly {\n\t\treturn newReleaseList, nil\n\t}\n\n\t// Update repository states\n\tfor _, s := range newReleaseList {\n\t\t// Update states\n\t\tif c.states == nil {\n\t\t\trm := make(map[string]RepoState)\n\t\t\tc.states = &States{Repositories: rm}\n\t\t}\n\t\tc.states.Repositories[s[0].Repo] = *(s[0].RepoState)\n\t}\n\n\t// Save states\n\tlogrus.Debug(\"Saving states...\")\n\tif err := c.writeStateFile(); err != nil {\n\t\treturn newReleaseList, errors.Wrap(err, \"cannot write state file\")\n\t}\n\n\treturn newReleaseList, nil\n}", "func ChooseVersions() (err error) {\n\tstate := state.Instance\n\n\t// when defined, use set version\n\tif provider == nil {\n\t\terr = errors.New(\"osd must be setup when upgrading with release stream\")\n\t} else if shouldUpgrade() {\n\t\terr = setupUpgradeVersion()\n\t} else {\n\t\t_, err = setupVersion()\n\t}\n\n\t// Set the versions in metadata. 
If upgrade hasn't been chosen, it should still be omitted from the end result.\n\tmetadata.Instance.SetClusterVersion(state.Cluster.Version)\n\tmetadata.Instance.SetUpgradeVersion(state.Upgrade.ReleaseName)\n\n\treturn err\n}", "func getEnvoyReleaseDates() (map[string]string, error) {\n\turl := \"https://api.github.com/repos/envoyproxy/envoy/releases\"\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close() //nolint\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, fmt.Errorf(\"received %v status code from %v\", resp.StatusCode, url)\n\t}\n\tbody, err := io.ReadAll(resp.Body) // fully read the response\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error reading %s: %w\", url, err)\n\t}\n\n\tvar releases []gitHubRelease\n\tif err := json.Unmarshal(body, &releases); err != nil {\n\t\treturn nil, fmt.Errorf(\"error unmarshalling GitHub Releases: %w\", err)\n\t}\n\n\tm := map[string]string{}\n\tfor _, r := range releases { //nolint:gocritic\n\t\tif r.Draft || r.PreRelease {\n\t\t\tcontinue\n\t\t}\n\t\t// clean inputs \"v1.15.4\" -> \"2021-05-11T19:11:09Z\" into \"1.15.4\" -> \"2021-05-11\"\n\t\tm[strings.TrimPrefix(r.Name, \"v\")] = r.PublishedAt[0:10]\n\t}\n\treturn m, nil\n}", "func logDifferences(current, creates, updates, deletes Components, logf func(format string, args ...interface{})) error {\n\tlog := func(action string, current, desired *Component) error {\n\t\tdiff, err := componentDiffText(current, desired)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlogf(\"%s\", action)\n\t\tif diff != \"\" {\n\t\t\tlogf(\"Diff:\\n%s\", diff)\n\t\t}\n\t\tif current != nil && desired != nil && !reflect.DeepEqual(current.SecretValues, desired.SecretValues) {\n\t\t\tlogrus.Info(\"Diff: secrets have changed, not shown here\")\n\t\t}\n\t\treturn nil\n\t}\n\n\tfor _, d := range deletes {\n\t\tlogf(\"Delete: %s\", d.Name)\n\t}\n\n\tfor _, d := range creates {\n\t\tif err := log(\"Create: \"+d.Name, nil, d); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor _, d := range updates {\n\t\tc := current[d.Name]\n\t\tif err := log(\"Update: \"+d.Name, c, d); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func releasesIdentical(r1, r2 *shipper.Release) bool {\n\tif r1 == nil || r2 == nil {\n\t\treturn r1 == r2\n\t}\n\treturn r1.Namespace == r2.Namespace &&\n\t\tr1.Name == r2.Name\n}", "func (vs *versionSet) createManifest(\n\tdirname string, fileNum, nextFileNum fileNum,\n) (err error) {\n\tvar (\n\t\tfilename = makeFilename(vs.fs, dirname, fileTypeManifest, fileNum)\n\t\tmanifestFile vfs.File\n\t\tmanifest *writer\n\t)\n\tdefer func() {\n\t\tif manifest != nil {\n\t\t\terr = firstError(err, manifest.close())\n\t\t}\n\t\tif manifestFile != nil {\n\t\t\terr = firstError(err, manifestFile.Close())\n\t\t}\n\t\tif err != nil {\n\t\t\terr = firstError(err, vs.fs.Remove(filename))\n\t\t}\n\t}()\n\tmanifestFile, err = vs.fs.Create(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tmanifest = newWriter(manifestFile)\n\n\tsnapshot := versionEdit{}\n\tcv := vs.currentVersion()\n\tfor _, meta := range cv.files {\n\t\tsnapshot.newFiles = append(snapshot.newFiles, newFileEntry{\n\t\t\tmeta: meta,\n\t\t})\n\t}\n\n\t// When creating a version snapshot for an existing DB, this snapshot VersionEdit will be\n\t// immediately followed by another VersionEdit (being written in logAndApply()). 
That\n\t// VersionEdit always contains a LastSeqNum, so we don't need to include that in the snapshot.\n\t// But it does not necessarily include MinUnflushedLogNum, NextFileNum, so we initialize those\n\t// using the corresponding fields in the versionSet (which came from the latest preceding\n\t// VersionEdit that had those fields).\n\tsnapshot.nextFileNum = nextFileNum\n\n\tw, err1 := manifest.next()\n\tif err1 != nil {\n\t\treturn err1\n\t}\n\tif err := snapshot.encode(w); err != nil {\n\t\treturn err\n\t}\n\n\tif vs.manifest != nil {\n\t\tif err := vs.manifest.close(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tvs.manifest = nil\n\t}\n\tif vs.manifestFile != nil {\n\t\tif err := vs.manifestFile.Close(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tvs.manifestFile = nil\n\t}\n\n\tvs.manifest, manifest = manifest, nil\n\tvs.manifestFile, manifestFile = manifestFile, nil\n\treturn nil\n}", "func (ghp *GithubProvider) getChangesSinceRelease(release *github.RepositoryRelease, owner string, repo string) ([]changes, error) {\n\n\tresult := []changes{}\n\n\tprOpts := github.PullRequestListOptions{\n\t\tState: \"all\",\n\t\tBase: \"\",\n\t}\n\tprs, _, prErr := ghp.client.PullRequests.List(ghp.ctx, owner, repo, &prOpts)\n\tif prErr != nil {\n\t\treturn nil, prErr\n\t}\n\n\tfor _, pr := range prs {\n\t\tif pr.MergedAt != nil && pr.MergedAt.After(release.CreatedAt.Time) {\n\t\t\tPrNumTmp := strings.Split(pr.GetURL(), \"/\")\n\t\t\tPrNum, _ := strconv.Atoi(PrNumTmp[len(PrNumTmp)-1])\n\t\t\tresult = append(result, changes{\n\t\t\t\tpr.GetTitle(),\n\t\t\t\tpr.GetURL(),\n\t\t\t\tPrNum,\n\t\t\t})\n\t\t}\n\t}\n\n\treturn result, nil\n\n}", "func GetInstalledReleases(o GetInstalledReleasesOptions) ([]ReleaseSpec, error) {\n\n\ttillerNamespace := \"kube-system\"\n\tlabels := \"OWNER=TILLER,STATUS in (DEPLOYED,FAILED)\"\n\tif !o.IncludeFailed {\n\t\tlabels = strings.Replace(labels, \"FAILED\", \"\", -1)\n\t}\n\tstorage, err := getTillerStorage(o.KubeContext, tillerNamespace)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar releaseSpecs []ReleaseSpec\n\tlist, err := listReleases(o.KubeContext, o.Namespace, storage, tillerNamespace, labels)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, releaseData := range list {\n\n\t\tif releaseData.status != \"DEPLOYED\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar releaseSpec ReleaseSpec\n\t\treleaseSpec.ReleaseName = releaseData.name\n\t\treleaseSpec.ChartName = releaseData.chart\n\t\treleaseSpec.ChartVersion = releaseData.version\n\n\t\treleaseSpecs = append(releaseSpecs, releaseSpec)\n\t}\n\n\tif !o.IncludeFailed {\n\t\treturn releaseSpecs, nil\n\t}\n\n\tfor _, releaseData := range list {\n\t\tif releaseData.status != \"FAILED\" {\n\t\t\tcontinue\n\t\t}\n\n\t\texists := false\n\t\tfor _, rs := range releaseSpecs {\n\t\t\tif releaseData.name == rs.ReleaseName {\n\t\t\t\texists = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif exists {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar releaseSpec ReleaseSpec\n\t\treleaseSpec.ReleaseName = releaseData.name\n\t\treleaseSpec.ChartName = releaseData.chart\n\t\treleaseSpec.ChartVersion = releaseData.version\n\n\t\treleaseSpecs = append(releaseSpecs, releaseSpec)\n\t}\n\n\treturn releaseSpecs, nil\n}", "func LimitVersions(uui dvid.UUID, configFName string) error {\n\tif manager == nil {\n\t\treturn fmt.Errorf(\"can't limit versions with uninitialized manager\")\n\t}\n\tf, err := os.Open(configFName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdata, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar tc 
transferConfig\n\tif err := json.Unmarshal(data, &tc); err != nil {\n\t\treturn err\n\t}\n\tokUUIDs := make(map[dvid.UUID]bool, len(tc.Versions))\n\tokVersions := make(map[dvid.VersionID]bool, len(tc.Versions))\n\tfor _, uuid := range tc.Versions {\n\t\tokUUIDs[uuid] = true\n\t\tif v, found := manager.uuidToVersion[uuid]; found {\n\t\t\tancestry, err := manager.getAncestry(v)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, ancestorV := range ancestry {\n\t\t\t\tancestorUUID, found := manager.versionToUUID[ancestorV]\n\t\t\t\tif !found {\n\t\t\t\t\treturn fmt.Errorf(\"version %d has no UUID equivalent\", ancestorV)\n\t\t\t\t}\n\t\t\t\tokUUIDs[ancestorUUID] = true\n\t\t\t\tokVersions[ancestorV] = true\n\t\t\t}\n\t\t}\n\t}\n\tmanager.repoMutex.Lock()\n\tmanager.idMutex.Lock()\n\tvar repo *repoT\n\tfor uuid, r := range manager.repos {\n\t\tif _, found := okUUIDs[uuid]; found {\n\t\t\tif repo == nil {\n\t\t\t\trepo = r\n\t\t\t}\n\t\t} else {\n\t\t\tdelete(manager.repos, uuid)\n\t\t\tdelete(manager.uuidToVersion, uuid)\n\t\t}\n\t}\n\tfor v := range manager.versionToUUID {\n\t\tif !okVersions[v] {\n\t\t\tdelete(manager.versionToUUID, v)\n\t\t}\n\t}\n\tfor v, node := range repo.dag.nodes {\n\t\tif !okVersions[v] {\n\t\t\tdelete(repo.dag.nodes, v)\n\t\t} else {\n\t\t\tvar parents, children []dvid.VersionID\n\t\t\tfor _, parent := range node.parents {\n\t\t\t\tif okVersions[parent] {\n\t\t\t\t\tparents = append(parents, parent)\n\t\t\t\t}\n\t\t\t}\n\t\t\tnode.parents = parents\n\t\t\tfor _, child := range node.children {\n\t\t\t\tif okVersions[child] {\n\t\t\t\t\tchildren = append(children, child)\n\t\t\t\t}\n\t\t\t}\n\t\t\tnode.children = children\n\t\t}\n\t}\n\tmanager.idMutex.Unlock()\n\tmanager.repoMutex.Unlock()\n\treturn nil\n}", "func (db *db) backfillVersions() error {\n\tdb.refreshLock.Lock()\n\tdefer db.refreshLock.Unlock()\n\n\tversions, err := db.sequins.backend.ListVersions(db.name, \"\", db.sequins.config.RequireSuccessFile)\n\tif err != nil {\n\t\treturn err\n\t} else if len(versions) == 0 {\n\t\treturn nil\n\t}\n\n\t// Only look at the last 3 versions, to keep this next part quick.\n\tif len(versions) > 3 {\n\t\tversions = versions[len(versions)-3:]\n\t}\n\n\t// Iterate through all the versions we know about, and track the remote and\n\t// local partitions for it. We don't download anything we don't have, but if\n\t// one is ready - because we have all the partitions locally, or because our\n\t// peers already do - we can switch to it immediately. Even if none are\n\t// available immediately, we can still start watching out for peers on old\n\t// versions for which we have data locally, in case they start to appear (as\n\t// would happen if a bunch of nodes with stale data started up together).\n\tfor i := len(versions) - 1; i >= 0; i-- {\n\t\tv := versions[i]\n\t\tfiles, err := db.sequins.backend.ListFiles(db.name, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tversion := newVersion(db.sequins, db.localPath(v), db.name, v, len(files))\n\t\tif version.ready() {\n\t\t\t// The version is complete, most likely because our peers have it. 
We\n\t\t\t// can switch to it right away, and build any (possibly underreplicated)\n\t\t\t// partitions in the background.\n\t\t\t// TODO: In the case that we *do* have some data locally, this will cause\n\t\t\t// us to advertise that before we're actually listening over HTTP.\n\t\t\tlog.Println(\"Starting with pre-loaded version\", v, \"of\", db.name)\n\n\t\t\tdb.mux.prepare(version)\n\t\t\tdb.upgrade(version)\n\t\t\tdb.trackVersion(version, versionBuilding)\n\t\t\tgo func() {\n\t\t\t\terr := version.build(files)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Println(\"Error building version %s of %s: %s\", v, db.name, err)\n\t\t\t\t\tdb.trackVersion(version, versionError)\n\t\t\t\t}\n\n\t\t\t\tlog.Println(\"Finished building version\", v, \"of\", db.name)\n\t\t\t\tdb.trackVersion(version, versionAvailable)\n\t\t\t\tversion.advertiseAndWait()\n\t\t\t}()\n\n\t\t\tbreak\n\t\t} else if version.getBlockStore() != nil {\n\t\t\t// The version isn't complete, but we have partitions locally and can\n\t\t\t// start waiting on peers. This happens if, for example, a complete\n\t\t\t// cluster with stored data comes up all at once.\n\t\t\tdb.switchVersion(version)\n\t\t} else {\n\t\t\tversion.close()\n\t\t}\n\t}\n\n\tgo db.cleanupStore()\n\treturn nil\n}", "func (c Provider) Releases(params []string) (rs *results.ResultSet, err error) {\n\t// Query the API\n\tid := strings.Join(strings.Split(params[1], \"/\"), \"%2f\")\n\turl := fmt.Sprintf(TagsEndpoint, params[0], id)\n\tvar tags Tags\n\tif err = util.FetchJSON(url, \"releases\", &tags); err != nil {\n\t\treturn\n\t}\n\trs = tags.Convert(params[0], params[1])\n\treturn\n}", "func (f *Factory) findReleases(ctx context.Context, u *url.URL) ([]*claircore.Distribution, error) {\n\tdir, err := u.Parse(\"dists/\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"debian: unable to construct URL: %w\", err)\n\t}\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, dir.String(), nil)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"debian: unable to construct request: %w\", err)\n\t}\n\tres, err := f.c.Do(req)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"debian: unable to do request: %w\", err)\n\t}\n\tdefer res.Body.Close()\n\tswitch res.StatusCode {\n\tcase http.StatusOK:\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"debian: unexpected status fetching %q: %s\", dir.String(), res.Status)\n\t}\n\tvar buf bytes.Buffer\n\tif _, err := buf.ReadFrom(res.Body); err != nil {\n\t\treturn nil, fmt.Errorf(\"debian: unable to read dists listing: %w\", err)\n\t}\n\tms := linkRegexp.FindAllStringSubmatch(buf.String(), -1)\n\n\tvar todos []*claircore.Distribution\nListing:\n\tfor _, m := range ms {\n\t\tdist := m[1]\n\t\tswitch {\n\t\tcase dist == \"\":\n\t\t\tcontinue\n\t\tcase dist[0] == '/', dist[0] == '?':\n\t\t\tcontinue\n\t\t}\n\t\tfor _, s := range skipList {\n\t\t\tif strings.Contains(dist, s) {\n\t\t\t\tcontinue Listing\n\t\t\t}\n\t\t}\n\t\tdist = strings.Trim(dist, \"/\")\n\t\trf, err := dir.Parse(path.Join(dist, `Release`))\n\t\tif err != nil {\n\t\t\tzlog.Info(ctx).\n\t\t\t\tErr(err).\n\t\t\t\tStringer(\"context\", dir).\n\t\t\t\tStr(\"target\", path.Join(dist, `Release`)).\n\t\t\t\tMsg(\"unable to construct URL\")\n\t\t\tcontinue\n\t\t}\n\t\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, rf.String(), nil)\n\t\tif err != nil {\n\t\t\tzlog.Info(ctx).\n\t\t\t\tErr(err).\n\t\t\t\tStringer(\"url\", rf).\n\t\t\t\tMsg(\"unable to construct request\")\n\t\t\tcontinue\n\t\t}\n\t\treq.Header.Set(\"range\", \"bytes=0-512\")\n\t\tres, err := 
f.c.Do(req)\n\t\tif err != nil {\n\t\t\tzlog.Info(ctx).\n\t\t\t\tErr(err).\n\t\t\t\tStringer(\"url\", rf).\n\t\t\t\tMsg(\"unable to do request\")\n\t\t\tcontinue\n\t\t}\n\t\tbuf.Reset()\n\t\tbuf.ReadFrom(res.Body)\n\t\tres.Body.Close()\n\t\tswitch res.StatusCode {\n\t\tcase http.StatusPartialContent, http.StatusOK:\n\t\tcase http.StatusNotFound: // Probably extremely old, it's fine.\n\t\t\tcontinue\n\t\tdefault:\n\t\t\tzlog.Info(ctx).\n\t\t\t\tStr(\"status\", res.Status).\n\t\t\t\tStringer(\"url\", rf).\n\t\t\t\tMsg(\"unexpected response\")\n\t\t\tcontinue\n\t\t}\n\t\ttp := textproto.NewReader(bufio.NewReader(io.MultiReader(&buf, bytes.NewReader([]byte(\"\\r\\n\\r\\n\")))))\n\t\th, err := tp.ReadMIMEHeader()\n\t\tif err != nil {\n\t\t\tzlog.Info(ctx).Err(err).Msg(\"unable to read MIME-ish headers\")\n\t\t\tcontinue\n\t\t}\n\t\tsv := h.Get(\"Version\")\n\t\tif sv == \"\" {\n\t\t\tzlog.Debug(ctx).Str(\"dist\", dist).Msg(\"no version assigned, skipping\")\n\t\t\tcontinue\n\t\t}\n\t\tvs := strings.Split(sv, \".\")\n\t\tif len(vs) == 1 {\n\t\t\tzlog.Debug(ctx).Str(\"dist\", dist).Msg(\"no version assigned, skipping\")\n\t\t\tcontinue\n\t\t}\n\t\tver, err := strconv.ParseInt(vs[0], 10, 32)\n\t\tif err != nil {\n\t\t\tzlog.Info(ctx).Err(err).Msg(\"unable to parse version\")\n\t\t\tcontinue\n\t\t}\n\n\t\ttodos = append(todos, mkDist(dist, int(ver)))\n\t}\n\n\treturn todos, nil\n}", "func storeDependenciesVersion(workingDirectoryPath string) {\n\n\tversions := dependenciesVersion{\n\t\tAlien4cloud: alien4cloudVersion,\n\t\tConsul: consulVersion,\n\t\tTerraform: terraformVersion,\n\t\tYorc: yorcVersion,\n\t}\n\n\tbSlice, err := yaml.Marshal(versions)\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to marshall dependencies versions %v\\n\", err)\n\t\treturn\n\t}\n\n\tfilename := filepath.Join(workingDirectoryPath, \"versions.yaml\")\n\tfile, err := os.OpenFile(filename, os.O_CREATE|os.O_WRONLY, 0600)\n\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to open file file %s: %v\\n\", filename, err)\n\t\treturn\n\t}\n\n\tdefer file.Close()\n\n\t_, err = fmt.Fprintf(file, string(bSlice[:]))\n\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to write in file %s: %v\\n\", filename, err)\n\t}\n}", "func Test9(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"\"\n\tnewCollection.Spec.Version = \"\"\n\tnewCollection.Spec.DesiredState = \"\"\n\tnewCollection.Spec.Versions[0].RepositoryUrl = \"\"\n\tnewCollection.Spec.Versions[0].Version = \"\"\n\tnewCollection.Spec.Versions[0].DesiredState = \"\"\n\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err == nil {\n\t\tt.Fatal(\"An error condition should have been reported. 
New collection.Spec and new collection.Spec.versions[0] contain empty fields.\", err)\n\t}\n}", "func OpenShiftToolsVersions1(version string) ([]string) {\n files:=getOpenShiftFileList(\"https://mirror.openshift.com/pub/openshift-v4/clients/ocp/\"+version+\"/sha256sum.txt\")\n return files\n}", "func TestMeta_DoubleBuffering_Even(t *testing.T) {\n\ttargetVersion := 4\n\n\tt.Run(\"restore works\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\trequire.Equal(t, 4, int(nw.meta.Version))\n\t\trequire.Equal(t, w.meta, nw.meta)\n\t})\n\n\t// simulate when we failed during commiting a newer update\n\t// in slot A - so falls back to previous meta in slot B\n\tt.Run(\"metaA corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.Version--\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val3\"),\n\t\t}\n\t\trequire.Equal(t, 3, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n\n\t// simulate a failure in commiting this latest update\n\t// falls back to previous state in slot A\n\tt.Run(\"metaB corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+meta_page_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val4\"),\n\t\t}\n\t\trequire.Equal(t, 4, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n\n}", "func AvailableUpdates(repoOrg string, repoName string, currentVersion string) (avail bool, newVersion string, releaseURL string, err error) {\n\tnewVersion = \"\"\n\tctx := context.Background()\n\tclient := ddevgh.GetGithubClient(ctx)\n\topt := &github.ListOptions{Page: 1}\n\treleases, _, err := client.Repositories.ListReleases(ctx, repoOrg, repoName, opt)\n\tif err != nil {\n\t\treturn false, newVersion, \"\", err\n\t}\n\n\tif isReleaseVersion(currentVersion) {\n\t\tcv, err := semver.NewVersion(currentVersion)\n\t\tif err != nil {\n\t\t\treturn false, newVersion, \"\", err\n\t\t}\n\t\tfor _, release := range releases {\n\t\t\tif *release.Prerelease {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tnewReleaseVersion, err := semver.NewVersion(*release.TagName)\n\t\t\tif err != nil {\n\t\t\t\treturn false, newVersion, \"\", err\n\t\t\t}\n\t\t\tnewVersion = *release.TagName\n\n\t\t\tif cv.Compare(newReleaseVersion) == -1 {\n\t\t\t\treturn true, newVersion, *release.HTMLURL, nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false, newVersion, \"\", nil\n}", "func TestMeta_DoubleBuffering_Odd(t *testing.T) {\n\ttargetVersion := 5\n\n\tt.Run(\"restore works\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\trequire.Equal(t, 5, int(nw.meta.Version))\n\t\trequire.Equal(t, w.meta, nw.meta)\n\t})\n\n\t// simulate a failure in commiting this latest update\n\t// falls back to previous state in slot B\n\tt.Run(\"metaA corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer 
cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val5\"),\n\t\t}\n\t\trequire.Equal(t, 5, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n\n\t// simulate a failure in commiting this latest update\n\t// falls back to previous state in slot A\n\tt.Run(\"metaB corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+meta_page_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.Version--\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val4\"),\n\t\t}\n\t\trequire.Equal(t, 4, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n}", "func (info *Info) writeFiles(w io.Writer, open func(fi FileInfo) (io.ReadCloser, error)) error {\n\tfor _, fi := range info.UpvertedFiles() {\n\t\tr, err := open(fi)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error opening %v: %s\", fi, err)\n\t\t}\n\t\twn, err := io.CopyN(w, r, fi.Length)\n\t\tr.Close()\n\t\tif wn != fi.Length {\n\t\t\treturn fmt.Errorf(\"error copying %v: %s\", fi, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Client) AllReleases() ([]db.Release, error) {\n\tnames, err := c.names()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tuiprogress.Start()\n\tbar := uiprogress.AddBar(len(names))\n\tbar.PrependFunc(func(b *uiprogress.Bar) string {\n\t\trate := float64(b.Current()) / b.TimeElapsed().Seconds()\n\t\tremainingCount := b.Total - b.Current()\n\t\tremainingTime := time.Duration(float64(remainingCount)/rate) * time.Second\n\n\t\treturn fmt.Sprintf(\n\t\t\t\"%v left (%.f/s)\",\n\t\t\tremainingTime,\n\t\t\trate,\n\t\t)\n\t})\n\treleases := make(chan db.Release)\n\tc.addReleases(names, releases, bar)\n\tclose(releases)\n\treturn releaseChanToSlice(releases), nil\n}", "func Test5(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.Versions[0].RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Versions[0].Version = \"4.5.6\"\n\tnewCollection.Spec.Versions[0].DesiredState = \"inactive\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\tif newCollection.Spec.RepositoryUrl != \"https://github.com/some/collection/alternate-kabanero-index.yaml\" {\n\t\tt.Fatal(\"New collection.Spec.RepositoryUrl values do not match expected value of https://github.com/some/collection/alternate-kabanero-index.yaml. RepositoryUrl found: \", newCollection.Spec.RepositoryUrl)\n\t}\n\tif newCollection.Spec.Version != \"4.5.6\" {\n\t\tt.Fatal(\"New collection.Spec.Version values do not match expected value of 4.5.6. Version found: \", newCollection.Spec.Version)\n\t}\n\tif newCollection.Spec.DesiredState != \"inactive\" {\n\t\tt.Fatal(\"New collection.Spec.DesiredState values do not match expected value of inactive. 
DesiredStateme found: \", newCollection.Spec.DesiredState)\n\t}\n}", "func TestDiff(t *testing.T) {\n\tt.Parallel()\n\n\tsrc, err := ioutil.TempFile(\"\", \"unarchive_test.txt\")\n\trequire.NoError(t, err)\n\tdefer os.Remove(src.Name())\n\n\tdestInvalid, err := ioutil.TempFile(\"\", \"unarchive_test.txt\")\n\trequire.NoError(t, err)\n\tdefer os.Remove(destInvalid.Name())\n\n\tt.Run(\"source does not exist\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: \"\",\n\t\t\tDestination: \"/tmp\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, \"cannot unarchive: stat : no such file or directory\")\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"destination is not directory\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: destInvalid.Name(),\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, fmt.Sprintf(\"invalid destination \\\"%s\\\", must be directory\", u.Destination))\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"destination does not exist\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: \"\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, fmt.Sprintf(\"destination \\\"%s\\\" does not exist\", u.Destination))\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"unarchive\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: \"/tmp\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.NoError(t, err)\n\t\tassert.Equal(t, u.Source, status.Diffs()[\"unarchive\"].Original())\n\t\tassert.Equal(t, u.Destination, status.Diffs()[\"unarchive\"].Current())\n\t\tassert.Equal(t, resource.StatusWillChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n}", "func (c Provider) Releases(name string) (rs *results.ResultSet, s results.Status) {\n\trs, s = c.GetReleases(name, 100)\n\treturn\n}", "func TestCreateProcessAverageReport(t *testing.T) {\n\n timeSlice := float64(TIMESLICE)\n processName := \"proc5\"\n\n testtable := []struct {\n tname string\n }{\n {\n tname: \"ok\",\n },\n }\n\n CreateDummyProcessMetrics(testNodeMetricsMap)\n\n for _, tc := range testtable {\n\n t.Run(tc.tname, func(t *testing.T) {\n\n var buffer bytes.Buffer\n writer := bufio.NewWriter(&buffer)\n\n err := json.NewEncoder(writer).Encode(CreateProcessAverageReport(testNodeMetricsMap, processName, timeSlice))\n if err != nil {\n t.Fatalf(\"failed writing json: %s\", err)\n }\n writer.Flush()\n\n goldenPath := filepath.Join(\"testdata\", filepath.FromSlash(t.Name()) + \".golden\")\n\n\n if *update {\n\n t.Log(\"update golden file\")\n if err := ioutil.WriteFile(goldenPath, buffer.Bytes(), 0644); err != nil {\n t.Fatalf(\"failed to update golden file %s: %s\", goldenPath, err)\n }\n\n }\n\n\n goldenData, err := ioutil.ReadFile(goldenPath)\n\n if err != nil {\n t.Fatalf(\"failed reading .golden file %s: %s\", goldenPath, err)\n }\n\n t.Log(string(buffer.Bytes()))\n\n if !bytes.Equal(buffer.Bytes(), goldenData) {\n t.Errorf(\"bytes do not match .golden file %s\", goldenPath)\n }\n\n })\n }\n\n}", "func (tx *Tx) commit() {\n\tfor v, val := range 
tx.writes {\n\t\tv.mu.Lock()\n\t\tv.val = val\n\t\tv.version++\n\t\tv.mu.Unlock()\n\t}\n}", "func (a *Agent) ListReleases(\n\tctx context.Context,\n\tnamespace string,\n\tfilter *types.ReleaseListFilter,\n) ([]*release.Release, error) {\n\tctx, span := telemetry.NewSpan(ctx, \"helm-list-releases\")\n\tdefer span.End()\n\n\ttelemetry.WithAttributes(span,\n\t\ttelemetry.AttributeKV{Key: \"namespace\", Value: namespace},\n\t)\n\n\tlsel := fmt.Sprintf(\"owner=helm,status in (%s)\", strings.Join(filter.StatusFilter, \",\"))\n\n\t// list secrets\n\tsecretList, err := a.K8sAgent.Clientset.CoreV1().Secrets(namespace).List(\n\t\tcontext.Background(),\n\t\tv1.ListOptions{\n\t\t\tLabelSelector: lsel,\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting secret list\")\n\t}\n\n\t// before decoding to helm release, only keep the latest releases for each chart\n\tlatestMap := make(map[string]corev1.Secret)\n\n\tfor _, secret := range secretList.Items {\n\t\trelName, relNameExists := secret.Labels[\"name\"]\n\n\t\tif !relNameExists {\n\t\t\tcontinue\n\t\t}\n\n\t\tid := fmt.Sprintf(\"%s/%s\", secret.Namespace, relName)\n\n\t\tif currLatest, exists := latestMap[id]; exists {\n\t\t\t// get version\n\t\t\tcurrVersionStr, currVersionExists := currLatest.Labels[\"version\"]\n\t\t\tversionStr, versionExists := secret.Labels[\"version\"]\n\n\t\t\tif versionExists && currVersionExists {\n\t\t\t\tcurrVersion, currErr := strconv.Atoi(currVersionStr)\n\t\t\t\tversion, err := strconv.Atoi(versionStr)\n\t\t\t\tif currErr == nil && err == nil && currVersion < version {\n\t\t\t\t\tlatestMap[id] = secret\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tlatestMap[id] = secret\n\t\t}\n\t}\n\n\tchartList := []string{}\n\tres := make([]*release.Release, 0)\n\n\tfor _, secret := range latestMap {\n\t\trel, isErr, err := kubernetes.ParseSecretToHelmRelease(secret, chartList)\n\n\t\tif !isErr && err == nil {\n\t\t\tres = append(res, rel)\n\t\t}\n\t}\n\n\treturn res, nil\n}", "func (b *BackuPOT) versions() ([]time.Time, error) {\n times, err := b.client.GetVersions()\n if err != nil {\n return nil, backupotDebug.Error(err)\n }\n results := make([]time.Time, 0)\n for _, version := range times {\n num, err := strconv.Atoi(version)\n if err != nil {\n return nil, backupotDebug.Error(err)\n }\n results = append(results, time.Unix(int64(num), 0))\n }\n return results, nil\n}", "func (p prerelease) updateAllGoModFiles() error {\n\tfmt.Println(\"Updating all module versions in go.mod files...\")\n\tfor _, modFilePath := range p.ModuleSetRelease.ModPathMap {\n\t\tif err := p.updateGoModVersions(modFilePath); err != nil {\n\t\t\treturn fmt.Errorf(\"could not update module versions in file %v: %v\", modFilePath, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Client) ReleasesSince(t time.Time) ([]db.Release, error) {\n\tchanges := [][]interface{}{}\n\n\terr := c.client.Call(\"changelog\", []interface{}{t.Unix(), true}, &changes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treleases := []db.Release{}\n\tfor _, change := range changes {\n\t\tif change[3] == \"new release\" {\n\t\t\treleases = append(releases, db.Release{\n\t\t\t\tName: change[0].(string),\n\t\t\t\tVersion: change[1].(string),\n\t\t\t\tTime: time.Unix(change[2].(int64), 0),\n\t\t\t})\n\t\t}\n\n\t}\n\treturn releases, nil\n}", "func publish(flags publisherFlags) error {\n\thelper := buildtool.MustLoad()\n\tpackageBase, err := helper.ResolveBuildPath(flags.packageBase)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot resolve out_dir %q: 
%v\", flags.packageBase, err)\n\t}\n\tp4 := p4lib.New()\n\tchangelist, err := makeChangelist(p4, helper, flags.name, flags.change, flags.description)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error getting/creating changelist: %v\", err)\n\t}\n\tglog.Info(\"changelist: \", changelist)\n\n\t// Search input artifacts for package manifests\n\tpkgBuilder := packagemanifest.NewPackageBuilder()\n\tfor _, inputs := range helper.Invocation().Inputs {\n\t\tfor _, input := range inputs.Artifacts {\n\t\t\tp, ok := buildtool.ResolveArtifact(input)\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif !packagemanifest.IsPkgArtifact(input) {\n\t\t\t\treturn fmt.Errorf(\"non-package artifact found in inputs: %s\", p)\n\t\t\t}\n\t\t\tif err := pkgBuilder.AddPkgManifest(p); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\tvar paths []string\n\tfor _, e := range pkgBuilder.Entries() {\n\t\tpaths = append(paths, path.Join(packageBase, e.PkgPath))\n\t}\n\n\t// p4.Reconcile could generate a command that is too long to execute if we were to give it all the paths in one call\n\tif err := partition(paths, 100, func(pathsSubset []string) error {\n\t\tfor _, path := range pathsSubset {\n\t\t\tglog.Info(path)\n\t\t}\n\t\treconcileResult, err := p4.Reconcile(pathsSubset, changelist)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error reconciling %q: %v\", pathsSubset, err)\n\t\t}\n\t\tglog.Info(reconcileResult)\n\t\treturn nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\tdescriptions, err := p4.Describe([]int{changelist})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error describing changelist: %v\", err)\n\t}\n\tglog.Info(\"flags.submitCl: \", flags.submitCl)\n\tif areAllChangelistDescriptionsEmpty(descriptions) {\n\t\tglog.Info(\"changelist empty, cleaning up\")\n\t\tcleanupChange(p4, changelist)\n\t} else if flags.submitCl {\n\t\tglog.Info(\"submitting cl: \", changelist)\n\t\tif _, err := p4.Submit(changelist); err != nil {\n\t\t\tglog.Errorf(\"error in submit: %v\", err)\n\t\t\tif err := cleanupChange(p4, changelist); err != nil {\n\t\t\t\tglog.Error(err)\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t}\n\tif err := writePublishResult(paths, flags.name, helper); err != nil {\n\t\treturn fmt.Errorf(\"error writing publish results: %v\", err)\n\t}\n\treturn nil\n}", "func Release(version, commit, date string) {\n\tif version == \"\" {\n\t\tversion = \"dev\"\n\t} else if version[0] == 'v' {\n\t\tversion = version[1:]\n\t}\n\tif commit == \"\" {\n\t\tcommit = \"-\"\n\t}\n\tif date == \"\" {\n\t\tdate = \"-\"\n\t}\n\tVersion, Commit, Date = version, commit, date\n}", "func (sfc *stepFactoryCreator) getInstalledReleases() (map[string]kymahelm.ReleaseStatus, error) {\n\n\texistingReleases := make(map[string]kymahelm.ReleaseStatus)\n\n\treleases, err := sfc.helmClient.ListReleases()\n\tif err != nil {\n\t\treturn nil, errors.New(\"Helm error: \" + err.Error())\n\t}\n\n\tif releases != nil {\n\t\tlog.Println(\"Helm releases list:\")\n\n\t\tfor _, release := range releases {\n\t\t\tvar lastDeployedRev int\n\n\t\t\tstatusCode := release.Status\n\t\t\tif statusCode == kymahelm.StatusDeployed {\n\t\t\t\tlastDeployedRev = release.CurrentRevision\n\t\t\t} else {\n\t\t\t\tlastDeployedRev, err = sfc.helmClient.ReleaseDeployedRevision(kymahelm.NamespacedName{Namespace: release.Namespace, Name: release.Name})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, errors.New(\"Helm error: \" + err.Error())\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tlog.Printf(\"%s status: %s, last deployed revision: %d\", release.Name, statusCode, 
lastDeployedRev)\n\t\t\texistingReleases[release.Name] = kymahelm.ReleaseStatus{\n\t\t\t\tStatus: statusCode,\n\t\t\t\tCurrentRevision: release.CurrentRevision,\n\t\t\t\tLastDeployedRevision: lastDeployedRev,\n\t\t\t}\n\t\t}\n\t}\n\treturn existingReleases, nil\n}", "func MultiRefWrite(refToImage map[name.Reference]v1.Image, w io.Writer, opts ...WriteOption) error {\n\t// process options\n\to := &writeOptions{\n\t\tupdates: nil,\n\t}\n\tfor _, option := range opts {\n\t\tif err := option(o); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\timageToTags := dedupRefToImage(refToImage)\n\tsize, mBytes, err := getSizeAndManifest(imageToTags)\n\tif err != nil {\n\t\treturn sendUpdateReturn(o, err)\n\t}\n\n\treturn writeImagesToTar(imageToTags, mBytes, size, w, o)\n}", "func (c Releases) Frames() data.Frames {\n\tframe := data.NewFrame(\n\t\t\"releases\",\n\t\tdata.NewField(\"name\", nil, []string{}),\n\t\tdata.NewField(\"created_by\", nil, []string{}),\n\t\tdata.NewField(\"is_draft\", nil, []bool{}),\n\t\tdata.NewField(\"is_prerelease\", nil, []bool{}),\n\t\tdata.NewField(\"tag\", nil, []string{}),\n\t\tdata.NewField(\"url\", nil, []string{}),\n\t\tdata.NewField(\"created_at\", nil, []time.Time{}),\n\t\tdata.NewField(\"published_at\", nil, []*time.Time{}),\n\t)\n\n\tfor _, v := range c {\n\t\tvar publishedAt *time.Time\n\t\tif !v.PublishedAt.IsZero() {\n\t\t\tt := v.PublishedAt.Time\n\t\t\tpublishedAt = &t\n\t\t}\n\n\t\tframe.AppendRow(\n\t\t\tv.Name,\n\t\t\tv.Author.Login,\n\t\t\tv.IsDraft,\n\t\t\tv.IsPrerelease,\n\t\t\tv.TagName,\n\t\t\tv.URL,\n\t\t\tv.CreatedAt.Time,\n\t\t\tpublishedAt,\n\t\t)\n\t}\n\n\treturn data.Frames{frame}\n}", "func toolVersions() []string {\n\n return readWebsiteInfo(baseToolsURL)\n\n}", "func (a *Agent) UpgradeReleaseByValues(\n\tctx context.Context,\n\tconf *UpgradeReleaseConfig,\n\tdoAuth *oauth2.Config,\n\tdisablePullSecretsInjection bool,\n\tignoreDependencies bool,\n) (*release.Release, error) {\n\tctx, span := telemetry.NewSpan(ctx, \"helm-upgrade-release-by-values\")\n\tdefer span.End()\n\n\ttelemetry.WithAttributes(span,\n\t\ttelemetry.AttributeKV{Key: \"project-id\", Value: conf.Cluster.ProjectID},\n\t\ttelemetry.AttributeKV{Key: \"cluster-id\", Value: conf.Cluster.ID},\n\t\ttelemetry.AttributeKV{Key: \"name\", Value: conf.Name},\n\t\ttelemetry.AttributeKV{Key: \"stack-name\", Value: conf.StackName},\n\t\ttelemetry.AttributeKV{Key: \"stack-revision\", Value: conf.StackRevision},\n\t)\n\n\t// grab the latest release\n\trel, err := a.GetRelease(ctx, conf.Name, 0, !ignoreDependencies)\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"Could not get release to be upgraded\")\n\t}\n\n\tch := rel.Chart\n\n\tif conf.Chart != nil {\n\t\tch = conf.Chart\n\t}\n\n\tcmd := action.NewUpgrade(a.ActionConfig)\n\tcmd.Namespace = rel.Namespace\n\n\tcmd.PostRenderer, err = NewPorterPostrenderer(\n\t\tconf.Cluster,\n\t\tconf.Repo,\n\t\ta.K8sAgent,\n\t\trel.Namespace,\n\t\tconf.Registries,\n\t\tdoAuth,\n\t\tdisablePullSecretsInjection,\n\t)\n\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting porter postrenderer\")\n\t}\n\n\tif conf.StackName != \"\" && conf.StackRevision > 0 {\n\t\tconf.Values[\"stack\"] = map[string]interface{}{\n\t\t\t\"enabled\": true,\n\t\t\t\"name\": conf.StackName,\n\t\t\t\"revision\": conf.StackRevision,\n\t\t}\n\t}\n\n\tres, err := cmd.Run(conf.Name, ch, conf.Values)\n\tif err != nil {\n\t\t// refer: https://github.com/helm/helm/blob/release-3.8/pkg/action/action.go#L62\n\t\t// issue tracker: 
https://github.com/helm/helm/issues/4558\n\t\tif err.Error() == \"another operation (install/upgrade/rollback) is in progress\" {\n\t\t\tsecretList, err := a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace).List(\n\t\t\t\tcontext.Background(),\n\t\t\t\tv1.ListOptions{\n\t\t\t\t\tLabelSelector: fmt.Sprintf(\"owner=helm,status in (pending-install, pending-upgrade, pending-rollback),name=%s\", rel.Name),\n\t\t\t\t},\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting secret list\")\n\t\t\t}\n\n\t\t\tif len(secretList.Items) > 0 {\n\t\t\t\tmostRecentSecret := secretList.Items[0]\n\n\t\t\t\tfor i := 1; i < len(secretList.Items); i += 1 {\n\t\t\t\t\toldVersion, _ := strconv.Atoi(mostRecentSecret.Labels[\"version\"])\n\t\t\t\t\tnewVersion, _ := strconv.Atoi(secretList.Items[i].Labels[\"version\"])\n\n\t\t\t\t\tif oldVersion < newVersion {\n\t\t\t\t\t\tmostRecentSecret = secretList.Items[i]\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif time.Since(mostRecentSecret.CreationTimestamp.Time) >= time.Minute {\n\t\t\t\t\thelmSecrets := driver.NewSecrets(a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace))\n\n\t\t\t\t\trel.Info.Status = release.StatusFailed\n\n\t\t\t\t\terr = helmSecrets.Update(mostRecentSecret.GetName(), rel)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error updating helm secrets\")\n\t\t\t\t\t}\n\n\t\t\t\t\t// retry upgrade\n\t\t\t\t\tres, err = cmd.Run(conf.Name, ch, conf.Values)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade after updating helm secrets\")\n\t\t\t\t\t}\n\n\t\t\t\t\treturn res, nil\n\t\t\t\t} else {\n\t\t\t\t\t// ask the user to wait for about a minute before retrying for the above fix to kick in\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"another operation (install/upgrade/rollback) is in progress. 
If this error persists, please wait for 60 seconds to force an upgrade\")\n\t\t\t\t}\n\t\t\t}\n\t\t} else if strings.Contains(err.Error(), \"current release manifest contains removed kubernetes api(s)\") || strings.Contains(err.Error(), \"resource mapping not found for name\") {\n\t\t\t// ref: https://helm.sh/docs/topics/kubernetes_apis/#updating-api-versions-of-a-release-manifest\n\t\t\t// in this case, we manually update the secret containing the new manifests\n\t\t\tsecretList, err := a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace).List(\n\t\t\t\tcontext.Background(),\n\t\t\t\tv1.ListOptions{\n\t\t\t\t\tLabelSelector: fmt.Sprintf(\"owner=helm,name=%s\", rel.Name),\n\t\t\t\t},\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting secret list\")\n\t\t\t}\n\n\t\t\tif len(secretList.Items) > 0 {\n\t\t\t\tmostRecentSecret := secretList.Items[0]\n\n\t\t\t\tfor i := 1; i < len(secretList.Items); i += 1 {\n\t\t\t\t\toldVersion, _ := strconv.Atoi(mostRecentSecret.Labels[\"version\"])\n\t\t\t\t\tnewVersion, _ := strconv.Atoi(secretList.Items[i].Labels[\"version\"])\n\n\t\t\t\t\tif oldVersion < newVersion {\n\t\t\t\t\t\tmostRecentSecret = secretList.Items[i]\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// run the equivalent of `helm template` to get the manifest string for the new release\n\t\t\t\tinstallCmd := action.NewInstall(a.ActionConfig)\n\n\t\t\t\tinstallCmd.ReleaseName = conf.Name\n\t\t\t\tinstallCmd.Namespace = rel.Namespace\n\t\t\t\tinstallCmd.DryRun = true\n\t\t\t\tinstallCmd.Replace = true\n\n\t\t\t\tinstallCmd.ClientOnly = false\n\t\t\t\tinstallCmd.IncludeCRDs = true\n\n\t\t\t\tnewRelDryRun, err := installCmd.Run(ch, conf.Values)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running install cmd\")\n\t\t\t\t}\n\n\t\t\t\toldManifestBuffer := bytes.NewBufferString(rel.Manifest)\n\t\t\t\tnewManifestBuffer := bytes.NewBufferString(newRelDryRun.Manifest)\n\n\t\t\t\tversionMapper := &DeprecatedAPIVersionMapper{}\n\n\t\t\t\tupdatedManifestBuffer, err := versionMapper.Run(oldManifestBuffer, newManifestBuffer)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running version mapper\")\n\t\t\t\t}\n\n\t\t\t\trel.Manifest = updatedManifestBuffer.String()\n\n\t\t\t\thelmSecrets := driver.NewSecrets(a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace))\n\n\t\t\t\terr = helmSecrets.Update(mostRecentSecret.GetName(), rel)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error updating helm secret\")\n\t\t\t\t}\n\n\t\t\t\tres, err := cmd.Run(conf.Name, ch, conf.Values)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade after updating helm secrets\")\n\t\t\t\t}\n\n\t\t\t\treturn res, nil\n\t\t\t}\n\t\t}\n\n\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade\")\n\t}\n\n\treturn res, nil\n}", "func Test8(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.Versions[0].RepositoryUrl = \"\"\n\tnewCollection.Spec.Versions[0].Version = \"\"\n\tnewCollection.Spec.Versions[0].DesiredState = \"\"\n\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tDesiredState: \"active\",\n\t\tRepositoryUrl: \"https://github.com/some/collection/kabanero-index.yaml\",\n\t\tVersion: 
\"1.2.3\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func checkVersions(goFile goPack) error {\n\tout, _ := exec.Command(\"go\", \"version\").Output()\n\toutString := string(out[13:17])\n\n\tif outString != goFile.GoVersion {\n\t\treturn errors.New(\"Versions do not match\")\n\t}\n\n\treturn nil\n}", "func (a *Archive) LatestReleases() (ent, team *Release) {\n\treturn a.ent, a.team\n}", "func TestListVersionsRacey(t *testing.T) {\n\t// This test is quite slow, skip it on -short\n\tif testing.Short() {\n\t\tt.Skip(\"Skipping slow test in short mode\")\n\t}\n\n\tsm, clean := mkNaiveSM(t)\n\tdefer clean()\n\n\twg := &sync.WaitGroup{}\n\tid := mkPI(\"github.com/sdboyer/gps\")\n\tfor i := 0; i < 20; i++ {\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\t_, err := sm.ListVersions(id)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"listing versions failed with err %s\", err.Error())\n\t\t\t}\n\t\t\twg.Done()\n\t\t}()\n\t}\n\n\twg.Wait()\n}", "func (m *manager) Sync(ctx context.Context) error {\n\t// Get release history for this release name\n\treleases, err := m.storageBackend.History(m.releaseName)\n\tif err != nil && !notFoundErr(err) {\n\t\treturn fmt.Errorf(\"failed to retrieve release history: %w\", err)\n\t}\n\n\t// Cleanup non-deployed release versions. If all release versions are\n\t// non-deployed, this will ensure that failed installations are correctly\n\t// retried.\n\tfor _, rel := range releases {\n\t\tif rel.Info != nil && rel.Info.Status != rpb.StatusDeployed {\n\t\t\t_, err := m.storageBackend.Delete(rel.Name, rel.Version)\n\t\t\tif err != nil && !notFoundErr(err) {\n\t\t\t\treturn fmt.Errorf(\"failed to delete stale release version: %w\", err)\n\t\t\t}\n\t\t}\n\t}\n\n\t// Load the most recently deployed release from the storage backend.\n\tdeployedRelease, err := m.getDeployedRelease()\n\tif errors.Is(err, driver.ErrReleaseNotFound) {\n\t\treturn nil\n\t}\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get deployed release: %w\", err)\n\t}\n\tm.deployedRelease = deployedRelease\n\tm.isInstalled = true\n\n\t// Get the next candidate release to determine if an upgrade is necessary.\n\tcandidateRelease, err := m.getCandidateRelease(m.namespace, m.releaseName, m.chart, m.values)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to get candidate release: %w\", err)\n\t}\n\tif deployedRelease.Manifest != candidateRelease.Manifest {\n\t\tm.isUpgradeRequired = true\n\t}\n\n\treturn nil\n}", "func BeeReleasesInfo() (repos []Releases) {\n\tvar url = \"https://api.github.com/repos/beego/bee/releases\"\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\tbeeLogger.Log.Warnf(\"Get bee releases from github error: %s\", err)\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\tbodyContent, _ := ioutil.ReadAll(resp.Body)\n\tif err = json.Unmarshal(bodyContent, &repos); err != nil {\n\t\tbeeLogger.Log.Warnf(\"Unmarshal releases body error: %s\", err)\n\t\treturn\n\t}\n\treturn\n}", "func PrintDiff(w io.Writer, diffs []Diff) {\n\tfor _, diff := range diffs {\n\t\tif diff.Deleted {\n\t\t\tfmt.Fprintln(w, fmt.Sprintf(\"-%s\", diff.Type), fmt.Sprintf(\"%s=%s\", diff.KV.Key, diff.KV.Value))\n\t\t} else {\n\t\t\tfmt.Fprintln(w, fmt.Sprintf(\"+%s\", diff.Type), fmt.Sprintf(\"%s=%s\", diff.KV.Key, diff.KV.Value))\n\t\t}\n\t}\n}", "func matchReleasedSignatures(allTargets 
[]client.TargetSignedStruct) []trustTagRow {\n\tsignatureRows := []trustTagRow{}\n\t// do a first pass to get filter on tags signed into \"targets\" or \"targets/releases\"\n\treleasedTargetRows := map[trustTagKey][]string{}\n\tfor _, tgt := range allTargets {\n\t\tif isReleasedTarget(tgt.Role.Name) {\n\t\t\treleasedKey := trustTagKey{tgt.Target.Name, hex.EncodeToString(tgt.Target.Hashes[notary.SHA256])}\n\t\t\treleasedTargetRows[releasedKey] = []string{}\n\t\t}\n\t}\n\n\t// now fill out all signers on released keys\n\tfor _, tgt := range allTargets {\n\t\ttargetKey := trustTagKey{tgt.Target.Name, hex.EncodeToString(tgt.Target.Hashes[notary.SHA256])}\n\t\t// only considered released targets\n\t\tif _, ok := releasedTargetRows[targetKey]; ok && !isReleasedTarget(tgt.Role.Name) {\n\t\t\treleasedTargetRows[targetKey] = append(releasedTargetRows[targetKey], notaryRoleToSigner(tgt.Role.Name))\n\t\t}\n\t}\n\n\t// compile the final output as a sorted slice\n\tfor targetKey, signers := range releasedTargetRows {\n\t\tsignatureRows = append(signatureRows, trustTagRow{targetKey, signers})\n\t}\n\tsort.Slice(signatureRows, func(i, j int) bool {\n\t\treturn sortorder.NaturalLess(signatureRows[i].SignedTag, signatureRows[j].SignedTag)\n\t})\n\treturn signatureRows\n}", "func diffVersionInfo(a, b map[string]string) (deleted, added map[string]string, changed map[string][2]string) {\n\t// from a to b\n\n\tdeleted = make(map[string]string)\n\tadded = make(map[string]string)\n\tchanged = make(map[string][2]string)\n\n\t// keys in a but not in b -> delete\n\tfor ka, va := range a {\n\t\tif vb, ok := b[ka]; !ok {\n\t\t\tdeleted[ka] = va\n\t\t} else {\n\t\t\tif va != vb {\n\t\t\t\tchanged[ka] = [2]string{va, vb}\n\t\t\t}\n\t\t}\n\t}\n\n\t// keys in b but not in a -> add\n\tfor kb, vb := range b {\n\t\tif _, ok := a[kb]; !ok {\n\t\t\tadded[kb] = vb\n\t\t}\n\t}\n\treturn deleted, added, changed\n}", "func (a *Report) writeDeps(prefix string, bucket *Architecture) error {\n\tsuffix := bucket.GetPathSuffix()\n\tdepsPath := filepath.Join(ReportOutputDir, fmt.Sprintf(\"%sused_libs%s\", prefix, suffix))\n\n\t// Emit dependencies\n\tvar depNames []string\n\tfor nom := range bucket.Dependencies {\n\t\t// Skip provided\n\t\tif _, ok := bucket.Symbols[nom]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tif _, ok := bucket.HiddenSymbols[nom]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tdepNames = append(depNames, nom)\n\t}\n\tsort.Strings(depNames)\n\n\tif len(depNames) < 1 {\n\t\tif err := truncateFile(depsPath); err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\t// The \"used_libs\" dependencies file\n\tdepsFi, err := os.Create(depsPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer depsFi.Close()\n\n\tfor _, dep := range depNames {\n\t\tif _, err = fmt.Fprintf(depsFi, \"%s\\n\", dep); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (d *Diff) Diff() (err error) {\n\n\td.tmpDir, err = os.MkdirTemp(os.TempDir(), \"ipsw-diff\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create temp dir: %v\", err)\n\t}\n\tdefer os.RemoveAll(d.tmpDir)\n\n\td.Old.Info, err = info.Parse(d.Old.IPSWPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to parse 'Old' IPSW: %v\", err)\n\t}\n\td.New.Info, err = info.Parse(d.New.IPSWPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to parse 'New' IPSW: %v\", err)\n\t}\n\n\td.Old.Version = d.Old.Info.Plists.BuildManifest.ProductVersion\n\td.Old.Build = d.Old.Info.Plists.BuildManifest.ProductBuildVersion\n\td.Old.Folder, err = d.Old.Info.GetFolder()\n\tif err != nil 
{\n\t\tlog.Errorf(\"failed to get folder from 'Old' IPSW metadata: %v\", err)\n\t}\n\td.Old.Folder = filepath.Join(d.tmpDir, d.Old.Folder)\n\n\td.New.Version = d.New.Info.Plists.BuildManifest.ProductVersion\n\td.New.Build = d.New.Info.Plists.BuildManifest.ProductBuildVersion\n\td.New.Folder, err = d.New.Info.GetFolder()\n\tif err != nil {\n\t\tlog.Errorf(\"failed to get folder from 'New' IPSW metadata: %v\", err)\n\t}\n\td.New.Folder = filepath.Join(d.tmpDir, d.New.Folder)\n\n\tif d.Title == \"\" {\n\t\td.Title = fmt.Sprintf(\"%s (%s) .vs %s (%s)\", d.Old.Version, d.Old.Build, d.New.Version, d.New.Build)\n\t}\n\n\tlog.Info(\"Diffing KERNELCACHES\")\n\tif err := d.parseKernelcache(); err != nil {\n\t\treturn err\n\t}\n\n\tif d.Old.KDK != \"\" && d.New.KDK != \"\" {\n\t\tlog.Info(\"Diffing KDKS\")\n\t\tif err := d.parseKDKs(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Info(\"Diffing DYLD_SHARED_CACHES\")\n\tif err := d.mountSystemOsDMGs(); err != nil {\n\t\treturn fmt.Errorf(\"failed to mount DMGs: %v\", err)\n\t}\n\tdefer d.unmountSystemOsDMGs()\n\n\tif err := d.parseDSC(); err != nil {\n\t\treturn err\n\t}\n\n\tlog.Info(\"Diffing launchd PLIST\")\n\tif err := d.parseLaunchdPlists(); err != nil {\n\t\treturn fmt.Errorf(\"failed to parse launchd config plists: %v\", err)\n\t}\n\n\tlog.Info(\"Diffing ENTITLEMENTS\")\n\td.Ents, err = d.parseEntitlements()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func writeEntries(file string, all map[string]entry) error {\n\n\tdb, err := bolt.Open(file, 0600, nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdefer db.Close()\n\n\terr = db.Update(func(tx *bolt.Tx) error {\n\t\ttx.DeleteBucket(hgcbucket)\n\t\tbucket, err := tx.CreateBucketIfNotExists(hgcbucket)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, v := range all {\n\t\t\t// v.id should be same a k\n\t\t\t//fmt.Printf(\"WRiting %v = %v\\n\", v.id, v.data)\n\t\t\terr = bucket.Put([]byte(v.id), []byte(v.data))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t})\n\n\treturn err\n}", "func (e *affectedE2E) IndexArtifacts(t *testing.T) {\n\tctx := zlog.Test(e.ctx, t)\n\tconst (\n\t\tinsertManifest = `\n\t\tINSERT INTO\tmanifest \n\t\t\t(hash)\n\t\tVALUES ($1)\n\t\tON CONFLICT DO NOTHING;\n\t\t`\n\t\tinsertPkg = ` \n\t\tINSERT INTO package (name, kind, version, norm_kind, norm_version, module, arch, id)\n\t\tVALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n\t\tON CONFLICT DO NOTHING;\n\t\t`\n\t\tinsertDist = `\n\t\tINSERT INTO dist \n\t\t\t(name, did, version, version_code_name, version_id, arch, cpe, pretty_name, id) \n\t\tVALUES \n\t\t\t($1, $2, $3, $4, $5, $6, $7, $8, $9) \n\t\tON CONFLICT DO NOTHING;\n\t\t`\n\t\tinsertRepo = `\n\t\tINSERT INTO repo\n\t\t\t(name, key, uri, id)\n\t\tVALUES ($1, $2, $3, $4)\n\t\tON CONFLICT DO NOTHING;\n\t\t`\n\t)\n\t_, err := e.pool.Exec(ctx, insertManifest, e.ir.Hash.String())\n\tif err != nil {\n\t\tt.Fatalf(\"failed to insert manifest: %v\", err)\n\t}\n\tfor _, pkg := range e.ir.Packages {\n\t\tvar nVer pgtype.Int4Array\n\t\tnVer.Status = pgtype.Present\n\t\tnVer.Set(pkg.NormalizedVersion.V)\n\t\t_, err := e.pool.Exec(ctx, insertPkg,\n\t\t\tpkg.Name,\n\t\t\tpkg.Kind,\n\t\t\tpkg.Version,\n\t\t\tpkg.NormalizedVersion.Kind,\n\t\t\t&nVer,\n\t\t\tpkg.Module,\n\t\t\tpkg.Arch,\n\t\t\tpkg.ID,\n\t\t)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"failed to insert package: %v\", err)\n\t\t}\n\t\tif pkg.Source != nil {\n\t\t\tpkg := pkg.Source\n\t\t\tnVer.Set(pkg.NormalizedVersion.V)\n\t\t\t_, err := e.pool.Exec(ctx, 
insertPkg,\n\t\t\t\tpkg.Name,\n\t\t\t\tpkg.Kind,\n\t\t\t\tpkg.Version,\n\t\t\t\tpkg.NormalizedVersion.Kind,\n\t\t\t\t&nVer,\n\t\t\t\tpkg.Module,\n\t\t\t\tpkg.Arch,\n\t\t\t\tpkg.ID,\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatalf(\"failed to insert source package: %v\", err)\n\t\t\t}\n\t\t}\n\t}\n\tfor _, dist := range e.ir.Distributions {\n\t\t_, err := e.pool.Exec(ctx, insertDist,\n\t\t\tdist.Name,\n\t\t\tdist.DID,\n\t\t\tdist.Version,\n\t\t\tdist.VersionCodeName,\n\t\t\tdist.VersionID,\n\t\t\tdist.Arch,\n\t\t\tdist.CPE,\n\t\t\tdist.PrettyName,\n\t\t\tdist.ID,\n\t\t)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"failed to insert dist: %v\", err)\n\t\t}\n\t}\n\tfor _, repo := range e.ir.Repositories {\n\t\t_, err := e.pool.Exec(ctx, insertRepo,\n\t\t\trepo.Name,\n\t\t\trepo.Key,\n\t\t\trepo.URI,\n\t\t\trepo.ID,\n\t\t)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"failed to insert repo: %v\", err)\n\t\t}\n\t}\n}", "func (b Band) Versions() []ttnpb.PHYVersion {\n\tvar versions []ttnpb.PHYVersion\n\tfor _, swapParameter := range b.downgrades() {\n\t\tif swapParameter.downgrade != nil {\n\t\t\tversions = append(versions, swapParameter.version)\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn versions\n}", "func Release(path string, change parser.SemVerChange, ch chan Result, options ReleaseOptions) {\n\tdefer close(ch)\n\n\t// Get Git User\n\tuser, err := git.GetUser(path)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Git] get user: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseGetGitUser,\n\t\tMessage: user.String(),\n\t}\n\n\t// Parse Commits\n\tcommits, err := parser.ParseCommits(path)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] parse commits: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseParseCommits,\n\t\tMessage: strconv.Itoa(len(commits)),\n\t}\n\n\t// Read version from last bump commit if exist\n\tvar version string\n\tif len(commits) > 0 {\n\t\tlastCommit := commits[len(commits)-1]\n\t\tif lastCommit.SemVer != \"\" {\n\t\t\tversion = lastCommit.SemVer\n\t\t\tch <- Result{\n\t\t\t\tPhase: PhaseLastVersionFromCommit,\n\t\t\t\tMessage: version,\n\t\t\t}\n\t\t}\n\t}\n\n\t// Read version from npm (package.json) if exist\n\tvar npmVersion string\n\tisNpm := npm.HasPackage(path)\n\tif isNpm {\n\t\tpkg, err := npm.ParsePackage(path)\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\n\t\t\t\t\t\"[Release] parse npm package: %v\",\n\t\t\t\t\terr,\n\t\t\t\t),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tnpmVersion = pkg.Version\n\t\tch <- Result{\n\t\t\tPhase: PhaseLastVersionFromPackage,\n\t\t\tMessage: npmVersion,\n\t\t}\n\t}\n\n\t// Inconsistency between commit history and package.json version\n\tif npmVersion != \"\" && npmVersion != version {\n\t\tch <- Result{\n\t\t\tPhase: PhaseLastVersionInconsistency,\n\t\t\tMessage: fmt.Sprintf(\n\t\t\t\t\"package.json: %s, git: %s\",\n\t\t\t\tnpmVersion,\n\t\t\t\tversion,\n\t\t\t),\n\t\t}\n\t\tversion = npmVersion\n\t}\n\n\t// Find Change\n\tif change == \"\" {\n\t\tchange = semver.GetChange(commits)\n\t\tch <- Result{\n\t\t\tPhase: PhaseChangeFound,\n\t\t\tMessage: string(change),\n\t\t}\n\t}\n\n\t// Calculate new version\n\tnewVersion, err := semver.GetVersion(version, change)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\n\t\t\t\t\"[Release] get semver version: %v\",\n\t\t\t\terr,\n\t\t\t),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseNextVersion,\n\t\tMessage: newVersion,\n\t}\n\n\t// Generate changelog\n\tcf, _, err := 
changelog.Save(path, newVersion, version, change, commits, user)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] save changelog: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseChangelogUpdated,\n\t\tMessage: cf,\n\t}\n\n\t// Version: npm\n\tif isNpm {\n\t\t_, err = npm.Version(path, newVersion, string(change))\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\"[npm] version: %v\", err),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tch <- Result{\n\t\t\tPhase: PhasePackageVersion,\n\t\t}\n\t}\n\n\t// Release: Git\n\terr = git.Release(path, newVersion, user, options.SuppressPush)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] git: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseGitRelease,\n\t\tMessage: newVersion,\n\t}\n\n\t// Publish: npm\n\tif isNpm {\n\t\t_, err = npm.Publish(path)\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\"[npm] publish: %v\", err),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tch <- Result{\n\t\t\tPhase: PhasePackagePublish,\n\t\t}\n\t}\n}", "func TestContractSet(t *testing.T) {\n\t// create contract set\n\tc1 := &SafeContract{header: contractHeader{Transaction: types.Transaction{\n\t\tFileContractRevisions: []types.FileContractRevision{{\n\t\t\tParentID: types.FileContractID{1},\n\t\t\tNewValidProofOutputs: []types.SiacoinOutput{{}, {}},\n\t\t\tUnlockConditions: types.UnlockConditions{\n\t\t\t\tPublicKeys: []types.SiaPublicKey{{}, {}},\n\t\t\t},\n\t\t}},\n\t}}}\n\tid1 := c1.header.ID()\n\tc2 := &SafeContract{header: contractHeader{Transaction: types.Transaction{\n\t\tFileContractRevisions: []types.FileContractRevision{{\n\t\t\tParentID: types.FileContractID{2},\n\t\t\tNewValidProofOutputs: []types.SiacoinOutput{{}, {}},\n\t\t\tUnlockConditions: types.UnlockConditions{\n\t\t\t\tPublicKeys: []types.SiaPublicKey{{}, {}},\n\t\t\t},\n\t\t}},\n\t}}}\n\tid2 := c2.header.ID()\n\tcs := &ContractSet{\n\t\tcontracts: map[types.FileContractID]*SafeContract{\n\t\t\tid1: c1,\n\t\t\tid2: c2,\n\t\t},\n\t}\n\n\t// uncontested acquire/release\n\tc1 = cs.mustAcquire(t, id1)\n\tcs.Return(c1)\n\n\t// 100 concurrent serialized mutations\n\tvar wg sync.WaitGroup\n\tfor i := 0; i < 100; i++ {\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\tc1 := cs.mustAcquire(t, id1)\n\t\t\tc1.header.Transaction.FileContractRevisions[0].NewRevisionNumber++\n\t\t\ttime.Sleep(time.Duration(fastrand.Intn(100)))\n\t\t\tcs.Return(c1)\n\t\t}()\n\t}\n\twg.Wait()\n\tc1 = cs.mustAcquire(t, id1)\n\tcs.Return(c1)\n\tif c1.header.LastRevision().NewRevisionNumber != 100 {\n\t\tt.Fatal(\"expected exactly 100 increments, got\", c1.header.LastRevision().NewRevisionNumber)\n\t}\n\n\t// a blocked acquire shouldn't prevent a return\n\tc1 = cs.mustAcquire(t, id1)\n\tgo func() {\n\t\ttime.Sleep(time.Millisecond)\n\t\tcs.Return(c1)\n\t}()\n\tc1 = cs.mustAcquire(t, id1)\n\tcs.Return(c1)\n\n\t// delete and reinsert id2\n\tc2 = cs.mustAcquire(t, id2)\n\tcs.Delete(c2)\n\tcs.mu.Lock()\n\tcs.contracts[id2] = c2\n\tcs.mu.Unlock()\n\n\t// call all the methods in parallel haphazardly\n\tfuncs := []func(){\n\t\tfunc() { cs.Len() },\n\t\tfunc() { cs.IDs() },\n\t\tfunc() { cs.View(id1); cs.View(id2) },\n\t\tfunc() { cs.ViewAll() },\n\t\tfunc() { cs.Return(cs.mustAcquire(t, id1)) },\n\t\tfunc() { cs.Return(cs.mustAcquire(t, id2)) },\n\t\tfunc() {\n\t\t\tc3 := &SafeContract{header: contractHeader{\n\t\t\t\tTransaction: types.Transaction{\n\t\t\t\t\tFileContractRevisions: 
[]types.FileContractRevision{{\n\t\t\t\t\t\tParentID: types.FileContractID{3},\n\t\t\t\t\t\tNewValidProofOutputs: []types.SiacoinOutput{{}, {}},\n\t\t\t\t\t\tUnlockConditions: types.UnlockConditions{\n\t\t\t\t\t\t\tPublicKeys: []types.SiaPublicKey{{}, {}},\n\t\t\t\t\t\t},\n\t\t\t\t\t}},\n\t\t\t\t},\n\t\t\t}}\n\t\t\tid3 := c3.header.ID()\n\t\t\tcs.mu.Lock()\n\t\t\tcs.contracts[id3] = c3\n\t\t\tcs.mu.Unlock()\n\t\t\tcs.mustAcquire(t, id3)\n\t\t\tcs.Delete(c3)\n\t\t},\n\t}\n\twg = sync.WaitGroup{}\n\tfor _, fn := range funcs {\n\t\twg.Add(1)\n\t\tgo func(fn func()) {\n\t\t\tdefer wg.Done()\n\t\t\tfor i := 0; i < 100; i++ {\n\t\t\t\ttime.Sleep(time.Duration(fastrand.Intn(100)))\n\t\t\t\tfn()\n\t\t\t}\n\t\t}(fn)\n\t}\n\twg.Wait()\n}", "func GetVersion(w http.ResponseWriter, r *http.Request) {\n\t\n\tvar softwareVersion string\n\ttype data struct {\n\t\tVersion string\n\t}\n\n\tif r.Body == nil {\n\t\tfmt.Println(\"EMPTY\")\n\t}\n\n\tdecoder := json.NewDecoder(r.Body)\n\n\tvar t data\n\terr := decoder.Decode(&t)\n\n\tif err == io.EOF {\n\t\tsoftwareVersion = \"\"\n\t} else if err != nil {\n\t\tpanic(err)\n\t}\n\n\tif t.Version != \"\" {\n\t\tsoftwareVersion = t.Version\n\t}\n\n\tparams := mux.Vars(r)\n\tsoftwareName := params[\"software\"]\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\n\tfor _, yamlFile := range listYamlFiles(\".\") {\n\t\tif strings.Split(yamlFile, \".\")[0] == softwareName {\n\t\t\tc := parseYaml(yamlFile)\n\t\t\t_, body, _ := gorequest.New().Get(c.URL).End()\n\t\t\t\n\t\t\tarray := strings.Split(body, \"\\n\")\n\n\t\t\tvar availableVersions []string\n\t\t\tvar av []string\n\n\t\t\tfor _, version := range array {\n\t\t\t\tav = append(av, version)\n\t\t\t}\n\n\t\t\tsort.Strings(av)\n\n\t\t\tfor _, v := range av {\n\t\t\t\tif strings.Contains(v, c.Matcher) {\n\t\t\t\t\tavailableVersions = append(availableVersions, v)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tlatestVersion := availableVersions[len(availableVersions)-1]\n\t\t\tr, _ := regexp.Compile(\"(\\\\d+)(?:\\\\.(\\\\d+))*\")\n\t\t\tif softwareVersion == \"\" {\n\t\t\t\tif err := json.NewEncoder(w).Encode(r.FindString(latestVersion)); err != nil {\n\t\t\t\t\tpanic(err)\n\t\t\t\t}\n\t\t\t} else {\n\n\t\t\t\tlatestVersionInt64 := parseVersion(r.FindString(latestVersion), 4)\n\t\t\t\tsoftwareVersionInt64 := parseVersion(softwareVersion, 4)\n\n\t\t\t\tif latestVersionInt64 > softwareVersionInt64 {\n\t\t\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\t\t\tif err := json.NewEncoder(w).Encode(r.FindString(latestVersion)); err != nil {\n\t\t\t\t\t\tpanic(err)\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tw.WriteHeader(http.StatusOK)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func PageVersions(name, latest, disabled string, versions []string) *Renderer {\n\treturn &Renderer{\n\t\tfilenames: []string{\n\t\t\t\"templates/layout.html\",\n\t\t\t\"templates/logo.html\",\n\t\t\t\"templates/pages/versions.html\",\n\t\t},\n\t\tcontext: struct {\n\t\t\tPackage string\n\t\t\tLatest string\n\t\t\tDisabled string\n\t\t\tVersions []string\n\t\t}{\n\t\t\tPackage: name,\n\t\t\tLatest: latest,\n\t\t\tDisabled: disabled,\n\t\t\tVersions: versions,\n\t\t},\n\t}\n}", "func TestEnvoyVersionsJson(t *testing.T) {\n\treleaseDates, err := getEnvoyReleaseDates()\n\trequire.NoError(t, err)\n\n\tdata, err := os.ReadFile(envoyVersionsPath)\n\trequire.NoError(t, err)\n\n\tevs := version.EnvoyVersions{}\n\terr = json.Unmarshal(data, &evs)\n\trequire.NoErrorf(t, err, \"error parsing json from %s\", envoyVersionsPath)\n\trequire.Greaterf(t, len(evs.Versions), 2, 
\"expected more than two versions\")\n\n\trequire.NotEmptyf(t, evs.LatestVersion, \"latest version isn't in %s\", envoyVersionsPath)\n\trequire.Containsf(t, evs.Versions, evs.LatestVersion, \"latest version isn't in the version list of %s\", envoyVersionsPath)\n\trequire.Equalf(t, evs.LatestVersion, version.LastKnownEnvoy, \"version.LastKnownEnvoy doesn't match latest version in %s\", envoyVersionsPath)\n\n\t// Ensure there's an option besides the latest version\n\trequire.GreaterOrEqualf(t, len(evs.Versions), 2, \"expected more than two versions\")\n\n\ttype testCase struct{ version, platform, tarballURL string }\n\n\tvar tests []testCase\n\tfor v, ev := range evs.Versions {\n\t\trequire.NotEmptyf(t, releaseDates[v], \"version %s is not a published envoyproxy/proxy release\", v)\n\t\trequire.Equalf(t, releaseDates[v], ev.ReleaseDate, \"releaseDate for %s doesn't match envoyproxy/proxy\", v)\n\t\trequire.GreaterOrEqualf(t, len(ev.Tarballs), 2, \"expected at least two platforms for version %s\", v)\n\n\t\tfor p, tb := range ev.Tarballs {\n\t\t\ttests = append(tests, testCase{v, p, tb})\n\t\t}\n\t}\n\n\tfor _, tc := range tests {\n\t\tname := fmt.Sprintf(\"%s-%s\", tc.version, tc.platform)\n\t\ttarballURL := tc.tarballURL\n\t\tt.Run(name, func(t *testing.T) {\n\t\t\trequire.Regexpf(t, \"https://.*.tar.(gz|xz)\", tarballURL, \"expected an https tar.gz or xz %s\", tarballURL)\n\t\t\tres, err := http.Head(tarballURL)\n\t\t\trequire.NoErrorf(t, err, \"error from HEAD %s\", tarballURL)\n\t\t\tdefer res.Body.Close() //nolint\n\n\t\t\trequire.NoErrorf(t, err, \"error reading %s\", tarballURL)\n\t\t\trequire.Equalf(t, 200, res.StatusCode, \"unexpected HTTP status reading %s\", tarballURL)\n\t\t\trequire.Greaterf(t, res.ContentLength, int64(5<<20), \"expected at least 5MB size %s\", tarballURL)\n\t\t})\n\t}\n}" ]
[ "0.56417835", "0.55465364", "0.5406076", "0.5396831", "0.5360329", "0.5337291", "0.52985406", "0.5244649", "0.523514", "0.5119895", "0.5109302", "0.51012754", "0.5056543", "0.50452995", "0.50312495", "0.50085276", "0.49578676", "0.4948824", "0.49134958", "0.49077567", "0.4907483", "0.4890809", "0.48843336", "0.48770547", "0.48721144", "0.48687848", "0.48640385", "0.4854747", "0.48547307", "0.4847242", "0.48435476", "0.4814935", "0.48077524", "0.48037952", "0.47990894", "0.4779831", "0.47662205", "0.47578987", "0.47433707", "0.4740098", "0.4732412", "0.47304487", "0.47272378", "0.470953", "0.47084454", "0.4700088", "0.46953502", "0.4686944", "0.4684859", "0.46787456", "0.4677487", "0.46754324", "0.46591353", "0.46544313", "0.4648858", "0.46436024", "0.46425772", "0.4638828", "0.46303442", "0.46261668", "0.4624086", "0.4622126", "0.4618019", "0.46169713", "0.46142903", "0.46119723", "0.46089926", "0.4607867", "0.46024418", "0.4600701", "0.45945895", "0.45930076", "0.4591654", "0.45907134", "0.45900378", "0.45884284", "0.45826846", "0.4574962", "0.45682123", "0.45570934", "0.4553479", "0.4548296", "0.4544667", "0.45323026", "0.45302695", "0.45290807", "0.45287427", "0.45268705", "0.4520711", "0.45204234", "0.45192236", "0.45186204", "0.4518583", "0.4517986", "0.45159188", "0.45149463", "0.45139438", "0.45087647", "0.4505024", "0.45028913" ]
0.61751723
0
Job writes out the job differences between the specified releases
func (s *DiffCmd) Job(job string, w io.Writer) error {
	differ, err := diff.New(s.releaseRepo, s.release1, s.release2)
	if err != nil {
		return err
	}

	d, err := differ.DiffJob(job)
	if err != nil {
		return err
	}

	s.printDiffResult(w, d)

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func TestCreateProcessHistoryReport(t *testing.T) {\n\n timeSlice := float64(TIMESLICE)\n\n testtable := []struct {\n tname string\n }{\n {\n tname: \"ok\",\n },\n }\n\n CreateDummyProcessMetricsHistory(&testProcessMetricsArray)\n\n for _, tc := range testtable {\n\n t.Run(tc.tname, func(t *testing.T) {\n\n var buffer bytes.Buffer\n writer := bufio.NewWriter(&buffer)\n\n err := json.NewEncoder(writer).Encode(CreateProcessHistoryReport(testProcessMetricsArray, timeSlice))\n if err != nil {\n t.Fatalf(\"failed writing json: %s\", err)\n }\n writer.Flush()\n\n goldenPath := filepath.Join(\"testdata\", filepath.FromSlash(t.Name()) + \".golden\")\n\n\n if *update {\n\n t.Log(\"update golden file\")\n if err := ioutil.WriteFile(goldenPath, buffer.Bytes(), 0644); err != nil {\n t.Fatalf(\"failed to update golden file %s: %s\", goldenPath, err)\n }\n\n }\n\n\n goldenData, err := ioutil.ReadFile(goldenPath)\n\n if err != nil {\n t.Fatalf(\"failed reading .golden file %s: %s\", goldenPath, err)\n }\n\n t.Log(string(buffer.Bytes()))\n\n if !bytes.Equal(buffer.Bytes(), goldenData) {\n t.Errorf(\"bytes do not match .golden file %s\", goldenPath)\n }\n\n })\n }\n\n}", "func minorRelease(f *os.File, release, draftURL, changelogURL string) {\n\t// Check for draft and use it if available\n\tlog.Printf(\"Checking if draft release notes exist for %s...\", release)\n\n\tresp, err := http.Get(draftURL)\n\tif err == nil {\n\t\tdefer resp.Body.Close()\n\t}\n\n\tif err == nil && resp.StatusCode == 200 {\n\t\tlog.Print(\"Draft found - using for release notes...\")\n\t\t_, err = io.Copy(f, resp.Body)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"error during copy to file: %v\", err)\n\t\t\treturn\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tlog.Print(\"Failed to find draft - creating generic template... (error message/status code printed below)\")\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error message: %v\", err)\n\t\t} else {\n\t\t\tlog.Printf(\"Response status code: %d\", resp.StatusCode)\n\t\t}\n\t\tf.WriteString(\"## Major Themes\\n\\n* TBD\\n\\n## Other notable improvements\\n\\n* TBD\\n\\n## Known Issues\\n\\n* TBD\\n\\n## Provider-specific Notes\\n\\n* TBD\\n\\n\")\n\t}\n\n\t// Aggregate all previous release in series\n\tf.WriteString(fmt.Sprintf(\"### Previous Release Included in %s\\n\\n\", release))\n\n\t// Regexp Example:\n\t// Assume the release tag is v1.7.0, this regexp matches \"- [v1.7.0-\" in\n\t// \"- [v1.7.0-rc.1](#v170-rc1)\"\n\t// \"- [v1.7.0-beta.2](#v170-beta2)\"\n\t// \"- [v1.7.0-alpha.3](#v170-alpha3)\"\n\treAnchor, _ := regexp.Compile(fmt.Sprintf(\"- \\\\[%s-\", release))\n\n\tresp, err = http.Get(changelogURL)\n\tif err == nil {\n\t\tdefer resp.Body.Close()\n\t}\n\n\tif err == nil && resp.StatusCode == 200 {\n\t\tbuf := new(bytes.Buffer)\n\t\tbuf.ReadFrom(resp.Body)\n\t\tfor _, line := range strings.Split(buf.String(), \"\\n\") {\n\t\t\tif anchor := reAnchor.FindStringSubmatch(line); anchor != nil {\n\t\t\t\tf.WriteString(line + \"\\n\")\n\t\t\t}\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tlog.Print(\"Failed to fetch past changelog for minor release - continuing... (error message/status code printed below)\")\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error message: %v\", err)\n\t\t} else {\n\t\t\tlog.Printf(\"Response status code: %d\", resp.StatusCode)\n\t\t}\n\t}\n}", "func copyVersions(srcStore, dstStore dvid.Store, d1, d2 dvid.Data, uuids []dvid.UUID) error {\n\tif len(uuids) == 0 {\n\t\tdvid.Infof(\"no versions given for copy... 
aborting\\n\")\n\t\treturn nil\n\t}\n\tsrcDB, ok := srcStore.(rawQueryDB)\n\tif !ok {\n\t\treturn fmt.Errorf(\"source store %q doesn't have required raw range query\", srcStore)\n\t}\n\tdstDB, ok := dstStore.(rawPutDB)\n\tif !ok {\n\t\treturn fmt.Errorf(\"destination store %q doesn't have raw Put query\", dstStore)\n\t}\n\tvar dataInstanceChanged bool\n\tif d2 == nil {\n\t\td2 = d1\n\t} else {\n\t\tdataInstanceChanged = true\n\t}\n\tversionsOnPath, versionsToStore, err := calcVersionPath(uuids)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\n\tstatsTotal := new(txStats)\n\tstatsTotal.lastTime = time.Now()\n\tstatsTotal.name = fmt.Sprintf(\"%q total\", d1.DataName())\n\tstatsStored := new(txStats)\n\tstatsStored.lastTime = time.Now()\n\tstatsStored.name = fmt.Sprintf(\"stored into %q\", d2.DataName())\n\tvar kvTotal, kvSent int\n\tvar bytesTotal, bytesSent uint64\n\n\t// Start goroutine to receive all key-value pairs, process, and store them.\n\trawCh := make(chan *storage.KeyValue, 5000)\n\tgo func() {\n\t\tvar maxVersionKey storage.Key\n\t\tvar numStoredKV int\n\t\tkvsToStore := make(map[dvid.VersionID]*storage.KeyValue, len(versionsToStore))\n\t\tfor _, v := range versionsToStore {\n\t\t\tkvsToStore[v] = nil\n\t\t}\n\t\tfor {\n\t\t\tkv := <-rawCh\n\t\t\tif kv != nil && !storage.Key(kv.K).IsDataKey() {\n\t\t\t\tdvid.Infof(\"Skipping non-data key-value %x ...\\n\", []byte(kv.K))\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif kv == nil || maxVersionKey == nil || bytes.Compare(kv.K, maxVersionKey) > 0 {\n\t\t\t\tif numStoredKV > 0 {\n\t\t\t\t\tvar lastKV *storage.KeyValue\n\t\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\t\tcurKV := kvsToStore[v]\n\t\t\t\t\t\tif lastKV == nil || (curKV != nil && bytes.Compare(lastKV.V, curKV.V) != 0) {\n\t\t\t\t\t\t\tif curKV != nil {\n\t\t\t\t\t\t\t\tkeybuf := make(storage.Key, len(curKV.K))\n\t\t\t\t\t\t\t\tcopy(keybuf, curKV.K)\n\t\t\t\t\t\t\t\tif dataInstanceChanged {\n\t\t\t\t\t\t\t\t\terr = storage.ChangeDataKeyInstance(keybuf, d2.InstanceID())\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\tdvid.Errorf(\"could not change instance ID of key to %d: %v\\n\", d2.InstanceID(), err)\n\t\t\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstorage.ChangeDataKeyVersion(keybuf, v)\n\t\t\t\t\t\t\t\tkvSent++\n\t\t\t\t\t\t\t\tbytesSent += uint64(len(curKV.V) + len(keybuf))\n\t\t\t\t\t\t\t\tif err := dstDB.RawPut(keybuf, curKV.V); err != nil {\n\t\t\t\t\t\t\t\t\tdvid.Errorf(\"can't put k/v pair to destination instance %q: %v\\n\", d2.DataName(), err)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstatsStored.addKV(keybuf, curKV.V)\n\t\t\t\t\t\t\t\tlastKV = curKV\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif kv == nil {\n\t\t\t\t\twg.Done()\n\t\t\t\t\tdvid.Infof(\"Sent %d %q key-value pairs (%s, out of %d kv pairs, %s)\\n\",\n\t\t\t\t\t\tkvSent, d1.DataName(), humanize.Bytes(bytesSent), kvTotal, humanize.Bytes(bytesTotal))\n\t\t\t\t\tdvid.Infof(\"Total KV Stats for %q:\\n\", d1.DataName())\n\t\t\t\t\tstatsTotal.printStats()\n\t\t\t\t\tdvid.Infof(\"Total KV Stats for newly stored %q:\\n\", d2.DataName())\n\t\t\t\t\tstatsStored.printStats()\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\ttk, err := storage.TKeyFromKey(kv.K)\n\t\t\t\tif err != nil {\n\t\t\t\t\tdvid.Errorf(\"couldn't get %q TKey from Key %v: %v\\n\", d1.DataName(), kv.K, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tmaxVersionKey, err = storage.MaxVersionDataKey(d1.InstanceID(), tk)\n\t\t\t\tif err != nil 
{\n\t\t\t\t\tdvid.Errorf(\"couldn't get max version key from Key %v: %v\\n\", kv.K, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\tkvsToStore[v] = nil\n\t\t\t\t}\n\t\t\t\tnumStoredKV = 0\n\t\t\t}\n\t\t\tcurV, err := storage.VersionFromDataKey(kv.K)\n\t\t\tif err != nil {\n\t\t\t\tdvid.Errorf(\"unable to get version from key-value: %v\\n\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcurBytes := uint64(len(kv.V) + len(kv.K))\n\t\t\tif _, onPath := versionsOnPath[curV]; onPath {\n\t\t\t\tfor _, v := range versionsToStore {\n\t\t\t\t\tif curV <= v {\n\t\t\t\t\t\tkvsToStore[v] = kv\n\t\t\t\t\t\tnumStoredKV++\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tkvTotal++\n\t\t\tbytesTotal += curBytes\n\t\t\tstatsTotal.addKV(kv.K, kv.V)\n\t\t}\n\t}()\n\n\t// Send all kv pairs for the source data instance down the channel.\n\tbegKey, endKey := storage.DataInstanceKeyRange(d1.InstanceID())\n\tkeysOnly := false\n\tif err := srcDB.RawRangeQuery(begKey, endKey, keysOnly, rawCh, nil); err != nil {\n\t\treturn fmt.Errorf(\"push voxels %q range query: %v\", d1.DataName(), err)\n\t}\n\twg.Wait()\n\treturn nil\n}", "func patchRelease(f *os.File, info *ReleaseInfo) {\n\t// Release note for different labels\n\tf.WriteString(fmt.Sprintf(\"## Changelog since %s\\n\\n\", info.startTag))\n\n\tif len(info.releaseActionRequiredPRs) > 0 {\n\t\tf.WriteString(\"### Action Required\\n\\n\")\n\t\tfor _, pr := range info.releaseActionRequiredPRs {\n\t\t\tf.WriteString(fmt.Sprintf(\"* %s (#%d, @%s)\\n\", extractReleaseNoteFromPR(info.prMap[pr]), pr, *info.prMap[pr].User.Login))\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t}\n\n\tif len(info.releasePRs) > 0 {\n\t\tf.WriteString(\"### Other notable changes\\n\\n\")\n\t\tfor _, pr := range info.releasePRs {\n\t\t\tf.WriteString(fmt.Sprintf(\"* %s (#%d, @%s)\\n\", extractReleaseNoteFromPR(info.prMap[pr]), pr, *info.prMap[pr].User.Login))\n\t\t}\n\t\tf.WriteString(\"\\n\")\n\t} else {\n\t\tf.WriteString(\"**No notable changes for this release**\\n\\n\")\n\t}\n}", "func lookUpJobRelease(releases []*manifest.Release, jobRelease string) bool {\n\tfor _, release := range releases {\n\t\tif release.Name == jobRelease {\n\t\t\treturn true\n\t\t}\n\t}\n\n\treturn false\n}", "func (c *Config) checkReleaseWorker(ctx context.Context, wID int, repoQueue <-chan RepoConfig, newRel chan<- ReleaseList) {\n\tlogrus.Debugf(\"[%d] checkReleaseWorker starting.\", wID)\n\tfor r := range repoQueue {\n\t\tlogrus.Debugf(\"[%d] checkReleaseWorker - repository '%s'\", wID, r.Repo)\n\t\tost := c.getOldState(r.Repo)\n\t\tnr, err := c.checkRepoReleases(ctx, wID, r.Prereleases, ost)\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"[%d] Check for repo '%s' failed: %s\\n\", wID, r.Repo, err)\n\t\t\tnewRel <- nil\n\t\t\tcontinue\n\t\t}\n\t\tnewRel <- nr\n\t\tlogrus.Debugf(\"[%d] checkReleaseWorker - job done.\", wID)\n\t}\n\tlogrus.Debugf(\"[%d] checkReleaseWorker leaving.\", wID)\n}", "func unlockTables(d *ddlCtx, t *meta.Meta, job *model.Job, arg *LockTablesArg) (ver int64, err error) {\n\tif arg.IndexOfUnlock >= len(arg.UnlockTables) {\n\t\treturn ver, nil\n\t}\n\tjob.SchemaID = arg.UnlockTables[arg.IndexOfUnlock].SchemaID\n\tjob.TableID = arg.UnlockTables[arg.IndexOfUnlock].TableID\n\ttbInfo, err := getTableInfo(t, job.TableID, job.SchemaID)\n\tif err != nil {\n\t\tif infoschema.ErrDatabaseNotExists.Equal(err) || infoschema.ErrTableNotExists.Equal(err) {\n\t\t\t// The table maybe has been dropped. 
just ignore this err and go on.\n\t\t\targ.IndexOfUnlock++\n\t\t\tjob.Args = []interface{}{arg}\n\t\t\treturn ver, nil\n\t\t}\n\t\treturn ver, err\n\t}\n\n\tneedUpdateTableInfo := unlockTable(tbInfo, arg)\n\tif needUpdateTableInfo {\n\t\tver, err = updateVersionAndTableInfo(d, t, job, tbInfo, true)\n\t\tif err != nil {\n\t\t\treturn ver, errors.Trace(err)\n\t\t}\n\t}\n\n\targ.IndexOfUnlock++\n\tjob.Args = []interface{}{arg}\n\treturn ver, nil\n}", "func compareRelease(owner, repo, tagName string) (*github.CommitsComparison, error) {\n\tclient, ctx := defaultGithubClient(), context.Background()\n\tdefer timeTrack(time.Now(), \"API call to client.Repositories.CompareCommits()\")\n\tcc, _, err := client.Repositories.CompareCommits(ctx, owner, repo, tagName, \"HEAD\")\n\tif cc != nil {\n\t\treverseCommitOrder(cc)\n\t}\n\treturn cc, err\n}", "func Release(path string, change parser.SemVerChange, ch chan Result, options ReleaseOptions) {\n\tdefer close(ch)\n\n\t// Get Git User\n\tuser, err := git.GetUser(path)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Git] get user: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseGetGitUser,\n\t\tMessage: user.String(),\n\t}\n\n\t// Parse Commits\n\tcommits, err := parser.ParseCommits(path)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] parse commits: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseParseCommits,\n\t\tMessage: strconv.Itoa(len(commits)),\n\t}\n\n\t// Read version from last bump commit if exist\n\tvar version string\n\tif len(commits) > 0 {\n\t\tlastCommit := commits[len(commits)-1]\n\t\tif lastCommit.SemVer != \"\" {\n\t\t\tversion = lastCommit.SemVer\n\t\t\tch <- Result{\n\t\t\t\tPhase: PhaseLastVersionFromCommit,\n\t\t\t\tMessage: version,\n\t\t\t}\n\t\t}\n\t}\n\n\t// Read version from npm (package.json) if exist\n\tvar npmVersion string\n\tisNpm := npm.HasPackage(path)\n\tif isNpm {\n\t\tpkg, err := npm.ParsePackage(path)\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\n\t\t\t\t\t\"[Release] parse npm package: %v\",\n\t\t\t\t\terr,\n\t\t\t\t),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tnpmVersion = pkg.Version\n\t\tch <- Result{\n\t\t\tPhase: PhaseLastVersionFromPackage,\n\t\t\tMessage: npmVersion,\n\t\t}\n\t}\n\n\t// Inconsistency between commit history and package.json version\n\tif npmVersion != \"\" && npmVersion != version {\n\t\tch <- Result{\n\t\t\tPhase: PhaseLastVersionInconsistency,\n\t\t\tMessage: fmt.Sprintf(\n\t\t\t\t\"package.json: %s, git: %s\",\n\t\t\t\tnpmVersion,\n\t\t\t\tversion,\n\t\t\t),\n\t\t}\n\t\tversion = npmVersion\n\t}\n\n\t// Find Change\n\tif change == \"\" {\n\t\tchange = semver.GetChange(commits)\n\t\tch <- Result{\n\t\t\tPhase: PhaseChangeFound,\n\t\t\tMessage: string(change),\n\t\t}\n\t}\n\n\t// Calculate new version\n\tnewVersion, err := semver.GetVersion(version, change)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\n\t\t\t\t\"[Release] get semver version: %v\",\n\t\t\t\terr,\n\t\t\t),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseNextVersion,\n\t\tMessage: newVersion,\n\t}\n\n\t// Generate changelog\n\tcf, _, err := changelog.Save(path, newVersion, version, change, commits, user)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] save changelog: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseChangelogUpdated,\n\t\tMessage: cf,\n\t}\n\n\t// Version: npm\n\tif isNpm {\n\t\t_, err = npm.Version(path, newVersion, 
string(change))\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\"[npm] version: %v\", err),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tch <- Result{\n\t\t\tPhase: PhasePackageVersion,\n\t\t}\n\t}\n\n\t// Release: Git\n\terr = git.Release(path, newVersion, user, options.SuppressPush)\n\tif err != nil {\n\t\tch <- Result{\n\t\t\tError: fmt.Errorf(\"[Release] git: %v\", err),\n\t\t}\n\t\treturn\n\t}\n\tch <- Result{\n\t\tPhase: PhaseGitRelease,\n\t\tMessage: newVersion,\n\t}\n\n\t// Publish: npm\n\tif isNpm {\n\t\t_, err = npm.Publish(path)\n\t\tif err != nil {\n\t\t\tch <- Result{\n\t\t\t\tError: fmt.Errorf(\"[npm] publish: %v\", err),\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tch <- Result{\n\t\t\tPhase: PhasePackagePublish,\n\t\t}\n\t}\n}", "func Release(version, commit, date string) {\n\tif version == \"\" {\n\t\tversion = \"dev\"\n\t} else if version[0] == 'v' {\n\t\tversion = version[1:]\n\t}\n\tif commit == \"\" {\n\t\tcommit = \"-\"\n\t}\n\tif date == \"\" {\n\t\tdate = \"-\"\n\t}\n\tVersion, Commit, Date = version, commit, date\n}", "func TestDaemon_Release(t *testing.T) {\n\td, start, clean, _, _, _ := mockDaemon(t)\n\tstart()\n\tdefer clean()\n\tw := newWait(t)\n\n\tctx := context.Background()\n\n\t// Perform a release\n\tid := updateImage(ctx, d, t)\n\n\t// Check that job is queued\n\tstat, err := d.JobStatus(ctx, id)\n\tif err != nil {\n\t\tt.Fatalf(\"Error: %s\", err.Error())\n\t} else if stat.Err != \"\" {\n\t\tt.Fatal(\"Job status error should be empty\")\n\t} else if stat.StatusString != job.StatusQueued {\n\t\tt.Fatalf(\"Expected %v but got %v\", job.StatusQueued, stat.StatusString)\n\t}\n\n\t// Wait for job to succeed\n\tw.ForJobSucceeded(d, id)\n\n\t// Wait and check that the git manifest has been altered\n\tw.Eventually(func() bool {\n\t\tco, err := d.Repo.Clone(ctx, d.GitConfig)\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\t\tdefer co.Clean()\n\t\t// open a file\n\t\tdirs := co.AbsolutePaths()\n\t\tif file, err := os.Open(filepath.Join(dirs[0], \"helloworld-deploy.yaml\")); err == nil {\n\n\t\t\t// make sure it gets closed\n\t\t\tdefer file.Close()\n\n\t\t\t// create a new scanner and read the file line by line\n\t\t\tscanner := bufio.NewScanner(file)\n\t\t\tfor scanner.Scan() {\n\t\t\t\tif strings.Contains(scanner.Text(), newHelloImage) {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\t// If we get here we haven't found the line we are looking for.\n\t\treturn false\n\t}, \"Waiting for new manifest\")\n\n}", "func TestDiff(t *testing.T) {\n\tt.Parallel()\n\n\tsrc, err := ioutil.TempFile(\"\", \"unarchive_test.txt\")\n\trequire.NoError(t, err)\n\tdefer os.Remove(src.Name())\n\n\tdestInvalid, err := ioutil.TempFile(\"\", \"unarchive_test.txt\")\n\trequire.NoError(t, err)\n\tdefer os.Remove(destInvalid.Name())\n\n\tt.Run(\"source does not exist\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: \"\",\n\t\t\tDestination: \"/tmp\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, \"cannot unarchive: stat : no such file or directory\")\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"destination is not directory\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: destInvalid.Name(),\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, fmt.Sprintf(\"invalid destination \\\"%s\\\", must be 
directory\", u.Destination))\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"destination does not exist\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: \"\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.EqualError(t, err, fmt.Sprintf(\"destination \\\"%s\\\" does not exist\", u.Destination))\n\t\tassert.Equal(t, resource.StatusCantChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n\n\tt.Run(\"unarchive\", func(t *testing.T) {\n\t\tu := &Unarchive{\n\t\t\tSource: src.Name(),\n\t\t\tDestination: \"/tmp\",\n\t\t}\n\t\tstatus := resource.NewStatus()\n\n\t\terr := u.diff(status)\n\n\t\tassert.NoError(t, err)\n\t\tassert.Equal(t, u.Source, status.Diffs()[\"unarchive\"].Original())\n\t\tassert.Equal(t, u.Destination, status.Diffs()[\"unarchive\"].Current())\n\t\tassert.Equal(t, resource.StatusWillChange, status.StatusCode())\n\t\tassert.True(t, status.HasChanges())\n\t})\n}", "func main() {\n\tflag.Parse()\n\n\t// TODO: deserves a better CLI...\n\tif len(flag.Args()) < 1 || flag.Arg(0) != \"branch\" {\n\t\tpanic(\"Branch is the only supported operation. Diff and print are not implemented. Write is handled by prowtrans\")\n\t} else if flag.Arg(0) == \"branch\" {\n\t\tif len(flag.Args()) != 2 {\n\t\t\tpanic(\"must specify branch name\")\n\t\t}\n\t} else if len(flag.Args()) != 1 {\n\t\tpanic(\"too many arguments\")\n\t}\n\n\tif os.Args[1] == \"branch\" {\n\t\tif err := filepath.Walk(*inputDir, func(src string, file os.FileInfo, err error) error {\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"error: %s\\n\", err.Error())\n\t\t\t}\n\n\t\t\tif file.IsDir() {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif filepath.Ext(file.Name()) != \".yaml\" && filepath.Ext(file.Name()) != \".yml\" || file.Name() == \".base.yaml\" {\n\t\t\t\tlog.Println(\"skipping\", file.Name())\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tjobs := configuration.ReadTransformJobsConfig(src)\n\t\t\tif jobs.SupportReleaseBranching {\n\t\t\t\tbranch := \"release-\" + flag.Arg(1)\n\t\t\t\tjobs.Defaults.Branches = []string{branch}\n\t\t\t\tjobs.SupportReleaseBranching = false\n\t\t\t\tjobs.Defaults.Modifier = strings.Replace(jobs.Defaults.Modifier, \"master_\", fmt.Sprintf(\"%s_\", branch), 1)\n\n\t\t\t\tfor key, transform := range jobs.Transforms {\n\t\t\t\t\ttransform.JobAllowlist = branchJobSlices(transform.JobAllowlist, branch)\n\t\t\t\t\ttransform.JobDenylist = branchJobSlices(transform.JobDenylist, branch)\n\n\t\t\t\t\tfor key, val := range transform.Labels {\n\t\t\t\t\t\ttransform.Labels[key] = strings.Replace(val, \"master\", branch, 1)\n\t\t\t\t\t}\n\n\t\t\t\t\tjobs.Transforms[key] = transform\n\n\t\t\t\t}\n\t\t\t\tname := file.Name()\n\t\t\t\text := filepath.Ext(name)\n\t\t\t\tname = name[:len(name)-len(ext)] + \"-\" + flag.Arg(1) + ext\n\n\t\t\t\tdst := path.Join(*inputDir, name)\n\t\t\t\tif err := configuration.WriteTransformJobConfig(jobs, dst); err != nil {\n\t\t\t\t\texit(err, \"writing branched config failed\")\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn nil\n\t\t}); err != nil {\n\t\t\texit(err, \"walking through the private meta config files failed\")\n\t\t}\n\t} else {\n\t\t// may be useful to add the print and diff functionality here.\n\t\texit(nil, \"other operations are currently not supported by this utility. 
Please see prowgen or prowtrans\")\n\t}\n}", "func TestCreateProcessAverageReport(t *testing.T) {\n\n timeSlice := float64(TIMESLICE)\n processName := \"proc5\"\n\n testtable := []struct {\n tname string\n }{\n {\n tname: \"ok\",\n },\n }\n\n CreateDummyProcessMetrics(testNodeMetricsMap)\n\n for _, tc := range testtable {\n\n t.Run(tc.tname, func(t *testing.T) {\n\n var buffer bytes.Buffer\n writer := bufio.NewWriter(&buffer)\n\n err := json.NewEncoder(writer).Encode(CreateProcessAverageReport(testNodeMetricsMap, processName, timeSlice))\n if err != nil {\n t.Fatalf(\"failed writing json: %s\", err)\n }\n writer.Flush()\n\n goldenPath := filepath.Join(\"testdata\", filepath.FromSlash(t.Name()) + \".golden\")\n\n\n if *update {\n\n t.Log(\"update golden file\")\n if err := ioutil.WriteFile(goldenPath, buffer.Bytes(), 0644); err != nil {\n t.Fatalf(\"failed to update golden file %s: %s\", goldenPath, err)\n }\n\n }\n\n\n goldenData, err := ioutil.ReadFile(goldenPath)\n\n if err != nil {\n t.Fatalf(\"failed reading .golden file %s: %s\", goldenPath, err)\n }\n\n t.Log(string(buffer.Bytes()))\n\n if !bytes.Equal(buffer.Bytes(), goldenData) {\n t.Errorf(\"bytes do not match .golden file %s\", goldenPath)\n }\n\n })\n }\n\n}", "func (c *Controller) triggerJobs(logger logrus.FieldLogger, instance string, change client.ChangeInfo) error {\n\tcloneURI := source.CloneURIFromOrgRepo(instance, change.Project)\n\tbaseSHA, err := c.gc.GetBranchRevision(instance, change.Project, change.Branch)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"GetBranchRevision: %w\", err)\n\t}\n\n\ttype triggeredJob struct {\n\t\tname string\n\t\treport bool\n\t}\n\tvar triggeredJobs []triggeredJob\n\ttriggerTimes := map[string]time.Time{}\n\n\trefs, err := CreateRefs(instance, change.Project, change.Branch, baseSHA, change)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"createRefs from %s at %s: %w\", cloneURI, baseSHA, err)\n\t}\n\n\ttype jobSpec struct {\n\t\tspec prowapi.ProwJobSpec\n\t\tlabels map[string]string\n\t\tannotations map[string]string\n\t}\n\tvar jobSpecs []jobSpec\n\tbaseSHAGetter := func() (string, error) { return baseSHA, nil }\n\tvar hasRelatedChanges *bool\n\t// This headSHAGetter will return the empty string instead of the head SHA in cases where we can be certain that change does not\n\t// modify inrepoconfig. This allows multiple changes to share a ProwYAML cache entry so long as they don't touch inrepo config themselves.\n\theadSHAGetter := func() (string, error) {\n\t\tchanges, err := client.ChangedFilesProvider(&change)()\n\t\tif err != nil {\n\t\t\t// This is a best effort optimization, log the error, but just use CurrentRevision in this case.\n\t\t\tlogger.WithError(err).Info(\"Failed to get changed files for the purpose of prowYAML cache optimization. Skipping optimization.\")\n\t\t\treturn change.CurrentRevision, nil\n\t\t}\n\t\tif config.ContainsInRepoConfigPath(changes) {\n\t\t\treturn change.CurrentRevision, nil\n\t\t}\n\t\tif hasRelatedChanges == nil {\n\t\t\tif res, err := c.gc.HasRelatedChanges(instance, change.ChangeID, change.CurrentRevision); err != nil {\n\t\t\t\tlogger.WithError(err).Info(\"Failed to get related changes for the purpose of prowYAML cache optimization. 
Skipping optimization.\")\n\t\t\t\treturn change.CurrentRevision, nil\n\t\t\t} else {\n\t\t\t\thasRelatedChanges = &res\n\t\t\t}\n\t\t}\n\t\tif *hasRelatedChanges {\n\t\t\t// If the change is part of a chain the commit may include files not identified by the API.\n\t\t\t// So we can't easily check if the change includes inrepo config file changes.\n\t\t\treturn change.CurrentRevision, nil\n\t\t}\n\t\t// If we know the change doesn't touch the inrepo config itself, we don't need to check out the head commits.\n\t\t// This is particularly useful because it lets multiple changes share a ProwYAML cache entry so long as they don't touch inrepo config themselves.\n\t\treturn \"\", nil\n\t}\n\n\tswitch change.Status {\n\tcase client.Merged:\n\t\tvar postsubmits []config.Postsubmit\n\t\t// Gerrit server might be unavailable intermittently, retry inrepoconfig\n\t\t// processing for increased reliability.\n\t\tfor attempt := 0; attempt < inRepoConfigRetries; attempt++ {\n\t\t\tpostsubmits, err = c.inRepoConfigCache.GetPostsubmits(cloneURI, baseSHAGetter, headSHAGetter)\n\t\t\t// Break if there was no error, or if there was a merge conflict\n\t\t\tif err == nil {\n\t\t\t\tgerritMetrics.inrepoconfigResults.WithLabelValues(instance, change.Project, client.ResultSuccess).Inc()\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif strings.Contains(err.Error(), \"Merge conflict in\") {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\t// Postsubmit jobs are triggered only once. Still try to fall back on\n\t\t// static jobs if failed to retrieve inrepoconfig jobs.\n\t\tif err != nil {\n\t\t\tgerritMetrics.inrepoconfigResults.WithLabelValues(instance, change.Project, client.ResultError).Inc()\n\n\t\t\t// Reports error back to Gerrit. handleInRepoConfigError is\n\t\t\t// responsible for not sending the same message again and again on\n\t\t\t// the same commit.\n\t\t\tif postErr := c.handleInRepoConfigError(err, instance, change); postErr != nil {\n\t\t\t\tlogger.WithError(postErr).Error(\"Failed reporting inrepoconfig processing error back to Gerrit.\")\n\t\t\t}\n\t\t\t// Static postsubmit jobs are included as part of output from\n\t\t\t// inRepoConfigCache.GetPostsubmits, fallback to static only\n\t\t\t// when inrepoconfig failed.\n\t\t\tpostsubmits = append(postsubmits, c.config().GetPostsubmitsStatic(cloneURI)...)\n\t\t}\n\n\t\tfor _, postsubmit := range postsubmits {\n\t\t\tif shouldRun, err := postsubmit.ShouldRun(change.Branch, client.ChangedFilesProvider(&change)); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to determine if postsubmit %q should run: %w\", postsubmit.Name, err)\n\t\t\t} else if shouldRun {\n\t\t\t\tif change.Submitted != nil {\n\t\t\t\t\ttriggerTimes[postsubmit.Name] = change.Submitted.Time\n\t\t\t\t}\n\t\t\t\tjobSpecs = append(jobSpecs, jobSpec{\n\t\t\t\t\tspec: pjutil.PostsubmitSpec(postsubmit, refs),\n\t\t\t\t\tlabels: postsubmit.Labels,\n\t\t\t\t\tannotations: postsubmit.Annotations,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\tcase client.New:\n\t\tvar presubmits []config.Presubmit\n\t\t// Gerrit server might be unavailable intermittently, retry inrepoconfig\n\t\t// processing for increased reliability.\n\t\tfor attempt := 0; attempt < inRepoConfigRetries; attempt++ {\n\t\t\tpresubmits, err = c.inRepoConfigCache.GetPresubmits(cloneURI, baseSHAGetter, headSHAGetter)\n\t\t\tif err == nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\t// Reports error back to Gerrit. 
handleInRepoConfigError is\n\t\t\t// responsible for not sending the same message again and again on\n\t\t\t// the same commit.\n\t\t\tif postErr := c.handleInRepoConfigError(err, instance, change); postErr != nil {\n\t\t\t\tlogger.WithError(postErr).Error(\"Failed reporting inrepoconfig processing error back to Gerrit.\")\n\t\t\t}\n\t\t\t// There is no need to keep going when failed to get inrepoconfig\n\t\t\t// jobs.\n\t\t\t// Imagining the scenario that:\n\t\t\t// - Commit #abc triggered static job job-A, inrepoconfig jobs job-B\n\t\t\t// and job-C\n\t\t\t// - Both job-B and job-C failed\n\t\t\t// - Commit #def was pushed. Inrepoconfig failed, falling back to\n\t\t\t// trigger static job job-A.\n\t\t\t// - job-A passed.\n\t\t\t// - Prow would make decision on the result of job-A and ignore the\n\t\t\t// rest. (Yes this is a Prow bug, which should not be a problem when\n\t\t\t// each prowjob is reported to an individual Gerrit Check).\n\t\t\t// So long story short: kicking off partial prowjobs is worse than\n\t\t\t// kicking off nothing.\n\t\t\treturn err\n\t\t}\n\n\t\taccount, err := c.gc.Account(instance)\n\t\tif err != nil {\n\t\t\t// This would happen if authenticateOnce hasn't done register this instance yet\n\t\t\treturn fmt.Errorf(\"account not found for %q: %w\", instance, err)\n\t\t}\n\n\t\tlastUpdate, ok := c.tracker.Current()[instance][change.Project]\n\t\tif !ok {\n\t\t\tlastUpdate = time.Now()\n\t\t\tlogger.WithField(\"lastUpdate\", lastUpdate).Warnf(\"lastUpdate not found, falling back to now\")\n\t\t}\n\n\t\trevision := change.Revisions[change.CurrentRevision]\n\t\tfailedJobs := failedJobs(account.AccountID, revision.Number, change.Messages...)\n\t\tfailed, all := presubmitContexts(failedJobs, presubmits, logger)\n\t\tmessages := currentMessages(change, lastUpdate)\n\t\tlogger.WithField(\"failed\", len(failed)).Debug(\"Failed jobs parsed from previous comments.\")\n\t\tfilters := []pjutil.Filter{\n\t\t\tmessageFilter(messages, failed, all, triggerTimes, logger),\n\t\t}\n\t\t// Automatically trigger the Prow jobs if the revision is new and the\n\t\t// change is not in WorkInProgress.\n\t\tif revision.Created.Time.After(lastUpdate) && !change.WorkInProgress {\n\t\t\tfilters = append(filters, &timeAnnotationFilter{\n\t\t\t\tFilter: pjutil.NewTestAllFilter(),\n\t\t\t\teventTime: revision.Created.Time,\n\t\t\t\ttriggerTimes: triggerTimes,\n\t\t\t})\n\t\t}\n\t\ttoTrigger, err := pjutil.FilterPresubmits(pjutil.NewAggregateFilter(filters), client.ChangedFilesProvider(&change), change.Branch, presubmits, logger)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"filter presubmits: %w\", err)\n\t\t}\n\t\t// At this point triggerTimes should be properly populated as a side effect of FilterPresubmits.\n\n\t\t// Reply with help information to run the presubmit Prow jobs if requested.\n\t\tfor _, msg := range messages {\n\t\t\tneedsHelp, note := pjutil.ShouldRespondWithHelp(msg.Message, len(toTrigger))\n\t\t\t// Lock for projectOptOutHelp, which is a map.\n\t\t\tc.lock.RLock()\n\t\t\toptedOut := isProjectOptOutHelp(c.projectsOptOutHelp, instance, change.Project)\n\t\t\tc.lock.RUnlock()\n\t\t\tif needsHelp && !optedOut {\n\t\t\t\trunWithTestAllNames, optionalJobsCommands, requiredJobsCommands, err := pjutil.AvailablePresubmits(client.ChangedFilesProvider(&change), change.Branch, presubmits, logger.WithField(\"help\", true))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tmessage := pjutil.HelpMessage(instance, change.Project, change.Branch, note, runWithTestAllNames, 
optionalJobsCommands, requiredJobsCommands)\n\t\t\t\tif err := c.gc.SetReview(instance, change.ID, change.CurrentRevision, message, nil); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tgerritMetrics.triggerHelpLatency.WithLabelValues(instance).Observe(float64(time.Since(msg.Date.Time).Seconds()))\n\t\t\t\t// Only respond to the first message that requests help information.\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tfor _, presubmit := range toTrigger {\n\t\t\tjobSpecs = append(jobSpecs, jobSpec{\n\t\t\t\tspec: pjutil.PresubmitSpec(presubmit, refs),\n\t\t\t\tlabels: presubmit.Labels,\n\t\t\t\tannotations: presubmit.Annotations,\n\t\t\t})\n\t\t}\n\t}\n\n\tfor _, jSpec := range jobSpecs {\n\t\tlabels, annotations := LabelsAndAnnotations(instance, jSpec.labels, jSpec.annotations, change)\n\n\t\tpj := pjutil.NewProwJob(jSpec.spec, labels, annotations)\n\t\tlogger := logger.WithField(\"prowjob\", pj.Name)\n\t\ttimeBeforeCreate := time.Now()\n\t\tif _, err := c.prowJobClient.Create(context.TODO(), &pj, metav1.CreateOptions{}); err != nil {\n\t\t\tlogger.WithError(err).Errorf(\"Failed to create ProwJob\")\n\t\t\tcontinue\n\t\t}\n\t\tgerritMetrics.jobCreationDuration.WithLabelValues(instance, change.Project).Observe((float64(time.Since(timeBeforeCreate).Seconds())))\n\t\tlogger.Infof(\"Triggered new job\")\n\t\tif eventTime, ok := triggerTimes[pj.Spec.Job]; ok {\n\t\t\tgerritMetrics.triggerLatency.WithLabelValues(instance, change.Project).Observe(float64(time.Since(eventTime).Seconds()))\n\t\t}\n\t\ttriggeredJobs = append(triggeredJobs, triggeredJob{\n\t\t\tname: jSpec.spec.Job,\n\t\t\treport: jSpec.spec.Report,\n\t\t})\n\t}\n\n\tif len(triggeredJobs) == 0 {\n\t\treturn nil\n\t}\n\n\t// comment back to gerrit if Report is set for any of the jobs\n\tvar reportingJobs int\n\tvar jobList string\n\tfor _, job := range triggeredJobs {\n\t\tif job.report {\n\t\t\tjobList += fmt.Sprintf(\"\\n * Name: %s\", job.name)\n\t\t\treportingJobs++\n\t\t}\n\t}\n\n\tif reportingJobs > 0 {\n\t\tmessage := fmt.Sprintf(\"Triggered %d prow jobs (%d suppressed reporting): \", len(triggeredJobs), len(triggeredJobs)-reportingJobs)\n\t\t// If we have a Deck URL, link to all results for the CL, otherwise list the triggered jobs.\n\t\tlink, err := deckLinkForPR(c.config().Gerrit.DeckURL, refs, change.Status)\n\t\tif err != nil {\n\t\t\tlogger.WithError(err).Error(\"Failed to generate link to job results on Deck.\")\n\t\t}\n\t\tif link != \"\" && err == nil {\n\t\t\tmessage = message + link\n\t\t} else {\n\t\t\tmessage = message + jobList\n\t\t}\n\t\tif err := c.gc.SetReview(instance, change.ID, change.CurrentRevision, message, nil); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (r *runners) releaseLint(cmd *cobra.Command, args []string) error {\n\tvar isBuildersRelease bool\n\tvar lintReleaseData []byte\n\tvar contentType string\n\tif r.args.lintReleaseYamlDir != \"\" {\n\t\tdata, err := tarYAMLDir(r.args.lintReleaseYamlDir)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to read yaml dir\")\n\t\t}\n\t\tlintReleaseData = data\n\t\t// TODO: all specfiles are charts => isBuildersRelease\n\t\tisBuildersRelease = false\n\t\tcontentType = \"application/tar\"\n\t} else if r.args.lintReleaseChart != \"\" {\n\t\tdata, err := ioutil.ReadFile(r.args.lintReleaseChart)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to read chart file\")\n\t\t}\n\t\tlintReleaseData = data\n\t\tisBuildersRelease = true\n\t\tcontentType = \"application/gzip\"\n\t} else {\n\t\treturn errors.Errorf(\"a 
yaml directory or a chart file is required\")\n\t}\n\n\tif _, ok := validFailOnValues[r.args.lintReleaseFailOn]; !ok {\n\t\treturn errors.Errorf(\"fail-on value %q not supported, supported values are [info, warn, error, none]\", r.args.lintReleaseFailOn)\n\t}\n\n\tlintResult, err := r.api.LintRelease(r.appType, lintReleaseData, isBuildersRelease, contentType)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to lint release\")\n\t}\n\n\tif err := print.LintErrors(r.w, lintResult); err != nil {\n\t\treturn errors.Wrap(err, \"failed to print lint errors\")\n\t}\n\n\tif hasError := shouldFail(lintResult, r.args.lintReleaseFailOn); hasError {\n\t\treturn errors.Errorf(\"One or more errors of severity %q or higher were found\", r.args.lintReleaseFailOn)\n\t}\n\n\treturn nil\n}", "func (s *versioningIntegSuite) TestVersioningStateNotDestroyedByOtherUpdates() {\n\tctx := NewContext()\n\ttq := \"integration-versioning-not-destroyed\"\n\n\tres, err := s.engine.UpdateWorkerBuildIdOrdering(ctx, &workflowservice.UpdateWorkerBuildIdOrderingRequest{\n\t\tNamespace: s.namespace,\n\t\tTaskQueue: tq,\n\t\tVersionId: &taskqueuepb.VersionId{WorkerBuildId: \"foo\"},\n\t\tPreviousCompatible: nil,\n\t\tBecomeDefault: true,\n\t})\n\ts.NoError(err)\n\ts.NotNil(res)\n\n\tsdkWorker := worker.New(s.sdkClient, tq, worker.Options{})\n\tif err := sdkWorker.Start(); err != nil {\n\t\ts.Logger.Fatal(\"Error starting worker\", tag.Error(err))\n\t}\n\n\twfFunc := func(ctx workflow.Context) error {\n\t\t// This timer exists to ensure the lease-renewal on the task queue happens, to verify that doesn't blow up data.\n\t\t// The renewal interval has been lowered in this suite.\n\t\t_ = workflow.Sleep(ctx, 3*time.Second)\n\t\treturn nil\n\t}\n\tsdkWorker.RegisterWorkflow(wfFunc)\n\tid := \"integration-test-unhandled-command-new-task\"\n\tworkflowOptions := sdkclient.StartWorkflowOptions{ID: id, TaskQueue: tq}\n\tctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)\n\tdefer cancel()\n\tworkflowRun, err := s.sdkClient.ExecuteWorkflow(ctx, workflowOptions, wfFunc)\n\ts.NoError(err)\n\terr = workflowRun.Get(ctx, nil)\n\ts.NoError(err)\n\tsdkWorker.Stop()\n\n\tres2, err := s.engine.GetWorkerBuildIdOrdering(ctx, &workflowservice.GetWorkerBuildIdOrderingRequest{\n\t\tNamespace: s.namespace,\n\t\tTaskQueue: tq,\n\t})\n\ts.NoError(err)\n\ts.NotNil(res2)\n\ts.Equal(\"foo\", res2.CurrentDefault.GetVersion().GetWorkerBuildId())\n}", "func compareRevision(args []string, rev string) string {\n\tif verbose {\n\t\tfmt.Printf(\"compare: %s\\n\", args)\n\t}\n\tvar sourceRev, targetRev string\n\n\tif revision != \"\" {\n\t\tvals := strings.Split(revision, \":\")\n\t\tif len(vals) == 1 {\n\t\t\tsourceRev = vals[0]\n\t\t\ttargetRev = vals[0]\n\t\t} else if len(vals) == 2 {\n\t\t\tsourceRev = vals[0]\n\t\t\ttargetRev = vals[1]\n\t\t} else {\n\t\t\tcroak(\"incorrect value for compare -r option.\")\n\t\t}\n\t}\n\tif verbose {\n\t\tfmt.Printf(\"Checkout 1 revision: %s\\n\", sourceRev)\n\t\tfmt.Printf(\"Checkout 2 revision: %s\\n\", targetRev)\n\t}\n\tif len(args) != 2 {\n\t\tcroak(\"compare requires exactly two repository-name args, but there are %v.\", args)\n\t}\n\tsource := args[0]\n\ttarget := args[1]\n\tif !isdir(source) || !isdir(target) {\n\t\tcroak(\"both repository directories must exist.\")\n\t}\n\tTMPDIR := os.Getenv(\"TMPDIR\")\n\trsource, err := ioutil.TempDir(TMPDIR, \"sourcecheckout\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tos.RemoveAll(rsource)\n\trtarget, err := ioutil.TempDir(TMPDIR, 
\"targetcheckout\")\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tos.RemoveAll(rtarget)\n\tvar sourcedir, targetdir string\n\tunder(source, func() {\n\t\tsourcedir = checkout(rsource, sourceRev)\n\t\tif sourcedir == \"\" {\n\t\t\tpanic(\"sourcedir unexpectedly nil\")\n\t\t}\n\t})\n\tunder(target, func() {\n\t\ttargetdir = checkout(rtarget, targetRev)\n\t\tif targetdir == \"\" {\n\t\t\tpanic(\"sourcedir unexpectedly nil\")\n\t\t}\n\t})\n\tif acceptMissing {\n\t\tif !exists(sourcedir) {\n\t\t\t// replace by empty directory\n\t\t\tos.MkdirAll(sourcedir, 0755)\n\t\t}\n\t\tif !exists(targetdir) {\n\t\t\t// replace by empty directory\n\t\t\tos.MkdirAll(targetdir, 0755)\n\t\t}\n\t}\n\t// Ugh. These are the types of the original repository\n\t// directories, which in particulat do not imply the ignorables\n\t// of any corresponding checkout directories. The obvious way\n\t// to fix this - run identifyRepo() on the checkout\n\t// directories sourcedir and targetdir - works for the CVS\n\t// case but not for the Subversion case. The problem is that\n\t// the checkout diectory is a *subdirectory* of the top-level\n\t// directory where we can expect to find a .svn file.\n\tsourcetype := identifyRepo(source)\n\ttargettype := identifyRepo(target)\n\tvar diff string\n\tdollarJunk := regexp.MustCompile(` @\\(#\\) |\\$Id.*\\$|\\$Header.*\\$|$Log.*\\$`)\n\tisDollarLine := func(line string) bool {\n\t\treturn dollarJunk.MatchString(line)\n\t}\n\tsourcefiles := dirlist(sourcedir)\n\ttargetfiles := dirlist(targetdir)\n\tfor _, path := range sourcefiles.Union(targetfiles).Ordered() {\n\t\tsourcepath := filepath.Join(sourcedir, path)\n\t\ttargetpath := filepath.Join(targetdir, path)\n\t\tif isdir(sourcepath) || isdir(targetpath) || ignorable(path, sourcetype) || ignorable(path, targettype) {\n\t\t\tcontinue\n\t\t}\n\t\tif !targetfiles.Contains(path) {\n\t\t\tdiff += fmt.Sprintf(\"%s: source only\\n\", path)\n\t\t\tcontinue\n\t\t}\n\t\tif !sourcefiles.Contains(path) {\n\t\t\tdiff += fmt.Sprintf(\"%s: target only\\n\", path)\n\t\t\tcontinue\n\t\t}\n\t\tsourceText, err := ioutil.ReadFile(sourcepath)\n\t\tif err != nil {\n\t\t\tcomplain(\"%s %s is unreadable\", sourcetype.name, path)\n\t\t}\n\t\ttargetText, err := ioutil.ReadFile(targetpath)\n\t\tif err != nil {\n\t\t\tcomplain(\"%s %s is unreadable\", targettype.name, path)\n\t\t}\n\t\t// When this shelled out to diff it had these filters:\n\t\t// --ignore-matching-lines=' @(#) '\n\t\t// --ignore-matching-lines='$Id.*$'\n\t\t// --ignore-matching-lines='$Header.*$'\n\t\t// --ignore-matching-lines='$Log.*$'\n\n\t\tif !bytes.Equal(sourceText, targetText) {\n\t\t\tlines0 := difflib.SplitLines(string(sourceText))\n\t\t\tlines1 := difflib.SplitLines(string(targetText))\n\t\t\tfile0 := path + \" (\" + sourcetype.name + \")\"\n\t\t\tfile1 := path + \" (\" + targettype.name + \")\"\n\t\t\tvar text string\n\t\t\tdiffObj := difflib.LineDiffParams{\n\t\t\t\tA: lines0,\n\t\t\t\tB: lines1,\n\t\t\t\tFromFile: file0,\n\t\t\t\tToFile: file1,\n\t\t\t\tContext: 3,\n\t\t\t\tIsJunkLine: isDollarLine,\n\t\t\t}\n\t\t\tif unified {\n\t\t\t\ttext, _ = difflib.GetUnifiedDiffString(diffObj)\n\t\t\t}\n\t\t\tif context {\n\t\t\t\ttext, _ = difflib.GetContextDiffString(diffObj)\n\t\t\t}\n\t\t\tdiff += text\n\t\t} else if same {\n\t\t\tdiff += fmt.Sprintf(\"Same: %s\\n\", path)\n\t\t}\n\n\t\t// Check for permission mismatch, We have to skip directories beccause\n\t\t// of Go MkdirAll's behavior that requiring seek permission; this makes for\n\t\t// spurious mismatches in the x permission bit. 
The error cases here\n\t\t// can be reached by symlink entries in Subversion files.\n\t\tsstat, err1 := os.Stat(sourcepath)\n\t\tif err1 != nil {\n\t\t\tcomplain(\"source path stat: %s\", err1)\n\t\t\tcontinue\n\t\t}\n\t\ttstat, err2 := os.Stat(targetpath)\n\t\tif err2 != nil {\n\t\t\tcomplain(\"target path stat: %s\", err2)\n\t\t\tcontinue\n\t\t}\n\t\tif sstat.Mode() != tstat.Mode() {\n\t\t\tdiff += fmt.Sprintf(\"%s: %0o -> %0o\\n\", path, sstat.Mode(), tstat.Mode())\n\t\t}\n\t}\n\tos.RemoveAll(rsource)\n\tos.RemoveAll(rtarget)\n\treturn diff\n}", "func (db *db) takeNewVersions() {\n\tfor version := range db.newVersions {\n\t\t// This is just to make functional tests easier to write.\n\t\tdelay := db.sequins.config.Test.UpgradeDelay.Duration\n\t\tif delay != 0 {\n\t\t\ttime.Sleep(delay)\n\t\t}\n\n\t\tdb.upgrade(version)\n\t}\n}", "func (up *Updater) UpdateTo(rel *Release, cmdPath string) error {\n\tvar client http.Client\n\tsrc, redirectURL, err := up.api.Repositories.DownloadReleaseAsset(up.apiCtx, rel.RepoOwner, rel.RepoName, rel.AssetID, &client)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to call GitHub Releases API for getting an asset(ID: %d) for repository '%s/%s': %s\", rel.AssetID, rel.RepoOwner, rel.RepoName, err)\n\t}\n\tif redirectURL != \"\" {\n\t\tlog.Println(\"Redirect URL was returned while trying to download a release asset from GitHub API. Falling back to downloading from asset URL directly:\", redirectURL)\n\t\tsrc, err = up.downloadDirectlyFromURL(redirectURL)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tdefer src.Close()\n\n\tdata, err := ioutil.ReadAll(src)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed reading asset body: %v\", err)\n\t}\n\n\tif up.validator == nil {\n\t\treturn uncompressAndUpdate(bytes.NewReader(data), rel.AssetURL, cmdPath)\n\t}\n\n\tvalidationSrc, validationRedirectURL, err := up.api.Repositories.DownloadReleaseAsset(up.apiCtx, rel.RepoOwner, rel.RepoName, rel.ValidationAssetID, &client)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to call GitHub Releases API for getting an validation asset(ID: %d) for repository '%s/%s': %s\", rel.ValidationAssetID, rel.RepoOwner, rel.RepoName, err)\n\t}\n\tif validationRedirectURL != \"\" {\n\t\tlog.Println(\"Redirect URL was returned while trying to download a release validation asset from GitHub API. 
Falling back to downloading from asset URL directly:\", redirectURL)\n\t\tvalidationSrc, err = up.downloadDirectlyFromURL(validationRedirectURL)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tdefer validationSrc.Close()\n\n\tvalidationData, err := ioutil.ReadAll(validationSrc)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed reading validation asset body: %v\", err)\n\t}\n\n\tif err := up.validator.Validate(data, validationData); err != nil {\n\t\treturn fmt.Errorf(\"Failed validating asset content: %v\", err)\n\t}\n\n\treturn uncompressAndUpdate(bytes.NewReader(data), rel.AssetURL, cmdPath)\n}", "func (client *Client) writeJob(job *Job) {\n client.out <- job\n}", "func (o *GetReleaseOptions) Run() error {\n\tjxClient, curNs, err := o.JXClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\tns := o.Namespace\n\tif ns == \"\" {\n\t\tns = curNs\n\t}\n\treleases, err := kube.GetOrderedReleases(jxClient, ns, o.Filter)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(releases) == 0 {\n\t\tsuffix := \"\"\n\t\tif o.Filter != \"\" {\n\t\t\tsuffix = fmt.Sprintf(\" for filter: %s\", util.ColorInfo(o.Filter))\n\t\t}\n\t\tlog.Logger().Infof(\"No Releases found in namespace %s%s.\", util.ColorInfo(ns), suffix)\n\t\tlog.Logger().Infof(\"To create a release try merging code to a master branch to trigger a pipeline or try: %s\", util.ColorInfo(\"jx start build\"))\n\t\treturn nil\n\t}\n\ttable := o.CreateTable()\n\ttable.AddRow(\"NAME\", \"VERSION\")\n\tfor _, release := range releases {\n\t\ttable.AddRow(release.Spec.Name, release.Spec.Version)\n\t}\n\ttable.Render()\n\treturn nil\n}", "func startReleasedLockHistoryCleanupJob() {\n\tlog.Info(fmt.Sprintf(\"start released lock history cleanup job(every %s)\", CleanupInterval))\n\tgopool.Go(func(ctx context.Context) {\n\t\ttick := time.NewTicker(CleanupInterval)\n\t\tdefer tick.Stop()\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase <-tick.C:\n\t\t\t\terr := CleanupReleasedLockHistory(ctx)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Error(\"cleanup lock history failed\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t})\n}", "func CollectReleaseSpecsAndProviderLinks(mStruct *manifest.Manifest, baseDir string, namespace string) (map[string]map[string]manifest.JobSpec, map[string]map[string]manifest.JobLink, error) {\n\t// Contains YAML.load('.../release_name/job_name/job.MF')\n\tjobReleaseSpecs := map[string]map[string]manifest.JobSpec{}\n\n\t// Lists every link provided by the job\n\tjobProviderLinks := map[string]map[string]manifest.JobLink{}\n\n\tfor _, instanceGroup := range mStruct.InstanceGroups {\n\t\tfor jobIdx, job := range instanceGroup.Jobs {\n\t\t\t// make sure a map entry exists for the current job release\n\t\t\tif _, ok := jobReleaseSpecs[job.Release]; !ok {\n\t\t\t\tjobReleaseSpecs[job.Release] = map[string]manifest.JobSpec{}\n\t\t\t}\n\n\t\t\t// load job.MF into jobReleaseSpecs[job.Release][job.Name]\n\t\t\tif _, ok := jobReleaseSpecs[job.Release][job.Name]; !ok {\n\t\t\t\tjobMFFilePath := filepath.Join(baseDir, \"jobs-src\", job.Release, job.Name, \"job.MF\")\n\t\t\t\tjobMfBytes, err := ioutil.ReadFile(jobMFFilePath)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, nil, err\n\t\t\t\t}\n\n\t\t\t\tjobSpec := manifest.JobSpec{}\n\t\t\t\tif err := yaml.Unmarshal([]byte(jobMfBytes), &jobSpec); err != nil {\n\t\t\t\t\treturn nil, nil, err\n\t\t\t\t}\n\t\t\t\tjobReleaseSpecs[job.Release][job.Name] = jobSpec\n\t\t\t}\n\n\t\t\t// spec of the current jobs release/name\n\t\t\tspec := 
jobReleaseSpecs[job.Release][job.Name]\n\n\t\t\t// Generate instance spec for each ig instance\n\t\t\t// This will be stored inside the current job under\n\t\t\t// job.properties.bosh_containerization\n\t\t\tvar jobsInstances []manifest.JobInstance\n\t\t\tfor i := 0; i < instanceGroup.Instances; i++ {\n\n\t\t\t\t// TODO: Understand whether there are negative side-effects to using this\n\t\t\t\t// default\n\t\t\t\tazs := []string{\"\"}\n\t\t\t\tif len(instanceGroup.Azs) > 0 {\n\t\t\t\t\tazs = instanceGroup.Azs\n\t\t\t\t}\n\n\t\t\t\tfor _, az := range azs {\n\t\t\t\t\tindex := len(jobsInstances)\n\t\t\t\t\tname := fmt.Sprintf(\"%s-%s\", instanceGroup.Name, job.Name)\n\t\t\t\t\tid := fmt.Sprintf(\"%v-%v-%v\", instanceGroup.Name, index, job.Name)\n\t\t\t\t\t// TODO: not allowed to hardcode svc.cluster.local\n\t\t\t\t\taddress := fmt.Sprintf(\"%s.%s.svc.cluster.local\", id, namespace)\n\n\t\t\t\t\tjobsInstances = append(jobsInstances, manifest.JobInstance{\n\t\t\t\t\t\tAddress: address,\n\t\t\t\t\t\tAZ: az,\n\t\t\t\t\t\tID: id,\n\t\t\t\t\t\tIndex: index,\n\t\t\t\t\t\tInstance: i,\n\t\t\t\t\t\tName: name,\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// set jobs.properties.bosh_containerization.instances with the ig instances\n\t\t\tinstanceGroup.Jobs[jobIdx].Properties.BOSHContainerization.Instances = jobsInstances\n\n\t\t\t// Create a list of fully evaluated links provided by the current job\n\t\t\t// These is specified in the job release job.MF file\n\t\t\tif spec.Provides != nil {\n\t\t\t\tvar properties map[string]interface{}\n\n\t\t\t\tfor _, provider := range spec.Provides {\n\t\t\t\t\tproperties = map[string]interface{}{}\n\t\t\t\t\tfor _, property := range provider.Properties {\n\t\t\t\t\t\t// generate a nested struct of map[string]interface{} when\n\t\t\t\t\t\t// a property is of the form foo.bar\n\t\t\t\t\t\tif strings.Contains(property, \".\") {\n\t\t\t\t\t\t\tpropertyStruct := RetrieveNestedProperty(spec, property)\n\t\t\t\t\t\t\tproperties = propertyStruct\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tproperties[property] = RetrievePropertyDefault(spec, property)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t// Override default spec values with explicit settings from the\n\t\t\t\t\t// current bosh deployment manifest, this should be done under each\n\t\t\t\t\t// job, inside a `properties` key.\n\t\t\t\t\tfor propertyName := range properties {\n\t\t\t\t\t\tif explicitSetting, ok := LookUpProperty(job, propertyName); ok {\n\t\t\t\t\t\t\tproperties[propertyName] = explicitSetting\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tproviderName := provider.Name\n\t\t\t\t\tproviderType := provider.Type\n\n\t\t\t\t\t// instance_group.job can override the link name through the\n\t\t\t\t\t// instance_group.job.provides, via the \"as\" key\n\t\t\t\t\tif instanceGroup.Jobs[jobIdx].Provides != nil {\n\t\t\t\t\t\tif value, ok := instanceGroup.Jobs[jobIdx].Provides[providerName]; ok {\n\t\t\t\t\t\t\tswitch value.(type) {\n\t\t\t\t\t\t\tcase map[interface{}]interface{}:\n\t\t\t\t\t\t\t\tif overrideLinkName, ok := value.(map[interface{}]interface{})[\"as\"]; ok {\n\t\t\t\t\t\t\t\t\tproviderName = fmt.Sprintf(\"%v\", overrideLinkName)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\t\treturn nil, nil, fmt.Errorf(\"unexpected type detected: %T, should have been a map\", value)\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif providers, ok := jobProviderLinks[providerType]; ok {\n\t\t\t\t\t\tif _, ok := providers[providerName]; ok {\n\t\t\t\t\t\t\treturn nil, nil, fmt.Errorf(\"multiple providers for link: 
name=%s type=%s\", providerName, providerType)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif _, ok := jobProviderLinks[providerType]; !ok {\n\t\t\t\t\t\tjobProviderLinks[providerType] = map[string]manifest.JobLink{}\n\t\t\t\t\t}\n\n\t\t\t\t\t// construct the jobProviderLinks of the current job that provides\n\t\t\t\t\t// a link\n\t\t\t\t\tjobProviderLinks[providerType][providerName] = manifest.JobLink{\n\t\t\t\t\t\tInstances: jobsInstances,\n\t\t\t\t\t\tProperties: properties,\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn jobReleaseSpecs, jobProviderLinks, nil\n}", "func StoreReleases(dbOwner, dbFolder, dbName string, releases map[string]ReleaseEntry) error {\n\tdbQuery := `\n\t\tUPDATE sqlite_databases\n\t\tSET release_list = $4, release_count = $5\n\t\tWHERE user_id = (\n\t\t\t\tSELECT user_id\n\t\t\t\tFROM users\n\t\t\t\tWHERE lower(user_name) = lower($1)\n\t\t\t)\n\t\t\tAND folder = $2\n\t\t\tAND db_name = $3`\n\tcommandTag, err := pdb.Exec(dbQuery, dbOwner, dbFolder, dbName, releases, len(releases))\n\tif err != nil {\n\t\tlog.Printf(\"Storing releases for database '%s%s%s' failed: %v\\n\", dbOwner, dbFolder, dbName, err)\n\t\treturn err\n\t}\n\tif numRows := commandTag.RowsAffected(); numRows != 1 {\n\t\tlog.Printf(\"Wrong number of rows (%v) affected when storing releases for database: '%s%s%s'\\n\", numRows,\n\t\t\tdbOwner, dbFolder, dbName)\n\t}\n\treturn nil\n}", "func TestBuildv16(t *testing.T) {\n\tdir := t.TempDir()\n\n\topts := Options{\n\t\tIndexDir: dir,\n\t\tRepositoryDescription: zoekt.Repository{\n\t\t\tName: \"repo\",\n\t\t\tSource: \"./testdata/repo/\",\n\t\t},\n\t\tDisableCTags: true,\n\t}\n\topts.SetDefaults()\n\n\tb, err := NewBuilder(opts)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tfor _, p := range []string{\"main.go\"} {\n\t\tblob, err := os.ReadFile(filepath.Join(\"../testdata/repo\", p))\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\tb.AddFile(p, blob)\n\t}\n\n\twantP := filepath.Join(\"../testdata/shards\", \"repo_v16.00000.zoekt\")\n\n\t// fields indexTime and id depend on time. 
For this test, we copy the fields from\n\t// the old shard.\n\t_, wantMetadata, err := zoekt.ReadMetadataPath(wantP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tb.indexTime = wantMetadata.IndexTime\n\tb.id = wantMetadata.ID\n\n\tif err := b.Finish(); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tgotP := filepath.Join(dir, \"repo_v16.00000.zoekt\")\n\n\tif *update {\n\t\tdata, err := os.ReadFile(gotP)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\terr = os.WriteFile(wantP, data, 0644)\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t\treturn\n\t}\n\n\tgot, err := os.ReadFile(gotP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\twant, err := os.ReadFile(wantP)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\tif d := cmp.Diff(want, got); d != \"\" {\n\t\tt.Errorf(\"mismatch (-want +got):\\n%s\", d)\n\t}\n}", "func sendJobsToDB(jobs [] *types.GithubJob) {\n\tclient, err := aws.CreateDynamoClient()\n\n\tif err != nil {\n\t\tloggly.Error(err)\n\t\treturn\n\t}\n\n\tfor _, j := range jobs {\n\t\terr := aws.PutItem(client, TableName, *j)\n\t\tif err != nil {\n\t\t\tloggly.Error(err)\n\t\t\tbreak\n\t\t}\n\t}\n}", "func runReleaseCases(t *testing.T, tests []releaseCase, rcmd releaseCmd) {\n\tvar buf bytes.Buffer\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tc := &helm.FakeClient{Rels: tt.rels}\n\t\t\tcmd := rcmd(c, &buf)\n\t\t\tcmd.ParseFlags(tt.flags)\n\t\t\terr := cmd.RunE(cmd, tt.args)\n\t\t\tif (err != nil) != tt.err {\n\t\t\t\tt.Errorf(\"expected error, got '%v'\", err)\n\t\t\t}\n\t\t\tre := regexp.MustCompile(tt.expected)\n\t\t\tif !re.Match(buf.Bytes()) {\n\t\t\t\tt.Errorf(\"expected\\n%q\\ngot\\n%q\", tt.expected, buf.String())\n\t\t\t}\n\t\t\tbuf.Reset()\n\t\t})\n\t}\n}", "func (graph *graphRW) Release() {\n\tif graph.record && graph.parent.recordOldRevs {\n\t\tgraph.parent.rwLock.Lock()\n\t\tdefer graph.parent.rwLock.Unlock()\n\n\t\tdestGraph := graph.parent.graph\n\t\tfor key, dataUpdated := range graph.newRevs {\n\t\t\tnode, exists := destGraph.nodes[key]\n\t\t\tif _, hasTimeline := destGraph.timeline[key]; !hasTimeline {\n\t\t\t\tif !exists {\n\t\t\t\t\t// deleted, but never recorded => skip\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tdestGraph.timeline[key] = []*RecordedNode{}\n\t\t\t}\n\t\t\trecords := destGraph.timeline[key]\n\t\t\tif len(records) > 0 {\n\t\t\t\tlastRecord := records[len(records)-1]\n\t\t\t\tif lastRecord.Until.IsZero() {\n\t\t\t\t\tlastRecord.Until = time.Now()\n\t\t\t\t}\n\t\t\t}\n\t\t\tif exists {\n\t\t\t\tdestGraph.timeline[key] = append(records,\n\t\t\t\t\tdestGraph.recordNode(node, !dataUpdated))\n\t\t\t}\n\t\t}\n\n\t\t// remove past revisions from the log which are too old to keep\n\t\tnow := time.Now()\n\t\tsinceLastTrimming := now.Sub(graph.parent.lastRevTrimming)\n\t\tif sinceLastTrimming >= oldRevsTrimmingPeriod {\n\t\t\tfor key, records := range destGraph.timeline {\n\t\t\t\tvar i, j int // i = first after init period, j = first after init period to keep\n\t\t\t\tfor i = 0; i < len(records); i++ {\n\t\t\t\t\tsinceStart := records[i].Since.Sub(graph.parent.startTime)\n\t\t\t\t\tif sinceStart > graph.parent.permanentInitPeriod {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tfor j = i; j < len(records); j++ {\n\t\t\t\t\tif records[j].Until.IsZero() {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t\telapsed := now.Sub(records[j].Until)\n\t\t\t\t\tif elapsed <= graph.parent.recordAgeLimit {\n\t\t\t\t\t\tbreak\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif j > i {\n\t\t\t\t\tcopy(records[i:], records[j:])\n\t\t\t\t\tnewLen := len(records) - (j - 
i)\n\t\t\t\t\tfor k := newLen; k < len(records); k++ {\n\t\t\t\t\t\trecords[k] = nil\n\t\t\t\t\t}\n\t\t\t\t\tdestGraph.timeline[key] = records[:newLen]\n\t\t\t\t}\n\t\t\t\tif len(destGraph.timeline[key]) == 0 {\n\t\t\t\t\tdelete(destGraph.timeline, key)\n\t\t\t\t}\n\t\t\t}\n\t\t\tgraph.parent.lastRevTrimming = now\n\t\t}\n\t}\n}", "func (a *Agent) RollbackRelease(\n\tctx context.Context,\n\tname string,\n\tversion int,\n) error {\n\tctx, span := telemetry.NewSpan(ctx, \"helm-rollback-release\")\n\tdefer span.End()\n\n\ttelemetry.WithAttributes(span,\n\t\ttelemetry.AttributeKV{Key: \"name\", Value: name},\n\t\ttelemetry.AttributeKV{Key: \"version\", Value: version},\n\t)\n\n\tcmd := action.NewRollback(a.ActionConfig)\n\tcmd.Version = version\n\treturn cmd.Run(name)\n}", "func TestManifestSyncJob(t *testing.T) {\n\tforAllReplicaTypes(t, func(strategy string) {\n\t\ttest.WithRoundTripper(func(_ *test.RoundTripper) {\n\t\t\tj1, s1 := setup(t)\n\t\t\tj2, s2 := setupReplica(t, s1, strategy)\n\t\t\ts1.Clock.StepBy(1 * time.Hour)\n\t\t\treplicaToken := s2.GetToken(t, \"repository:test1/foo:pull\")\n\t\t\tsyncManifestsJob1 := j1.ManifestSyncJob(s1.Registry)\n\t\t\tsyncManifestsJob2 := j2.ManifestSyncJob(s2.Registry)\n\n\t\t\t//upload some manifests...\n\t\t\timages := make([]test.Image, 4)\n\t\t\tfor idx := range images {\n\t\t\t\timage := test.GenerateImage(\n\t\t\t\t\ttest.GenerateExampleLayer(int64(10*idx+1)),\n\t\t\t\t\ttest.GenerateExampleLayer(int64(10*idx+2)),\n\t\t\t\t)\n\t\t\t\timages[idx] = image\n\n\t\t\t\t//...to the primary account...\n\t\t\t\timage.MustUpload(t, s1, fooRepoRef, \"\")\n\n\t\t\t\t//...and most of them also to the replica account (to simulate replication having taken place)\n\t\t\t\tif idx != 0 {\n\t\t\t\t\tassert.HTTPRequest{\n\t\t\t\t\t\tMethod: \"GET\",\n\t\t\t\t\t\tPath: fmt.Sprintf(\"/v2/test1/foo/manifests/%s\", image.Manifest.Digest),\n\t\t\t\t\t\tHeader: map[string]string{\"Authorization\": \"Bearer \" + replicaToken},\n\t\t\t\t\t\tExpectStatus: http.StatusOK,\n\t\t\t\t\t\tExpectBody: assert.ByteData(image.Manifest.Contents),\n\t\t\t\t\t}.Check(t, s2.Handler)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t//some of the replicated images are also tagged\n\t\t\tfor _, db := range []*keppel.DB{s1.DB, s2.DB} {\n\t\t\t\tfor _, tagName := range []string{\"latest\", \"other\"} {\n\t\t\t\t\tmustExec(t, db,\n\t\t\t\t\t\t`INSERT INTO tags (repo_id, name, digest, pushed_at) VALUES (1, $1, $2, $3)`,\n\t\t\t\t\t\ttagName,\n\t\t\t\t\t\timages[1].Manifest.Digest,\n\t\t\t\t\t\ts1.Clock.Now(),\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t//also setup an image list manifest containing some of those images (so that we have\n\t\t\t//some manifest-manifest refs to play with)\n\t\t\timageList := test.GenerateImageList(images[1], images[2])\n\t\t\timageList.MustUpload(t, s1, fooRepoRef, \"\")\n\t\t\t//this one is replicated as well\n\t\t\tassert.HTTPRequest{\n\t\t\t\tMethod: \"GET\",\n\t\t\t\tPath: fmt.Sprintf(\"/v2/test1/foo/manifests/%s\", imageList.Manifest.Digest),\n\t\t\t\tHeader: map[string]string{\"Authorization\": \"Bearer \" + replicaToken},\n\t\t\t\tExpectStatus: http.StatusOK,\n\t\t\t\tExpectBody: assert.ByteData(imageList.Manifest.Contents),\n\t\t\t}.Check(t, s2.Handler)\n\n\t\t\t//set a well-known last_pulled_at timestamp on all manifests in the primary\n\t\t\t//DB (we will later verify that this was not touched by the manifest sync)\n\t\t\tinitialLastPulledAt := time.Unix(42, 0)\n\t\t\tmustExec(t, s1.DB, `UPDATE manifests SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\tmustExec(t, 
s1.DB, `UPDATE tags SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\t//we set last_pulled_at to NULL on images[3] to verify that we can merge\n\t\t\t//NULL with a non-NULL last_pulled_at from the replica side\n\t\t\tmustExec(t, s1.DB, `UPDATE manifests SET last_pulled_at = NULL WHERE digest = $1`, images[3].Manifest.Digest)\n\n\t\t\t//as an exception, in the on_first_use method, we can and want to merge\n\t\t\t//last_pulled_at timestamps from the replica into those of the primary, so\n\t\t\t//set some of those to verify the merging behavior\n\t\t\tearlierLastPulledAt := initialLastPulledAt.Add(-10 * time.Second)\n\t\t\tlaterLastPulledAt := initialLastPulledAt.Add(+10 * time.Second)\n\t\t\tmustExec(t, s2.DB, `UPDATE manifests SET last_pulled_at = NULL`)\n\t\t\tmustExec(t, s2.DB, `UPDATE tags SET last_pulled_at = NULL`)\n\t\t\tmustExec(t, s2.DB, `UPDATE manifests SET last_pulled_at = $1 WHERE digest = $2`, earlierLastPulledAt, images[1].Manifest.Digest)\n\t\t\tmustExec(t, s2.DB, `UPDATE manifests SET last_pulled_at = $1 WHERE digest = $2`, laterLastPulledAt, images[2].Manifest.Digest)\n\t\t\tmustExec(t, s2.DB, `UPDATE manifests SET last_pulled_at = $1 WHERE digest = $2`, initialLastPulledAt, images[3].Manifest.Digest)\n\t\t\tmustExec(t, s2.DB, `UPDATE tags SET last_pulled_at = $1 WHERE name = $2`, earlierLastPulledAt, \"latest\")\n\t\t\tmustExec(t, s2.DB, `UPDATE tags SET last_pulled_at = $1 WHERE name = $2`, laterLastPulledAt, \"other\")\n\n\t\t\ttr, tr0 := easypg.NewTracker(t, s2.DB.DbMap.Db)\n\t\t\ttr0.AssertEqualToFile(fmt.Sprintf(\"fixtures/manifest-sync-setup-%s.sql\", strategy))\n\t\t\ttrForPrimary, _ := easypg.NewTracker(t, s1.DB.DbMap.Db)\n\n\t\t\t//ManifestSyncJob on the primary registry should have nothing to do\n\t\t\t//since there are no replica accounts\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob1.ProcessOne(s1.Ctx))\n\t\t\ttrForPrimary.DBChanges().AssertEmpty()\n\t\t\t//ManifestSyncJob on the secondary registry should set the\n\t\t\t//ManifestsSyncedAt timestamp on the repo, but otherwise not do anything\n\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\tUPDATE repos SET next_manifest_sync_at = %d WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t`,\n\t\t\t\ts1.Clock.Now().Add(1*time.Hour).Unix(),\n\t\t\t)\n\t\t\t//second run should not have anything else to do\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\n\t\t\t//in on_first_use, the sync should have merged the replica's last_pulled_at\n\t\t\t//timestamps into the primary, i.e. 
primary.last_pulled_at =\n\t\t\t//max(primary.last_pulled_at, replica.last_pulled_at); this only touches\n\t\t\t//the DB when the replica's last_pulled_at is after the primary's\n\t\t\tif strategy == \"on_first_use\" {\n\t\t\t\ttrForPrimary.DBChanges().AssertEqualf(`\n\t\t\t\t\t\tUPDATE manifests SET last_pulled_at = %[1]d WHERE repo_id = 1 AND digest = '%[2]s';\n\t\t\t\t\t\tUPDATE manifests SET last_pulled_at = %[3]d WHERE repo_id = 1 AND digest = '%[4]s';\n\t\t\t\t\t\tUPDATE tags SET last_pulled_at = %[3]d WHERE repo_id = 1 AND name = 'other';\n\t\t\t\t\t`,\n\t\t\t\t\tinitialLastPulledAt.Unix(),\n\t\t\t\t\timages[3].Manifest.Digest,\n\t\t\t\t\tlaterLastPulledAt.Unix(),\n\t\t\t\t\timages[2].Manifest.Digest,\n\t\t\t\t)\n\t\t\t\t//reset all timestamps to prevent divergences in the rest of the test\n\t\t\t\tmustExec(t, s1.DB, `UPDATE manifests SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\t\tmustExec(t, s1.DB, `UPDATE tags SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\t\tmustExec(t, s2.DB, `UPDATE manifests SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\t\tmustExec(t, s2.DB, `UPDATE tags SET last_pulled_at = $1`, initialLastPulledAt)\n\t\t\t\ttr.DBChanges() // skip these changes\n\t\t\t} else {\n\t\t\t\ttrForPrimary.DBChanges().AssertEmpty()\n\t\t\t}\n\n\t\t\t//delete a manifest on the primary side (this one is a simple image not referenced by anyone else)\n\t\t\ts1.Clock.StepBy(2 * time.Hour)\n\t\t\tmustExec(t, s1.DB,\n\t\t\t\t`DELETE FROM manifests WHERE digest = $1`,\n\t\t\t\timages[3].Manifest.Digest,\n\t\t\t)\n\t\t\t//move a tag on the primary side\n\t\t\tmustExec(t, s1.DB,\n\t\t\t\t`UPDATE tags SET digest = $1 WHERE name = 'latest'`,\n\t\t\t\timages[2].Manifest.Digest,\n\t\t\t)\n\n\t\t\t//again, nothing to do on the primary side\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob1.ProcessOne(s1.Ctx))\n\t\t\t//ManifestSyncJob on the replica side should not do anything while\n\t\t\t//the account is in maintenance; only the timestamp is updated to make sure\n\t\t\t//that the job loop progresses to the next repo\n\t\t\tmustExec(t, s2.DB, `UPDATE accounts SET in_maintenance = TRUE`)\n\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\tUPDATE accounts SET in_maintenance = TRUE WHERE name = 'test1';\n\t\t\t\t\tUPDATE repos SET next_manifest_sync_at = %d WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t`,\n\t\t\t\ts1.Clock.Now().Add(1*time.Hour).Unix(),\n\t\t\t)\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\n\t\t\t//end maintenance\n\t\t\tmustExec(t, s2.DB, `UPDATE accounts SET in_maintenance = FALSE`)\n\t\t\ttr.DBChanges().AssertEqual(`UPDATE accounts SET in_maintenance = FALSE WHERE name = 'test1';`)\n\n\t\t\t//test that replication from external uses the inbound cache\n\t\t\tif strategy == \"from_external_on_first_use\" {\n\t\t\t\t//after the end of the maintenance, we would naively expect\n\t\t\t\t//ManifestSyncJob to actually replicate the deletion, BUT we have an\n\t\t\t\t//inbound cache with a lifetime of 6 hours, so actually nothing should\n\t\t\t\t//happen (only the tag gets synced, which includes a validation of the\n\t\t\t\t//referenced manifest)\n\t\t\t\ts1.Clock.StepBy(2 * time.Hour)\n\t\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\t\tUPDATE manifests SET validated_at = %d WHERE repo_id = 1 AND digest = '%s';\n\t\t\t\t\t\tUPDATE repos SET 
next_manifest_sync_at = %d WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t\t`,\n\t\t\t\t\ts1.Clock.Now().Unix(),\n\t\t\t\t\timages[1].Manifest.Digest,\n\t\t\t\t\ts1.Clock.Now().Add(1*time.Hour).Unix(),\n\t\t\t\t)\n\t\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\t\ttr.DBChanges().AssertEmpty()\n\t\t\t}\n\n\t\t\t//From now on, we will go in clock increments of 7 hours to force the\n\t\t\t//inbound cache to never hit.\n\n\t\t\t//after the end of the maintenance, ManifestSyncJob on the replica\n\t\t\t//side should delete the same manifest that we deleted in the primary\n\t\t\t//account, and also replicate the tag change (which includes a validation\n\t\t\t//of the tagged manifests)\n\t\t\ts1.Clock.StepBy(7 * time.Hour)\n\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\tmanifestValidationBecauseOfExistingTag := fmt.Sprintf(\n\t\t\t\t//this validation is skipped in \"on_first_use\" because the respective tag is unchanged\n\t\t\t\t`UPDATE manifests SET validated_at = %d WHERE repo_id = 1 AND digest = '%s';`+\"\\n\",\n\t\t\t\ts1.Clock.Now().Unix(), images[1].Manifest.Digest,\n\t\t\t)\n\t\t\tif strategy == \"on_first_use\" {\n\t\t\t\tmanifestValidationBecauseOfExistingTag = \"\"\n\t\t\t}\n\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 7;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 8;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 9;\n\t\t\t\t\tDELETE FROM manifest_contents WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM manifests WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\t%[5]sUPDATE manifests SET validated_at = %[2]d WHERE repo_id = 1 AND digest = '%[3]s';\n\t\t\t\t\tUPDATE repos SET next_manifest_sync_at = %[4]d WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t\tUPDATE tags SET digest = '%[3]s', pushed_at = %[2]d, last_pulled_at = NULL WHERE repo_id = 1 AND name = 'latest';\n\t\t\t\t\tDELETE FROM trivy_security_info WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t`,\n\t\t\t\timages[3].Manifest.Digest, //the deleted manifest\n\t\t\t\ts1.Clock.Now().Unix(),\n\t\t\t\timages[2].Manifest.Digest, //the manifest now tagged as \"latest\"\n\t\t\t\ts1.Clock.Now().Add(1*time.Hour).Unix(),\n\t\t\t\tmanifestValidationBecauseOfExistingTag,\n\t\t\t)\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\n\t\t\t//cause a deliberate inconsistency on the primary side: delete a manifest that\n\t\t\t//*is* referenced by another manifest (this requires deleting the\n\t\t\t//manifest-manifest ref first, otherwise the DB will complain)\n\t\t\ts1.Clock.StepBy(7 * time.Hour)\n\t\t\tmustExec(t, s1.DB,\n\t\t\t\t`DELETE FROM manifest_manifest_refs WHERE child_digest = $1`,\n\t\t\t\timages[2].Manifest.Digest,\n\t\t\t)\n\t\t\tmustExec(t, s1.DB,\n\t\t\t\t`DELETE FROM manifests WHERE digest = $1`,\n\t\t\t\timages[2].Manifest.Digest,\n\t\t\t)\n\n\t\t\t//ManifestSyncJob should now complain since it wants to delete\n\t\t\t//images[2].Manifest, but it can't because of the manifest-manifest ref to\n\t\t\t//the image list\n\t\t\texpectedError := fmt.Sprintf(\"cannot remove deleted manifests [%s] in repo test1/foo because they are still being referenced by other manifests (this smells like an inconsistency on the primary 
account)\",\n\t\t\t\timages[2].Manifest.Digest,\n\t\t\t)\n\t\t\texpectError(t, expectedError, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\t//the tag sync went through though, so the tag should be gone (the manifest\n\t\t\t//validation is because of the \"other\" tag that still exists)\n\t\t\tmanifestValidationBecauseOfExistingTag = fmt.Sprintf(\n\t\t\t\t//this validation is skipped in \"on_first_use\" because the respective tag is unchanged\n\t\t\t\t`UPDATE manifests SET validated_at = %d WHERE repo_id = 1 AND digest = '%s';`+\"\\n\",\n\t\t\t\ts1.Clock.Now().Unix(), images[1].Manifest.Digest,\n\t\t\t)\n\t\t\tif strategy == \"on_first_use\" {\n\t\t\t\tmanifestValidationBecauseOfExistingTag = \"\"\n\t\t\t}\n\t\t\ttr.DBChanges().AssertEqualf(`%sDELETE FROM tags WHERE repo_id = 1 AND name = 'latest';`,\n\t\t\t\tmanifestValidationBecauseOfExistingTag,\n\t\t\t)\n\n\t\t\t//also remove the image list manifest on the primary side\n\t\t\ts1.Clock.StepBy(7 * time.Hour)\n\t\t\tmustExec(t, s1.DB,\n\t\t\t\t`DELETE FROM manifests WHERE digest = $1`,\n\t\t\t\timageList.Manifest.Digest,\n\t\t\t)\n\t\t\t//and remove the other tag (this is required for the 404 error message in the next step but one to be deterministic)\n\t\t\tmustExec(t, s1.DB, `DELETE FROM tags`)\n\n\t\t\t//this makes the primary side consistent again, so ManifestSyncJob\n\t\t\t//should succeed now and remove both deleted manifests from the DB\n\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 4;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 5;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 6;\n\t\t\t\t\tDELETE FROM manifest_contents WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM manifest_contents WHERE repo_id = 1 AND digest = '%[2]s';\n\t\t\t\t\tDELETE FROM manifest_manifest_refs WHERE repo_id = 1 AND parent_digest = '%[2]s' AND child_digest = '%[3]s';\n\t\t\t\t\tDELETE FROM manifest_manifest_refs WHERE repo_id = 1 AND parent_digest = '%[2]s' AND child_digest = '%[1]s';\n\t\t\t\t\tDELETE FROM manifests WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM manifests WHERE repo_id = 1 AND digest = '%[2]s';\n\t\t\t\t\tUPDATE repos SET next_manifest_sync_at = %[4]d WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t\tDELETE FROM tags WHERE repo_id = 1 AND name = 'other';\n\t\t\t\t\tDELETE FROM trivy_security_info WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM trivy_security_info WHERE repo_id = 1 AND digest = '%[2]s';\n\t\t\t\t`,\n\t\t\t\timages[2].Manifest.Digest,\n\t\t\t\timageList.Manifest.Digest,\n\t\t\t\timages[1].Manifest.Digest,\n\t\t\t\ts1.Clock.Now().Add(1*time.Hour).Unix(),\n\t\t\t)\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\n\t\t\t//replace the primary registry's API with something that just answers 404 most of the time\n\t\t\t//\n\t\t\t//(We do allow the /keppel/v1/auth endpoint to work properly because\n\t\t\t//otherwise the error messages are not reproducible between passes.)\n\t\t\ts1.Clock.StepBy(7 * time.Hour)\n\t\t\thttp.DefaultTransport.(*test.RoundTripper).Handlers[\"registry.example.org\"] = answerMostWith404(s1.Handler)\n\t\t\t//This is particularly devious since 404 is returned by the GET endpoint for\n\t\t\t//a manifest when the manifest was deleted. 
We want to check that the next\n\t\t\t//ManifestSyncJob understands that this is a network issue and not\n\t\t\t//caused by the manifest getting deleted, since the 404-generating endpoint\n\t\t\t//does not render a proper MANIFEST_UNKNOWN error.\n\t\t\texpectedError = fmt.Sprintf(\"cannot check existence of manifest test1/foo/%s on primary account: during GET https://registry.example.org/v2/test1/foo/manifests/%[1]s: expected status 200, but got 404 Not Found\",\n\t\t\t\timages[1].Manifest.Digest, //the only manifest that is left\n\t\t\t)\n\t\t\texpectError(t, expectedError, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\n\t\t\t//check that the manifest sync did not update the last_pulled_at timestamps\n\t\t\t//in the primary DB (even though there were GET requests for the manifests\n\t\t\t//there)\n\t\t\tvar lastPulledAt time.Time\n\t\t\texpectSuccess(t, s1.DB.DbMap.QueryRow(`SELECT MAX(last_pulled_at) FROM manifests`).Scan(&lastPulledAt))\n\t\t\tif !lastPulledAt.Equal(initialLastPulledAt) {\n\t\t\t\tt.Error(\"last_pulled_at timestamps on the primary side were touched\")\n\t\t\t\tt.Logf(\" expected = %#v\", initialLastPulledAt)\n\t\t\t\tt.Logf(\" actual = %#v\", lastPulledAt)\n\t\t\t}\n\n\t\t\t//flip back to the actual primary registry's API\n\t\t\thttp.DefaultTransport.(*test.RoundTripper).Handlers[\"registry.example.org\"] = s1.Handler\n\t\t\t//delete the entire repository on the primary\n\t\t\ts1.Clock.StepBy(7 * time.Hour)\n\t\t\tmustExec(t, s1.DB, `DELETE FROM manifests`)\n\t\t\tmustExec(t, s1.DB, `DELETE FROM repos`)\n\t\t\t//the manifest sync should reflect the repository deletion on the replica\n\t\t\texpectSuccess(t, syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEqualf(`\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 1 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 2 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 3 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 4 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 5 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 6 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 7 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 8 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM blob_mounts WHERE blob_id = 9 AND repo_id = 1;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 1;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 2;\n\t\t\t\t\tDELETE FROM manifest_blob_refs WHERE repo_id = 1 AND digest = '%[1]s' AND blob_id = 3;\n\t\t\t\t\tDELETE FROM manifest_contents WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM manifests WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t\tDELETE FROM repos WHERE id = 1 AND account_name = 'test1' AND name = 'foo';\n\t\t\t\t\tDELETE FROM trivy_security_info WHERE repo_id = 1 AND digest = '%[1]s';\n\t\t\t\t`,\n\t\t\t\timages[1].Manifest.Digest,\n\t\t\t)\n\t\t\texpectError(t, sql.ErrNoRows.Error(), syncManifestsJob2.ProcessOne(s2.Ctx))\n\t\t\ttr.DBChanges().AssertEmpty()\n\t\t})\n\t})\n}", "func SwitchRelease(binToSlice string, helmBinPath string, helmVersionPath string) error {\n\n\t// Delete actual symlink\n\trmLn := &BashCmd{\n\t\tCmd: \"find\",\n\t\tArgs: []string{\"-L\", \".\", \"-xtype\", \"l\", \"-delete\"},\n\t\tExecPath: helmBinPath,\n\t}\n\t_, err := ExecBashCmd(rmLn)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create 
symlink to helm new version\n\tln := &BashCmd{\n\t\tCmd: \"ln\",\n\t\tArgs: []string{\"-s\", fmt.Sprintf(\"%s/helm-%s\", helmVersionPath, binToSlice), fmt.Sprintf(\"%s/helm\", helmBinPath)},\n\t}\n\t_, err = ExecBashCmd(ln)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func deployJob(cmd *cobra.Command, args []string) {\n\tport, _ := cmd.Flags().GetString(\"port\")\n\tif port == \"\" {\n\t\tport = DefaultPort\n\t}\n\tname, _ := cmd.Flags().GetString(\"file\")\n\tyaml := unmarsharlYaml(readJobFile(name))\n if !semanticCheck(yaml) {\n os.Exit(2)\n }\n\tcurrentDir, _ := os.Getwd()\n\tvar job NewJob\n\tjob.ID = yaml.Job.ID\n\tjob.UID = fmt.Sprint(syscall.Getuid())\n\tgroup, _ := user.LookupGroup(\"odin\")\n\tgid, _ := strconv.Atoi(group.Gid)\n\tjob.GID = strconv.Itoa(gid)\n\tjob.Name = yaml.Job.Name\n\tjob.Description = yaml.Job.Description\n\tjob.File = currentDir + \"/\" + yaml.Job.File\n\tif yaml.Job.Language == \"go\" {\n\t\tjob.Language = yaml.Job.Language\n\t\tcmd := exec.Command(job.Language, \"build\", job.File)\n\t\tcmd.SysProcAttr = &syscall.SysProcAttr{}\n\t\t_, err := cmd.CombinedOutput()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(2)\n\t\t}\n\t\tjob.File = job.File[:len(job.File)-3]\n\t} else {\n\t\tjob.Language = yaml.Job.Language\n\t}\n\tjob.Schedule = getScheduleString(name, port)\n\tjobJSON, _ := json.Marshal(job)\n\tbody := makePostRequest(fmt.Sprintf(\"http://localhost%s/jobs/add\", port), bytes.NewBuffer(jobJSON))\n\tfmt.Println(body)\n}", "func TestJobDB(t sktest.TestingT, db JobDB) {\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\tmod := db.ModifiedJobsCh(ctx)\n\n\tjobs := <-mod\n\trequire.Equal(t, 0, len(jobs))\n\n\tnow := time.Now().Add(TS_RESOLUTION)\n\tj1 := types.MakeTestJob(now)\n\n\t// Insert the job.\n\trequire.NoError(t, db.PutJob(ctx, j1))\n\n\t// Ids must be URL-safe.\n\trequire.NotEqual(t, \"\", j1.Id)\n\trequire.Equal(t, url.QueryEscape(j1.Id), j1.Id)\n\n\t// Check that DbModified was set.\n\trequire.False(t, util.TimeIsZero(j1.DbModified))\n\tj1LastModified := j1.DbModified\n\n\t// Job can now be retrieved by Id.\n\tj1Again, err := db.GetJobById(ctx, j1.Id)\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, j1, j1Again)\n\n\t// Ensure that the job shows up in the modified list.\n\tfindModifiedJobs(t, mod, j1)\n\n\t// Ensure that the job shows up in the correct date ranges.\n\ttimeStart := time.Unix(0, 0).UTC()\n\tj1Before := j1.Created\n\tj1After := j1Before.Add(1 * TS_RESOLUTION)\n\ttimeEnd := now.Add(2 * TS_RESOLUTION)\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j1Before, \"\")\n\trequire.NoError(t, err)\n\trequire.Equal(t, 0, len(jobs))\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1Before, j1After, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1}, jobs)\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1After, timeEnd, \"\")\n\trequire.NoError(t, err)\n\trequire.Equal(t, 0, len(jobs))\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1Before, j1After, \"bogusRepo\")\n\trequire.NoError(t, err)\n\trequire.Equal(t, 0, len(jobs))\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1Before, j1After, j1.Repo)\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1}, jobs)\n\trequire.NotEqual(t, \"\", j1.Repo)\n\n\t// Insert two more jobs. 
Ensure at least 1 microsecond between job Created\n\t// times so that j1After != j2Before and j2After != j3Before.\n\tj2 := types.MakeTestJob(now.Add(TS_RESOLUTION))\n\tj3 := types.MakeTestJob(now.Add(2 * TS_RESOLUTION))\n\trequire.NoError(t, db.PutJobs(ctx, []*types.Job{j2, j3}))\n\n\t// Check that PutJobs assigned Ids.\n\trequire.NotEqual(t, \"\", j2.Id)\n\trequire.NotEqual(t, \"\", j3.Id)\n\t// Ids must be URL-safe.\n\trequire.Equal(t, url.QueryEscape(j2.Id), j2.Id)\n\trequire.Equal(t, url.QueryEscape(j3.Id), j3.Id)\n\n\t// Ensure that both jobs show up in the modified list.\n\tfindModifiedJobs(t, mod, j2, j3)\n\n\t// Make an update to j1 and j2. Ensure modified times change.\n\tj2LastModified := j2.DbModified\n\tj1.Status = types.JOB_STATUS_IN_PROGRESS\n\tj2.Status = types.JOB_STATUS_SUCCESS\n\trequire.NoError(t, db.PutJobs(ctx, []*types.Job{j1, j2}))\n\trequire.False(t, j1.DbModified.Equal(j1LastModified))\n\trequire.False(t, j2.DbModified.Equal(j2LastModified))\n\n\t// Ensure that both jobs show up in the modified list.\n\tfindModifiedJobs(t, mod, j1, j2)\n\n\t// Ensure that all jobs show up in the correct time ranges, in sorted order.\n\tj2Before := j2.Created\n\tj2After := j2Before.Add(1 * TS_RESOLUTION)\n\n\tj3Before := j3.Created\n\tj3After := j3Before.Add(1 * TS_RESOLUTION)\n\n\ttimeEnd = now.Add(3 * TS_RESOLUTION)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j1Before, \"\")\n\trequire.NoError(t, err)\n\trequire.Equal(t, 0, len(jobs))\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j1After, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j2Before, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j2After, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1, j2}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j3Before, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1, j2}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, j3After, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1, j2, j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, timeStart, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1, j2, j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1Before, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j1, j2, j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j1After, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j2, j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j2Before, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j2, j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j2After, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j3Before, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{j3}, jobs)\n\n\tjobs, err = db.GetJobsFromDateRange(ctx, j3After, timeEnd, \"\")\n\trequire.NoError(t, err)\n\tAssertDeepEqual(t, []*types.Job{}, jobs)\n}", "func (rj *ReplicationJob) Post() {\n\tvar data ReplicationReq\n\trj.DecodeJSONReq(&data)\n\tlog.Debugf(\"data: %+v\", data)\n\tp, err := dao.GetRepPolicy(data.PolicyID)\n\tif err != nil {\n\t\tlog.Errorf(\"Failed to get policy, error: %v\", 
err)\n\t\trj.RenderError(http.StatusInternalServerError, fmt.Sprintf(\"Failed to get policy, id: %d\", data.PolicyID))\n\t\treturn\n\t}\n\tif p == nil {\n\t\tlog.Errorf(\"Policy not found, id: %d\", data.PolicyID)\n\t\trj.RenderError(http.StatusNotFound, fmt.Sprintf(\"Policy not found, id: %d\", data.PolicyID))\n\t\treturn\n\t}\n\tif len(data.Repo) == 0 { // sync all repositories\n\t\trepoList, err := getRepoList(p.ProjectID)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to get repository list, project id: %d, error: %v\", p.ProjectID, err)\n\t\t\trj.RenderError(http.StatusInternalServerError, err.Error())\n\t\t\treturn\n\t\t}\n\t\tlog.Debugf(\"repo list: %v\", repoList)\n\t\tfor _, repo := range repoList {\n\t\t\terr := rj.addJob(repo, data.PolicyID, models.RepOpTransfer)\n\t\t\tif err != nil {\n\t\t\t\tlog.Errorf(\"Failed to insert job record, error: %v\", err)\n\t\t\t\trj.RenderError(http.StatusInternalServerError, err.Error())\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t} else { // sync a single repository\n\t\tvar op string\n\t\tif len(data.Operation) > 0 {\n\t\t\top = data.Operation\n\t\t} else {\n\t\t\top = models.RepOpTransfer\n\t\t}\n\t\terr := rj.addJob(data.Repo, data.PolicyID, op, data.TagList...)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Failed to insert job record, error: %v\", err)\n\t\t\trj.RenderError(http.StatusInternalServerError, err.Error())\n\t\t\treturn\n\t\t}\n\t}\n}", "func (w *worker) commit(env *environment, interval func(), update bool, start time.Time) error {\n\tif w.isRunning() {\n\t\tif interval != nil {\n\t\t\tinterval()\n\t\t}\n\t\t// Create a local environment copy, avoid the data race with snapshot state.\n\t\t// https://github.com/entropy/go-entropy/issues/24299\n\t\tenv := env.copy()\n\t\tblock, err := w.engine.FinalizeAndAssemble(w.chain, env.header, env.state, env.txs, env.unclelist(), env.receipts)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// If we're post merge, just ignore\n\t\tif !w.isTTDReached(block.Header()) {\n\t\t\tselect {\n\t\t\tcase w.taskCh <- &task{receipts: env.receipts, state: env.state, block: block, createdAt: time.Now()}:\n\t\t\t\tw.unconfirmed.Shift(block.NumberU64() - 1)\n\t\t\t\tlog.Info(\"Commit new sealing work\", \"number\", block.Number(), \"sealhash\", w.engine.SealHash(block.Header()),\n\t\t\t\t\t\"uncles\", len(env.uncles), \"txs\", env.tcount,\n\t\t\t\t\t\"gas\", block.GasUsed(), \"fees\", totalFees(block, env.receipts),\n\t\t\t\t\t\"elapsed\", common.PrettyDuration(time.Since(start)))\n\n\t\t\tcase <-w.exitCh:\n\t\t\t\tlog.Info(\"Worker has exited\")\n\t\t\t}\n\t\t}\n\t}\n\tif update {\n\t\tw.updateSnapshot(env)\n\t}\n\treturn nil\n}", "func CutRelease(release string, rc string, isFirstMinorRelease bool, backportRelease bool,\n\tisDryRun bool, legacy bool, server string, webapp string) *AppError {\n\tvar jobName string\n\tif legacy {\n\t\tjobName = Cfg.ReleaseJobLegacy\n\t} else {\n\t\tjobName = Cfg.ReleaseJob\n\t}\n\n\tisRunning, err := IsCutReleaseRunning(jobName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif isRunning {\n\t\treturn NewError(\"There is a release job running.\", nil)\n\t}\n\n\tshortRelease := release[:len(release)-2]\n\treleaseBranch := \"release-\" + shortRelease\n\n\tvar fullRelease string\n\tvar rcpart string\n\tif rc == \"\" {\n\t\tfullRelease = release\n\t\trcpart = \"\"\n\t} else {\n\t\tfullRelease = release + \"-\" + rc\n\t\trcpart = \"-\" + rc\n\t}\n\n\tisFirstMinorReleaseStr := \"false\"\n\tif isFirstMinorRelease {\n\t\tisFirstMinorReleaseStr = \"true\"\n\t}\n\n\tisDryRunStr := 
\"false\"\n\tif isDryRun {\n\t\tisDryRunStr = \"true\"\n\t}\n\n\tisDotReleaseStr := \"false\"\n\tif backportRelease {\n\t\tisDotReleaseStr = \"true\"\n\t}\n\n\tparameters := map[string]string{\n\t\t\"MM_VERSION\": release,\n\t\t\"MM_RC\": rcpart,\n\t\t\"IS_FIRST_MINOR_RELEASE\": isFirstMinorReleaseStr,\n\t\t\"IS_DRY_RUN\": isDryRunStr,\n\t\t\"IS_DOT_RELEASE\": isDotReleaseStr,\n\t\t\"IS_BACKPORT\": isDotReleaseStr,\n\t\t\"PIP_BRANCH\": releaseBranch,\n\t}\n\n\tif server != \"\" {\n\t\tparameters[\"MM_BUILDER_SERVER_DOCKER\"] = server\n\t}\n\n\tif webapp != \"\" {\n\t\tparameters[\"MM_BUILDER_WEBAPP_DOCKER\"] = webapp\n\t}\n\n\t// We want to return so the user knows the build has started.\n\t// Build jobs should report their own failure.\n\tgo func() {\n\t\tresult, err := RunJobWaitForResult(\n\t\t\tjobName,\n\t\t\tparameters)\n\t\tif err != nil || result != gojenkins.STATUS_SUCCESS {\n\t\t\tLogError(\"Release Job failed. Version=\" + fullRelease + \" err= \" + err.Error() + \" Jenkins result= \" + result)\n\t\t\treturn\n\t\t}\n\n\t\t// If Release was success trigger the Rctesting job to update\n\t\tLogInfo(\"Release Job Status: \" + result)\n\t\tif !backportRelease {\n\t\t\tLogInfo(\"Will trigger Job: \" + Cfg.RCTestingJob)\n\t\t\tRunJobParameters(Cfg.RCTestingJob, map[string]string{\"LONG_RELEASE\": fullRelease}, Cfg.CIServerJenkinsUserName, Cfg.CIServerJenkinsToken, Cfg.CIServerJenkinsURL)\n\n\t\t\t// Only update the CI servers and community if this is the latest release\n\t\t\tLogInfo(\"Setting CI Servers\")\n\t\t\tSetCIServerBranch(releaseBranch)\n\t\t}\n\t}()\n\n\treturn nil\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\tcustomSetDefaults(a, b)\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig\", a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig)\n\t} else if a.ko.Spec.HyperParameterTuningJobConfig != nil && b.ko.Spec.HyperParameterTuningJobConfig != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil {\n\t\t\t\tif 
*a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil && b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil && b.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil {\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.Strategy != *b.ko.Spec.HyperParameterTuningJobConfig.Strategy {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, 
b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil && b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil {\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != *b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != *b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t} else if a.ko.Spec.HyperParameterTuningJobName != nil && b.ko.Spec.HyperParameterTuningJobName != nil {\n\t\tif *a.ko.Spec.HyperParameterTuningJobName != *b.ko.Spec.HyperParameterTuningJobName {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition, b.ko.Spec.TrainingJobDefinition) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinition\", a.ko.Spec.TrainingJobDefinition, 
b.ko.Spec.TrainingJobDefinition)\n\t} else if a.ko.Spec.TrainingJobDefinition != nil && b.ko.Spec.TrainingJobDefinition != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions)\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil && 
b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != *b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != *b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.DefinitionName != nil && b.ko.Spec.TrainingJobDefinition.DefinitionName != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.DefinitionName != *b.ko.Spec.TrainingJobDefinition.DefinitionName {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption) 
{\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil && b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != *b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil && b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != *b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil && b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != *b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil && b.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, 
b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.InputDataConfig\", a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig, b.ko.Spec.TrainingJobDefinition.ResourceConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig\", a.ko.Spec.TrainingJobDefinition.ResourceConfig, 
b.ko.Spec.TrainingJobDefinition.ResourceConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB 
{\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.RoleARN != nil && b.ko.Spec.TrainingJobDefinition.RoleARN != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.RoleARN != *b.ko.Spec.TrainingJobDefinition.RoleARN {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil && b.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil {\n\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition\", a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, 
b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective\", a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != *b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != *b.ko.Spec.TrainingJobDefinition.TuningObjective.Type {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig\", a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.VPCConfig != nil && b.ko.Spec.TrainingJobDefinition.VPCConfig != nil {\n\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs\", a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs)\n\t\t\t}\n\t\t\tif 
!ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.Subnets\", a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets)\n\t\t\t}\n\t\t}\n\t}\n\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinitions\", a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions)\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig) {\n\t\tdelta.Add(\"Spec.WarmStartConfig\", a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig)\n\t} else if a.ko.Spec.WarmStartConfig != nil && b.ko.Spec.WarmStartConfig != nil {\n\t\tif !reflect.DeepEqual(a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs) {\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.ParentHyperParameterTuningJobs\", a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType) {\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t} else if a.ko.Spec.WarmStartConfig.WarmStartType != nil && b.ko.Spec.WarmStartConfig.WarmStartType != nil {\n\t\t\tif *a.ko.Spec.WarmStartConfig.WarmStartType != *b.ko.Spec.WarmStartConfig.WarmStartType {\n\t\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn delta\n}", "func main() {\n\t// get a bucket and mc.Client connection\n\tbucket, err := getTestConnection(\"default\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// start upr feed\n\tfeed, err := bucket.StartUprFeed(\"index\" /*name*/, 0)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor i := 0; i < vbcount; i++ {\n\t\terr := feed.UprRequestStream(\n\t\t\tuint16(i) /*vbno*/, uint16(0) /*opaque*/, 0 /*flag*/, 0, /*vbuuid*/\n\t\t\t0 /*seqStart*/, 0xFFFFFFFFFFFFFFFF /*seqEnd*/, 0 /*snaps*/, 0)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"%s\", err.Error())\n\t\t}\n\t}\n\n\tvbseqNo := receiveMutations(feed, 20000)\n\n\tvbList := make([]uint16, 0)\n\tfor i := 0; i < vbcount; i++ {\n\t\tvbList = append(vbList, uint16(i))\n\t}\n\tfailoverlogMap, err := bucket.GetFailoverLogs(vbList)\n\tif err != nil {\n\t\tlog.Printf(\" error in failover log request %s\", err.Error())\n\n\t}\n\n\t// get a bucket and mc.Client connection\n\tbucket1, err := getTestConnection(\"default\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// add mutations to the bucket\n\tvar mutationCount = 5000\n\taddKVset(bucket1, mutationCount)\n\n\tlog.Println(\"Restarting ....\")\n\tfeed, err = bucket.StartUprFeed(\"index\" /*name*/, 0)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor i := 0; i < vbcount; i++ {\n\t\tlog.Printf(\"Vbucket %d High sequence number %d, Snapshot end sequence %d\", i, vbseqNo[i][0], vbseqNo[i][1])\n\t\tfailoverLog := failoverlogMap[uint16(i)]\n\t\terr := feed.UprRequestStream(\n\t\t\tuint16(i) /*vbno*/, uint16(0) /*opaque*/, 0, /*flag*/\n\t\t\tfailoverLog[0][0], /*vbuuid*/\n\t\t\tvbseqNo[i][0] /*seqStart*/, 0xFFFFFFFFFFFFFFFF, /*seqEnd*/\n\t\t\t0 /*snaps*/, vbseqNo[i][1])\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"%s\", 
err.Error())\n\t\t}\n\t}\n\n\tvar e, f *memcached.UprEvent\n\tvar mutations int\nloop:\n\tfor {\n\t\tselect {\n\t\tcase f = <-feed.C:\n\t\tcase <-time.After(time.Second):\n\t\t\tbreak loop\n\t\t}\n\n\t\tif f.Opcode == gomemcached.UPR_MUTATION {\n\t\t\tvbseqNo[f.VBucket][0] = f.Seqno\n\t\t\te = f\n\t\t\tmutations += 1\n\t\t}\n\t}\n\n\tlog.Printf(\" got %d mutations\", mutations)\n\n\texptSeq := vbseqNo[e.VBucket][0] + 1\n\n\tif e.Seqno != exptSeq {\n\t\tfmt.Printf(\"Expected seqno %v, received %v\", exptSeq+1, e.Seqno)\n\t\t//panic(err)\n\t}\n\tfeed.Close()\n}", "func (j *CronJob) writeJobsToEtcd() {\n\tvar jobs []db.CronJob\n\terr := j.db.Preload(\"Timers\").Where(\"enable = ?\", true).Find(&jobs).Error\n\tif err != nil {\n\t\txlog.Error(\"Cronjob.removeInvalidJob: query jobs failed\", xlog.String(\"err\", err.Error()))\n\t\treturn\n\t}\n\n\tfor _, job := range jobs {\n\t\t_ = j.dispatcher.dispatchJob(makeJob(job))\n\t}\n}", "func Run(versionInfo VersionInfo, logger Logger) error {\n\tlogger.Printf(\"Commitsar version: %s\\t Built on: %s\", versionInfo.Version, versionInfo.Date)\n\treturn nil\n}", "func TestNewRevisionFundChecks(t *testing.T) {\n\t// helper func for revisions\n\trevWithValues := func(renterFunds, hostCollateralAvailable uint64) types.FileContractRevision {\n\t\tvalidOuts := make([]types.SiacoinOutput, 2)\n\t\tmissedOuts := make([]types.SiacoinOutput, 3)\n\n\t\t// funds remaining for renter, and payout to host.\n\t\tvalidOuts[0].Value = types.NewCurrency64(renterFunds)\n\t\tvalidOuts[1].Value = types.NewCurrency64(0)\n\n\t\t// Void payout from renter\n\t\tmissedOuts[0].Value = types.NewCurrency64(renterFunds)\n\n\t\t// Collateral\n\t\tmissedOuts[1].Value = types.NewCurrency64(hostCollateralAvailable)\n\n\t\treturn types.FileContractRevision{\n\t\t\tNewValidProofOutputs: validOuts,\n\t\t\tNewMissedProofOutputs: missedOuts,\n\t\t}\n\t}\n\n\t// Cost is less than renter funds should be okay.\n\t_, err := newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(99))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Cost equal to renter funds should be okay.\n\t_, err = newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(100))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Cost is more than renter funds should fail.\n\t_, err = newDownloadRevision(revWithValues(100, 0), types.NewCurrency64(101))\n\tif !errors.Contains(err, types.ErrRevisionCostTooHigh) {\n\t\tt.Fatal(err)\n\t}\n\n\t// Collateral checks (in each, renter funds <= cost)\n\t//\n\t// Cost less than collateral should be okay.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(99))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Using up all collateral should be okay.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(100))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\t// Not enough collateral should cause an error.\n\t_, err = newUploadRevision(revWithValues(100, 100), crypto.Hash{}, types.NewCurrency64(99), types.NewCurrency64(100))\n\tif errors.Contains(err, types.ErrRevisionCollateralTooLow) {\n\t\tt.Fatal(err)\n\t}\n}", "func dashboard(query_string string, only_new bool) () {\n change_list, err := gerrit.FetchChangeList(query_string)\n if err != nil {\n return\n }\n\n num_changes := len(change_list)\n\n fmt.Printf(\"%sTotal %d changes%s\\n\", theme.Title, num_changes, theme.Reset)\n if num_changes == 0 {\n return\n }\n\n // get changes in parallel\n ch_out := make(chan 
*change.LongChange, num_changes)\n ch_in := make(chan string, num_changes)\n\n // limit concurrent get's \n num_workers := Concurrent_GETs\n if num_workers > num_changes {\n num_workers = num_changes\n }\n\n // start 'workers'\n for i := 0; i < num_workers; i++ {\n go get_change(ch_in, ch_out)\n }\n\n for _, change := range change_list {\n ch_in <- change.Id\n }\n\n var processed = 0;\n ch_list := make([]*change.LongChange, 0)\n\n Loop:\n for {\n select {\n case change := <-ch_out:\n processed++\n ch_list = append(ch_list, change)\n if processed == num_changes {\n break Loop\n }\n }\n }\n\n // and signal exit to worker goroutines\n for i := 0; i < num_workers; i++ {\n ch_in <- \"EXIT\"\n }\n\n // sort by Updated date\n gerrit.SortChanges(ch_list)\n print_change_list(ch_list, only_new)\n\n return\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig, b.ko.Spec.MonitoringScheduleConfig) {\n\t\tdelta.Add(\"Spec.MonitoringScheduleConfig\", a.ko.Spec.MonitoringScheduleConfig, b.ko.Spec.MonitoringScheduleConfig)\n\t} else if a.ko.Spec.MonitoringScheduleConfig != nil && b.ko.Spec.MonitoringScheduleConfig != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName 
{\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.BaseliningJobName)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.ConstraintsResource.S3URI)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI) 
{\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.BaselineConfig.StatisticsResource.S3URI)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment != nil {\n\t\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.Environment)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification != nil {\n\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerArguments)\n\t\t\t\t}\n\t\t\t\tif 
!ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ContainerEntrypoint)\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.ImageURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.PostAnalyticsProcessorSourceURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringAppSpecification.RecordPreprocessorSourceURI)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringInputs)\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID != 
*b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.KMSKeyID)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif !reflect.DeepEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringOutputConfig.MonitoringOutputs)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig != nil {\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount 
{\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceCount)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.InstanceType)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeKMSKeyID)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB)\n\t\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != nil {\n\t\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB {\n\t\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.MonitoringResources.ClusterConfig.VolumeSizeInGB)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableInterContainerTrafficEncryption)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.EnableNetworkIsolation)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig != nil {\n\t\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.SecurityGroupIDs)\n\t\t\t\t\t}\n\t\t\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets) {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.NetworkConfig.VPCConfig.Subnets)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN)\n\t\t\t} 
else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != nil {\n\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.RoleARN)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition != nil {\n\t\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds) {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil {\n\t\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds {\n\t\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != nil {\n\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName != *b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName\", a.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName, b.ko.Spec.MonitoringScheduleConfig.MonitoringJobDefinitionName)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.MonitoringType, 
b.ko.Spec.MonitoringScheduleConfig.MonitoringType) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringType, b.ko.Spec.MonitoringScheduleConfig.MonitoringType)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.MonitoringType != nil && b.ko.Spec.MonitoringScheduleConfig.MonitoringType != nil {\n\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.MonitoringType != *b.ko.Spec.MonitoringScheduleConfig.MonitoringType {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.MonitoringType\", a.ko.Spec.MonitoringScheduleConfig.MonitoringType, b.ko.Spec.MonitoringScheduleConfig.MonitoringType)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig) {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig)\n\t\t} else if a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig != nil && b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression) {\n\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression)\n\t\t\t} else if a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != nil && b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != nil {\n\t\t\t\tif *a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression != *b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression {\n\t\t\t\t\tdelta.Add(\"Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression\", a.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression, b.ko.Spec.MonitoringScheduleConfig.ScheduleConfig.ScheduleExpression)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName) {\n\t\tdelta.Add(\"Spec.MonitoringScheduleName\", a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName)\n\t} else if a.ko.Spec.MonitoringScheduleName != nil && b.ko.Spec.MonitoringScheduleName != nil {\n\t\tif *a.ko.Spec.MonitoringScheduleName != *b.ko.Spec.MonitoringScheduleName {\n\t\t\tdelta.Add(\"Spec.MonitoringScheduleName\", a.ko.Spec.MonitoringScheduleName, b.ko.Spec.MonitoringScheduleName)\n\t\t}\n\t}\n\n\treturn delta\n}", "func (u *Updater) UpdateTo(release Release) error {\n\tif release == nil {\n\t\tvar err error\n\t\trelease, err = u.Check()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif release == nil {\n\t\t\treturn errors.New(\"The application is already up to date.\")\n\t\t}\n\t}\n\n\twriters := make([]AbortWriter, 0)\n\tvar abort = func() {\n\t\tfor _, w := range writers {\n\t\t\tw.Abort()\n\t\t}\n\t}\n\n\tfor _, a := range release.Assets() {\n\t\tw, err := u.WriterForAsset(a)\n\t\tif err != nil {\n\t\t\tabort()\n\t\t\treturn err\n\t\t}\n\n\t\twriters = append(writers, w)\n\n\t\tif w != nil {\n\t\t\terr := a.Write(w)\n\t\t\tif err != nil {\n\t\t\t\tabort()\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *Connection) WorkerRelease(ctx context.Context) error {\n\t_, err := c.Request(ctx).Get(\"/worker/release\")\n\treturn err\n}", "func jobGenerate(ctx 
context.Context, cfg *Config, cluster string, tmplFN string, output io.Writer, queues *Queues) (err kv.Error) {\n\n\t// Open the template file to be used\n\ttmplFile, errGo := os.Open(tmplFN)\n\tif errGo != nil {\n\t\treturn kv.Wrap(errGo).With(\"stack\", stack.Trace().TrimRuntime())\n\t}\n\n\t// Get a working directory to be used for generating values files\n\ttmpDir, errGo := ioutil.TempDir(\"\", \"\")\n\tif errGo != nil {\n\t\treturn kv.Wrap(errGo).With(\"stack\", stack.Trace().TrimRuntime())\n\t}\n\tdefer func() {\n\t\tos.RemoveAll(tmpDir)\n\t}()\n\n\tif logger.IsDebug() {\n\t\tnames := []string{}\n\t\tfor qName, _ := range *queues {\n\t\t\tnames = append(names, qName)\n\t\t}\n\t\tlogger.Debug(\"generating job templates\", \"queues\", strings.Join(names, \", \"), \"stack\", stack.Trace().TrimRuntime())\n\t}\n\n\tfor qName, qDetails := range *queues {\n\t\tjson, errGo := json.MarshalIndent(qDetails, \"\", \" \")\n\t\tif errGo != nil {\n\t\t\treturn kv.Wrap(errGo).With(\"stack\", stack.Trace().TrimRuntime())\n\t\t}\n\n\t\tqVarsFN := filepath.Join(tmpDir, qName+\".json\")\n\t\tif errGo = ioutil.WriteFile(qVarsFN, json, 0600); errGo != nil {\n\t\t\treturn kv.Wrap(errGo).With(\"stack\", stack.Trace().TrimRuntime())\n\t\t}\n\n\t\topts := stencil.TemplateOptions{\n\t\t\tIOFiles: []stencil.TemplateIOFiles{{\n\t\t\t\tIn: tmplFile,\n\t\t\t\tOut: output,\n\t\t\t}},\n\t\t\tValueFiles: []string{qVarsFN},\n\t\t\tOverrideValues: map[string]string{\n\t\t\t\t\"QueueName\": qName,\n\t\t\t},\n\t\t}\n\t\terr, warns := stencil.Template(opts)\n\t\tlogger.Warn(spew.Sdump(warns))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\ttmplFile.Seek(0, io.SeekStart) // The template is read multiple times so we rewindow between each\n\t\t// Write comments in the output to ensure kubernetes resource sections in the file are split\n\t\toutput.Write([]byte(\"\\n---\\n\"))\n\t}\n\treturn nil\n}", "func TestBetaReleasesJSON(t *testing.T) {\n\treleases, err := notify.AllVersionsFromURL(notify.GithubMinikubeBetaReleasesURL)\n\tif err != nil {\n\t\tt.Fatalf(\"Error getting releases-bets.json: %v\", err)\n\t}\n\tcheckReleasesV2(t, releases)\n}", "func (j *Job) Equal(j2 *Job) bool {\n\treturn j.UUID == j2.UUID &&\n\t\tj.Code == j2.Code &&\n\t\tj.Status == j2.Status &&\n\t\treflect.DeepEqual(j.Modules, j2.Modules) &&\n\t\treflect.DeepEqual(j.Vars, j2.Vars) &&\n\t\tdatesAreEqual(&j.CreatedAt, &j2.CreatedAt) &&\n\t\tdatesAreEqual(j.StartedAt, j2.StartedAt) &&\n\t\tdatesAreEqual(j.FinishedAt, j2.FinishedAt) &&\n\t\tj.Error == j2.Error &&\n\t\treflect.DeepEqual(j.Results, j2.Results) &&\n\t\treflect.DeepEqual(j.Logs, j2.Logs)\n}", "func (d *JobManager) watchJobExecution(request *restful.Request, response *restful.Response) {\n\n}", "func (a ASCIITableWriter) Write(out io.Writer, p *api.Project) error {\n\ttable := tablewriter.NewWriter(out)\n\ttable.SetHeader([]string{\"RELEASE\", \"Downloads\"})\n\n\tfor _, r := range p.Releases() {\n\t\ttable.Append([]string{r.Name, strconv.Itoa(r.DownloadCount())})\n\t}\n\ttable.Render()\n\treturn nil\n}", "func TestCreateNodeAverageReport(t *testing.T) {\n\n timeSlice := float64(TIMESLICE)\n\n testtable := []struct {\n tname string\n }{\n {\n tname: \"ok\",\n },\n }\n\n CreateDummyNodeMetrics(testNodeMetricsMap)\n\n for _, tc := range testtable {\n\n t.Run(tc.tname, func(t *testing.T) {\n\n var buffer bytes.Buffer\n writer := bufio.NewWriter(&buffer)\n\n err := json.NewEncoder(writer).Encode(CreateNodeAverageReport(testNodeMetricsMap, timeSlice))\n if err != nil {\n t.Fatalf(\"failed writing 
json: %s\", err)\n }\n writer.Flush()\n\n goldenPath := filepath.Join(\"testdata\", filepath.FromSlash(t.Name()) + \".golden\")\n\n\n if *update {\n\n t.Log(\"update golden file\")\n if err := ioutil.WriteFile(goldenPath, buffer.Bytes(), 0644); err != nil {\n t.Fatalf(\"failed to update golden file %s: %s\", goldenPath, err)\n }\n\n }\n\n\n goldenData, err := ioutil.ReadFile(goldenPath)\n\n if err != nil {\n t.Fatalf(\"failed reading .golden file %s: %s\", goldenPath, err)\n }\n\n t.Log(string(buffer.Bytes()))\n\n if !bytes.Equal(buffer.Bytes(), goldenData) {\n t.Errorf(\"bytes do not match .golden file %s\", goldenPath)\n }\n\n })\n }\n\n}", "func rollbackRelease(c *gin.Context, r *api.HelmRelease) error {\n\tlogEntry := log.ReqEntry(c).\n\t\tWithField(\"cluster\", r.Cluster).WithField(\"namespace\", r.Namespace).WithField(\"releaseName\", r.Name)\n\n\tlogEntry.Debugf(\"getting helm action config...\")\n\trollbackConfig, err := generateHelmActionConfig(r.Cluster, r.Namespace, logEntry)\n\tif err != nil {\n\t\tlogEntry.WithField(\"error\", err).Warningf(\"failed to generate configuration for helm action\")\n\t\treturn err\n\t}\n\trollbackAction := action.NewRollback(rollbackConfig)\n\trollbackAction.Version = int(r.Revision)\n\terr = rollbackAction.Run(r.Name)\n\tif err != nil {\n\t\tlogEntry.WithField(\"error\", err).Warningf(\"failed to run rollback action\")\n\t}\n\treturn nil\n}", "func TestPostgresReplicationEventQueue_DequeueMultiple(t *testing.T) {\n\tt.Parallel()\n\tdb := testdb.New(t)\n\tctx := testhelper.Context(t)\n\n\tqueue := PostgresReplicationEventQueue{db.DB}\n\n\teventType1 := ReplicationEvent{\n\t\tJob: ReplicationJob{\n\t\t\tChange: UpdateRepo,\n\t\t\tRelativePath: \"/project/path-1\",\n\t\t\tTargetNodeStorage: \"gitaly-1\",\n\t\t\tSourceNodeStorage: \"gitaly-0\",\n\t\t\tVirtualStorage: \"praefect\",\n\t\t\tParams: nil,\n\t\t},\n\t}\n\n\teventType2 := ReplicationEvent{\n\t\tJob: ReplicationJob{\n\t\t\tChange: DeleteRepo,\n\t\t\tRelativePath: \"/project/path-1\",\n\t\t\tTargetNodeStorage: \"gitaly-1\",\n\t\t\tSourceNodeStorage: \"\",\n\t\t\tVirtualStorage: \"praefect\",\n\t\t\tParams: nil,\n\t\t},\n\t}\n\n\teventType3 := ReplicationEvent{\n\t\tJob: ReplicationJob{\n\t\t\tChange: RenameRepo,\n\t\t\tRelativePath: \"/project/path-2\",\n\t\t\tTargetNodeStorage: \"gitaly-1\",\n\t\t\tSourceNodeStorage: \"gitaly-0\",\n\t\t\tVirtualStorage: \"praefect\",\n\t\t\tParams: Params{\"RelativePath\": \"/project/path-2-renamed\"},\n\t\t},\n\t}\n\n\teventType4 := ReplicationEvent{\n\t\tJob: ReplicationJob{\n\t\t\tChange: UpdateRepo,\n\t\t\tRelativePath: \"/project/path-1\",\n\t\t\tTargetNodeStorage: \"gitaly-1\",\n\t\t\tSourceNodeStorage: \"gitaly-0\",\n\t\t\tVirtualStorage: \"backup\",\n\t\t},\n\t}\n\n\t// events to fill in the queue\n\tevents := []ReplicationEvent{eventType1, eventType1, eventType2, eventType1, eventType3, eventType4}\n\tfor i := range events {\n\t\tvar err error\n\t\tevents[i], err = queue.Enqueue(ctx, events[i])\n\t\trequire.NoError(t, err, \"failed to fill in event queue\")\n\t}\n\n\t// first request to deque\n\texpectedEvents1 := []ReplicationEvent{events[0], events[2], events[4]}\n\texpectedJobLocks1 := []JobLockRow{\n\t\t{JobID: events[0].ID, LockID: \"praefect|gitaly-1|/project/path-1\"},\n\t\t{JobID: events[2].ID, LockID: \"praefect|gitaly-1|/project/path-1\"},\n\t\t{JobID: events[4].ID, LockID: \"praefect|gitaly-1|/project/path-2\"},\n\t}\n\n\t// we expect only first two types of events by limiting count to 3\n\tdequeuedEvents1, err := queue.Dequeue(ctx, 
\"praefect\", \"gitaly-1\", 3)\n\trequire.NoError(t, err)\n\trequire.Len(t, dequeuedEvents1, len(expectedEvents1))\n\tfor i := range dequeuedEvents1 {\n\t\tdequeuedEvents1[i].UpdatedAt = nil // it is not possible to determine update_at value as it is generated on UPDATE in database\n\t\texpectedEvents1[i].State = JobStateInProgress\n\t\texpectedEvents1[i].Attempt--\n\t}\n\trequire.Equal(t, expectedEvents1, dequeuedEvents1)\n\n\trequireLocks(t, ctx, db, []LockRow{\n\t\t// there is only one single lock for all fetched events because of their 'repo' and 'target' combination\n\t\t{ID: \"praefect|gitaly-1|/project/path-1\", Acquired: true},\n\t\t{ID: \"praefect|gitaly-1|/project/path-2\", Acquired: true},\n\t\t{ID: \"backup|gitaly-1|/project/path-1\", Acquired: false},\n\t})\n\trequireJobLocks(t, ctx, db, expectedJobLocks1)\n\n\t// second request to deque\n\t// there must be only last event fetched from the queue\n\texpectedEvents2 := []ReplicationEvent{events[5]}\n\texpectedEvents2[0].State = JobStateInProgress\n\texpectedEvents2[0].Attempt = 2\n\n\texpectedJobLocks2 := []JobLockRow{{JobID: 6, LockID: \"backup|gitaly-1|/project/path-1\"}}\n\n\tdequeuedEvents2, err := queue.Dequeue(ctx, \"backup\", \"gitaly-1\", 100500)\n\trequire.NoError(t, err)\n\trequire.Len(t, dequeuedEvents2, 1, \"only one event must be fetched from the queue\")\n\n\tdequeuedEvents2[0].UpdatedAt = nil // it is not possible to determine update_at value as it is generated on UPDATE in database\n\trequire.Equal(t, expectedEvents2, dequeuedEvents2)\n\n\trequireLocks(t, ctx, db, []LockRow{\n\t\t{ID: \"praefect|gitaly-1|/project/path-1\", Acquired: true},\n\t\t{ID: \"praefect|gitaly-1|/project/path-2\", Acquired: true},\n\t\t{ID: \"backup|gitaly-1|/project/path-1\", Acquired: true},\n\t})\n\trequireJobLocks(t, ctx, db, append(expectedJobLocks1, expectedJobLocks2...))\n}", "func (r *Release) diff() (string, error) {\n\tvar (\n\t\targs []string\n\t\tmaxExitCode int\n\t)\n\n\tif !flags.kubectlDiff {\n\t\targs = []string{\"diff\", \"--suppress-secrets\"}\n\t\tif flags.noColors {\n\t\t\targs = append(args, \"--no-color\")\n\t\t}\n\t\tif flags.diffContext != -1 {\n\t\t\targs = append(args, \"--context\", strconv.Itoa(flags.diffContext))\n\t\t}\n\t\targs = concat(args, r.getHelmArgsFor(\"diff\"))\n\t} else {\n\t\targs = r.getHelmArgsFor(\"template\")\n\t}\n\n\tdesc := \"Diffing release [ \" + r.Name + \" ] in namespace [ \" + r.Namespace + \" ]\"\n\tcmd := CmdPipe{helmCmd(args, desc)}\n\n\tif flags.kubectlDiff {\n\t\tcmd = append(cmd, kubectl([]string{\"diff\", \"--namespace\", r.Namespace, \"-f\", \"-\"}, desc))\n\t\tmaxExitCode = 1\n\t}\n\n\tres, err := cmd.RetryExecWithThreshold(3, maxExitCode)\n\tif err != nil {\n\t\tif flags.kubectlDiff && res.code <= 1 {\n\t\t\t// kubectl diff exit status:\n\t\t\t// 0 No differences were found.\n\t\t\t// 1 Differences were found.\n\t\t\t// >1 Kubectl or diff failed with an error.\n\t\t\treturn res.output, nil\n\t\t}\n\t\treturn \"\", fmt.Errorf(\"command failed: %w\", err)\n\t}\n\n\treturn res.output, nil\n}", "func isUpToDate(ctx context.Context, kube client.Client, in *v1alpha1.ReleaseParameters, observed *release.Release, s v1alpha1.ReleaseStatus) (bool, error) {\n\toc := observed.Chart\n\tif oc == nil {\n\t\treturn false, errors.New(errChartNilInObservedRelease)\n\t}\n\n\tocm := oc.Metadata\n\tif ocm == nil {\n\t\treturn false, errors.New(errChartMetaNilInObservedRelease)\n\t}\n\tif in.Chart.Name != ocm.Name {\n\t\treturn false, nil\n\t}\n\tif in.Chart.Version != ocm.Version {\n\t\treturn 
false, nil\n\t}\n\tdesiredConfig, err := composeValuesFromSpec(ctx, kube, in.ValuesSpec)\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, errFailedToComposeValues)\n\t}\n\n\tif !reflect.DeepEqual(desiredConfig, observed.Config) {\n\t\treturn false, nil\n\t}\n\n\tchanged, err := newPatcher().hasUpdates(ctx, kube, in.PatchesFrom, s)\n\tif err != nil {\n\t\treturn false, errors.Wrap(err, errFailedToLoadPatches)\n\t}\n\n\tif changed {\n\t\treturn false, nil\n\t}\n\n\treturn true, nil\n}", "func (db *db) backfillVersions() error {\n\tdb.refreshLock.Lock()\n\tdefer db.refreshLock.Unlock()\n\n\tversions, err := db.sequins.backend.ListVersions(db.name, \"\", db.sequins.config.RequireSuccessFile)\n\tif err != nil {\n\t\treturn err\n\t} else if len(versions) == 0 {\n\t\treturn nil\n\t}\n\n\t// Only look at the last 3 versions, to keep this next part quick.\n\tif len(versions) > 3 {\n\t\tversions = versions[len(versions)-3:]\n\t}\n\n\t// Iterate through all the versions we know about, and track the remote and\n\t// local partitions for it. We don't download anything we don't have, but if\n\t// one is ready - because we have all the partitions locally, or because our\n\t// peers already do - we can switch to it immediately. Even if none are\n\t// available immediately, we can still start watching out for peers on old\n\t// versions for which we have data locally, in case they start to appear (as\n\t// would happen if a bunch of nodes with stale data started up together).\n\tfor i := len(versions) - 1; i >= 0; i-- {\n\t\tv := versions[i]\n\t\tfiles, err := db.sequins.backend.ListFiles(db.name, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tversion := newVersion(db.sequins, db.localPath(v), db.name, v, len(files))\n\t\tif version.ready() {\n\t\t\t// The version is complete, most likely because our peers have it. We\n\t\t\t// can switch to it right away, and build any (possibly underreplicated)\n\t\t\t// partitions in the background.\n\t\t\t// TODO: In the case that we *do* have some data locally, this will cause\n\t\t\t// us to advertise that before we're actually listening over HTTP.\n\t\t\tlog.Println(\"Starting with pre-loaded version\", v, \"of\", db.name)\n\n\t\t\tdb.mux.prepare(version)\n\t\t\tdb.upgrade(version)\n\t\t\tdb.trackVersion(version, versionBuilding)\n\t\t\tgo func() {\n\t\t\t\terr := version.build(files)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Println(\"Error building version %s of %s: %s\", v, db.name, err)\n\t\t\t\t\tdb.trackVersion(version, versionError)\n\t\t\t\t}\n\n\t\t\t\tlog.Println(\"Finished building version\", v, \"of\", db.name)\n\t\t\t\tdb.trackVersion(version, versionAvailable)\n\t\t\t\tversion.advertiseAndWait()\n\t\t\t}()\n\n\t\t\tbreak\n\t\t} else if version.getBlockStore() != nil {\n\t\t\t// The version isn't complete, but we have partitions locally and can\n\t\t\t// start waiting on peers. 
This happens if, for example, a complete\n\t\t\t// cluster with stored data comes up all at once.\n\t\t\tdb.switchVersion(version)\n\t\t} else {\n\t\t\tversion.close()\n\t\t}\n\t}\n\n\tgo db.cleanupStore()\n\treturn nil\n}", "func (a *Agent) UpgradeReleaseByValues(\n\tctx context.Context,\n\tconf *UpgradeReleaseConfig,\n\tdoAuth *oauth2.Config,\n\tdisablePullSecretsInjection bool,\n\tignoreDependencies bool,\n) (*release.Release, error) {\n\tctx, span := telemetry.NewSpan(ctx, \"helm-upgrade-release-by-values\")\n\tdefer span.End()\n\n\ttelemetry.WithAttributes(span,\n\t\ttelemetry.AttributeKV{Key: \"project-id\", Value: conf.Cluster.ProjectID},\n\t\ttelemetry.AttributeKV{Key: \"cluster-id\", Value: conf.Cluster.ID},\n\t\ttelemetry.AttributeKV{Key: \"name\", Value: conf.Name},\n\t\ttelemetry.AttributeKV{Key: \"stack-name\", Value: conf.StackName},\n\t\ttelemetry.AttributeKV{Key: \"stack-revision\", Value: conf.StackRevision},\n\t)\n\n\t// grab the latest release\n\trel, err := a.GetRelease(ctx, conf.Name, 0, !ignoreDependencies)\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"Could not get release to be upgraded\")\n\t}\n\n\tch := rel.Chart\n\n\tif conf.Chart != nil {\n\t\tch = conf.Chart\n\t}\n\n\tcmd := action.NewUpgrade(a.ActionConfig)\n\tcmd.Namespace = rel.Namespace\n\n\tcmd.PostRenderer, err = NewPorterPostrenderer(\n\t\tconf.Cluster,\n\t\tconf.Repo,\n\t\ta.K8sAgent,\n\t\trel.Namespace,\n\t\tconf.Registries,\n\t\tdoAuth,\n\t\tdisablePullSecretsInjection,\n\t)\n\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting porter postrenderer\")\n\t}\n\n\tif conf.StackName != \"\" && conf.StackRevision > 0 {\n\t\tconf.Values[\"stack\"] = map[string]interface{}{\n\t\t\t\"enabled\": true,\n\t\t\t\"name\": conf.StackName,\n\t\t\t\"revision\": conf.StackRevision,\n\t\t}\n\t}\n\n\tres, err := cmd.Run(conf.Name, ch, conf.Values)\n\tif err != nil {\n\t\t// refer: https://github.com/helm/helm/blob/release-3.8/pkg/action/action.go#L62\n\t\t// issue tracker: https://github.com/helm/helm/issues/4558\n\t\tif err.Error() == \"another operation (install/upgrade/rollback) is in progress\" {\n\t\t\tsecretList, err := a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace).List(\n\t\t\t\tcontext.Background(),\n\t\t\t\tv1.ListOptions{\n\t\t\t\t\tLabelSelector: fmt.Sprintf(\"owner=helm,status in (pending-install, pending-upgrade, pending-rollback),name=%s\", rel.Name),\n\t\t\t\t},\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting secret list\")\n\t\t\t}\n\n\t\t\tif len(secretList.Items) > 0 {\n\t\t\t\tmostRecentSecret := secretList.Items[0]\n\n\t\t\t\tfor i := 1; i < len(secretList.Items); i += 1 {\n\t\t\t\t\toldVersion, _ := strconv.Atoi(mostRecentSecret.Labels[\"version\"])\n\t\t\t\t\tnewVersion, _ := strconv.Atoi(secretList.Items[i].Labels[\"version\"])\n\n\t\t\t\t\tif oldVersion < newVersion {\n\t\t\t\t\t\tmostRecentSecret = secretList.Items[i]\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif time.Since(mostRecentSecret.CreationTimestamp.Time) >= time.Minute {\n\t\t\t\t\thelmSecrets := driver.NewSecrets(a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace))\n\n\t\t\t\t\trel.Info.Status = release.StatusFailed\n\n\t\t\t\t\terr = helmSecrets.Update(mostRecentSecret.GetName(), rel)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error updating helm secrets\")\n\t\t\t\t\t}\n\n\t\t\t\t\t// retry upgrade\n\t\t\t\t\tres, err = cmd.Run(conf.Name, ch, conf.Values)\n\n\t\t\t\t\tif err != nil 
{\n\t\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade after updating helm secrets\")\n\t\t\t\t\t}\n\n\t\t\t\t\treturn res, nil\n\t\t\t\t} else {\n\t\t\t\t\t// ask the user to wait for about a minute before retrying for the above fix to kick in\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"another operation (install/upgrade/rollback) is in progress. If this error persists, please wait for 60 seconds to force an upgrade\")\n\t\t\t\t}\n\t\t\t}\n\t\t} else if strings.Contains(err.Error(), \"current release manifest contains removed kubernetes api(s)\") || strings.Contains(err.Error(), \"resource mapping not found for name\") {\n\t\t\t// ref: https://helm.sh/docs/topics/kubernetes_apis/#updating-api-versions-of-a-release-manifest\n\t\t\t// in this case, we manually update the secret containing the new manifests\n\t\t\tsecretList, err := a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace).List(\n\t\t\t\tcontext.Background(),\n\t\t\t\tv1.ListOptions{\n\t\t\t\t\tLabelSelector: fmt.Sprintf(\"owner=helm,name=%s\", rel.Name),\n\t\t\t\t},\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error getting secret list\")\n\t\t\t}\n\n\t\t\tif len(secretList.Items) > 0 {\n\t\t\t\tmostRecentSecret := secretList.Items[0]\n\n\t\t\t\tfor i := 1; i < len(secretList.Items); i += 1 {\n\t\t\t\t\toldVersion, _ := strconv.Atoi(mostRecentSecret.Labels[\"version\"])\n\t\t\t\t\tnewVersion, _ := strconv.Atoi(secretList.Items[i].Labels[\"version\"])\n\n\t\t\t\t\tif oldVersion < newVersion {\n\t\t\t\t\t\tmostRecentSecret = secretList.Items[i]\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// run the equivalent of `helm template` to get the manifest string for the new release\n\t\t\t\tinstallCmd := action.NewInstall(a.ActionConfig)\n\n\t\t\t\tinstallCmd.ReleaseName = conf.Name\n\t\t\t\tinstallCmd.Namespace = rel.Namespace\n\t\t\t\tinstallCmd.DryRun = true\n\t\t\t\tinstallCmd.Replace = true\n\n\t\t\t\tinstallCmd.ClientOnly = false\n\t\t\t\tinstallCmd.IncludeCRDs = true\n\n\t\t\t\tnewRelDryRun, err := installCmd.Run(ch, conf.Values)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running install cmd\")\n\t\t\t\t}\n\n\t\t\t\toldManifestBuffer := bytes.NewBufferString(rel.Manifest)\n\t\t\t\tnewManifestBuffer := bytes.NewBufferString(newRelDryRun.Manifest)\n\n\t\t\t\tversionMapper := &DeprecatedAPIVersionMapper{}\n\n\t\t\t\tupdatedManifestBuffer, err := versionMapper.Run(oldManifestBuffer, newManifestBuffer)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running version mapper\")\n\t\t\t\t}\n\n\t\t\t\trel.Manifest = updatedManifestBuffer.String()\n\n\t\t\t\thelmSecrets := driver.NewSecrets(a.K8sAgent.Clientset.CoreV1().Secrets(rel.Namespace))\n\n\t\t\t\terr = helmSecrets.Update(mostRecentSecret.GetName(), rel)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error updating helm secret\")\n\t\t\t\t}\n\n\t\t\t\tres, err := cmd.Run(conf.Name, ch, conf.Values)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade after updating helm secrets\")\n\t\t\t\t}\n\n\t\t\t\treturn res, nil\n\t\t\t}\n\t\t}\n\n\t\treturn nil, telemetry.Error(ctx, span, err, \"error running upgrade\")\n\t}\n\n\treturn res, nil\n}", "func Test2(t *testing.T) {\n\tnewCollection := mutatingBaseCollection.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = 
\"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"4.5.6\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tRepositoryUrl: \"https://github.com/some/collection/alternate-kabanero-index.yaml\",\n\t\tVersion: \"4.5.6\",\n\t\tDesiredState: \"active\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"Mutated versions[0] does not match expected versions[0] values. Mutated versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func (v *VersionUpdater) touch(file string) error {\n\tw := v.storageClient.Bucket(config.GCS.Bucket).Object(file).NewWriter(context.Background())\n\tif err := w.Close(); err != nil {\n\t\treturn fmt.Errorf(\"Could not touch version file %s : %s\", file, err)\n\t}\n\treturn nil\n}", "func deployJob(cmd *cobra.Command, args []string) {\n port, _:= cmd.Flags().GetString(\"port\")\n if port == \"\" {\n port = DefaultPort\n }\n name, _:= cmd.Flags().GetString(\"file\")\n yaml := unmarsharlYaml(readJobFile(name))\n currentDir, _ := os.Getwd()\n var job NewJob\n job.ID = yaml.Job.ID\n job.UID = fmt.Sprint(syscall.Getuid())\n group, _ := user.LookupGroup(\"odin\")\n gid, _ := strconv.Atoi(group.Gid)\n job.GID = strconv.Itoa(gid)\n job.Name = yaml.Job.Name\n job.Description = yaml.Job.Description\n job.File = currentDir + \"/\" + yaml.Job.File\n if yaml.Job.Language == \"go\" {\n job.Language = yaml.Job.Language\n cmd := exec.Command(job.Language, \"build\", job.File)\n cmd.SysProcAttr = &syscall.SysProcAttr{}\n _, err := cmd.CombinedOutput()\n if err != nil {\n fmt.Println(err)\n os.Exit(2)\n }\n job.File = job.File[:len(job.File)-3]\n } else {\n job.Language = yaml.Job.Language\n }\n job.Schedule = getScheduleString(name, port)\n jobJSON, _ := json.Marshal(job)\n body := makePostRequest(fmt.Sprintf(\"http://localhost%s/jobs\", port), bytes.NewBuffer(jobJSON))\n fmt.Println(body)\n}", "func DownloadDiff(pr *models.PullRequest, w io.Writer, patch bool) error {\n\treturn DownloadDiffOrPatch(pr, w, false)\n}", "func (j Jobs) Swap(i, k int) {\n\tj[i], j[k] = j[k], j[i]\n}", "func Job(logger *logrus.Entry, job *puppetmaster.Job, execLogsVerbose bool) {\n\tfmt.Fprint(logger.Logger.Out, \"\\n\\nJob:\\n\")\n\n\tw := tabwriter.NewWriter(logger.Logger.Out, 20, 10, 1, ' ', tabwriter.Debug)\n\tfmt.Fprintf(w, \"UUID\\t%s\\t\\n\", job.UUID)\n\tfmt.Fprintf(w, \"Status\\t%s\\t\\n\", job.Status)\n\tfmt.Fprintf(w, \"Duration\\t%d\\t\\n\", job.Duration)\n\tfmt.Fprintf(w, \"Error\\t%s\\t\\n\", job.Error)\n\tfmt.Fprintf(w, \"Created at\\t%s\\t\\n\", job.CreatedAt)\n\tfmt.Fprintf(w, \"Started at\\t%s\\t\\n\", job.StartedAt)\n\tfmt.Fprintf(w, \"Finished at\\t%s\\t\\n\", job.FinishedAt)\n\tw.Flush()\n\n\tfmt.Fprint(logger.Logger.Out, \"\\n\\nLogs:\\n\")\n\tlogs(logger, job.Logs, execLogsVerbose)\n\n\tfmt.Fprint(logger.Logger.Out, \"\\n\\nResults:\\n\")\n\tresults(logger, job.Results)\n\n\tfmt.Fprint(logger.Logger.Out, \"\\n\\n\")\n}", "func (tc *TableCollection) releaseTables(ctx context.Context, opt releaseOpt) error {\n\ttc.timestamp = hlc.Timestamp{}\n\tif len(tc.leasedTables) > 0 {\n\t\tlog.VEventf(ctx, 2, \"releasing %d tables\", len(tc.leasedTables))\n\t\tfor _, table := range tc.leasedTables {\n\t\t\tif err := tc.leaseMgr.Release(table); err != nil {\n\t\t\t\tlog.Warning(ctx, 
err)\n\t\t\t}\n\t\t}\n\t\ttc.leasedTables = tc.leasedTables[:0]\n\t}\n\ttc.uncommittedTables = nil\n\n\tif opt == blockForDBCacheUpdate {\n\t\tfor _, uc := range tc.uncommittedDatabases {\n\t\t\tif !uc.dropped {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\terr := tc.dbCacheSubscriber.waitForCacheState(\n\t\t\t\tfunc(dc *databaseCache) (bool, error) {\n\t\t\t\t\tdesc, err := dc.getCachedDatabaseDesc(uc.name, false /*required*/)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn false, err\n\t\t\t\t\t}\n\t\t\t\t\tif desc == nil {\n\t\t\t\t\t\treturn true, nil\n\t\t\t\t\t}\n\t\t\t\t\t// If the database name still exists but it now references another\n\t\t\t\t\t// db, we're good - it means that the database name has been reused\n\t\t\t\t\t// within the same transaction.\n\t\t\t\t\treturn desc.ID != uc.id, nil\n\t\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\ttc.uncommittedDatabases = nil\n\ttc.releaseAllDescriptors()\n\treturn nil\n}", "func CheckForUpdate(out io.Writer) error {\n\tversion := CurrVersion\n\n\tif !isValidVersion(version) {\n\t\tfmt.Fprintf(out, messages.CLI_UNTAGGED_PROMPT)\n\t\tfmt.Fprintf(out, messages.CLI_INSTALL_CMD)\n\t\treturn nil\n\t}\n\n\t// fetch latest cli version\n\tlatestTagResp, err := api.RepoLatestRequest(\"astronomer\", \"astro-cli\")\n\tif err != nil {\n\t\tfmt.Fprintln(out, err)\n\t\tlatestTagResp.TagName = messages.NA\n\t}\n\n\t// fetch meta data around current cli version\n\tcurrentTagResp, err := api.RepoTagRequest(\"astronomer\", \"astro-cli\", string(\"v\")+version)\n\tif err != nil {\n\t\tfmt.Fprintln(out, \"Release info not found, please upgrade.\")\n\t\tfmt.Fprintln(out, messages.CLI_INSTALL_CMD)\n\t\treturn nil\n\t}\n\n\tcurrentPub := currentTagResp.PublishedAt.Format(\"2006.01.02\")\n\tcurrentTag := currentTagResp.TagName\n\tlatestPub := latestTagResp.PublishedAt.Format(\"2006.01.02\")\n\tlatestTag := latestTagResp.TagName\n\n\tfmt.Fprintf(out, messages.CLI_CURR_VERSION_DATE+\"\\n\", currentTag, currentPub)\n\tfmt.Fprintf(out, messages.CLI_LATEST_VERSION_DATE+\"\\n\", latestTag, latestPub)\n\n\tif latestTag > currentTag {\n\t\tfmt.Fprintln(out, messages.CLI_UPGRADE_PROMPT)\n\t\tfmt.Fprintln(out, messages.CLI_INSTALL_CMD)\n\t} else {\n\t\tfmt.Fprintln(out, messages.CLI_RUNNING_LATEST)\n\t}\n\n\treturn nil\n}", "func createDownloadsTable(f *os.File, releaseTag, heading string, filename ...string) error {\n\tvar urlPrefix string\n\n\tif *releaseBucket == \"kubernetes-release\" {\n\t\turlPrefix = k8sReleaseURLPrefix\n\t} else {\n\t\turlPrefix = fmt.Sprintf(\"https://storage.googleapis.com/%s/release\", *releaseBucket)\n\t}\n\n\tif *releaseBucket == \"\" {\n\t\tlog.Print(\"NOTE: empty Google Storage bucket specified. 
Please specify valid bucket using \\\"release-bucket\\\" flag.\")\n\t}\n\n\tif heading != \"\" {\n\t\tf.WriteString(fmt.Sprintf(\"\\n### %s\\n\", heading))\n\t}\n\n\tf.WriteString(\"\\n\")\n\tf.WriteString(\"filename | sha256 hash\\n\")\n\tf.WriteString(\"-------- | -----------\\n\")\n\n\tfiles := make([]string, 0)\n\tfor _, name := range filename {\n\t\tfs, _ := filepath.Glob(name)\n\t\tfor _, v := range fs {\n\t\t\tfiles = append(files, v)\n\t\t}\n\t}\n\n\tfor _, file := range files {\n\t\tfn := filepath.Base(file)\n\t\tsha, err := u.GetSha256(file)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to calc SHA256 of file %s: %v\", file, err)\n\t\t}\n\t\tf.WriteString(fmt.Sprintf(\"[%s](%s/%s/%s) | `%s`\\n\", fn, urlPrefix, releaseTag, fn, sha))\n\t}\n\treturn nil\n}", "func (m *DataHistoryManager) UpsertJob(job *DataHistoryJob, insertOnly bool) error {\n\tif m == nil {\n\t\treturn ErrNilSubsystem\n\t}\n\tif !m.IsRunning() {\n\t\treturn ErrSubSystemNotStarted\n\t}\n\tif job == nil {\n\t\treturn errNilJob\n\t}\n\tif job.Nickname == \"\" {\n\t\treturn fmt.Errorf(\"upsert job %w\", errNicknameUnset)\n\t}\n\n\tj, err := m.GetByNickname(job.Nickname, false)\n\tif err != nil && !errors.Is(err, errJobNotFound) {\n\t\treturn err\n\t}\n\n\tif insertOnly && j != nil ||\n\t\t(j != nil && j.Status != dataHistoryStatusActive) {\n\t\treturn fmt.Errorf(\"upsert job %w nickname: %s - status: %s \", errNicknameInUse, j.Nickname, j.Status)\n\t}\n\n\tm.m.Lock()\n\tdefer m.m.Unlock()\n\n\terr = m.validateJob(job)\n\tif err != nil {\n\t\treturn err\n\t}\n\ttoUpdate := false\n\tif !insertOnly {\n\t\tfor i := range m.jobs {\n\t\t\tif !strings.EqualFold(m.jobs[i].Nickname, job.Nickname) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ttoUpdate = true\n\t\t\tjob.ID = m.jobs[i].ID\n\t\t\tif job.Exchange != \"\" && m.jobs[i].Exchange != job.Exchange {\n\t\t\t\tm.jobs[i].Exchange = job.Exchange\n\t\t\t}\n\t\t\tif job.Asset != \"\" && m.jobs[i].Asset != job.Asset {\n\t\t\t\tm.jobs[i].Asset = job.Asset\n\t\t\t}\n\t\t\tif !job.Pair.IsEmpty() && !m.jobs[i].Pair.Equal(job.Pair) {\n\t\t\t\tm.jobs[i].Pair = job.Pair\n\t\t\t}\n\t\t\tif !job.StartDate.IsZero() && !m.jobs[i].StartDate.Equal(job.StartDate) {\n\t\t\t\tm.jobs[i].StartDate = job.StartDate\n\t\t\t}\n\t\t\tif !job.EndDate.IsZero() && !m.jobs[i].EndDate.Equal(job.EndDate) {\n\t\t\t\tm.jobs[i].EndDate = job.EndDate\n\t\t\t}\n\t\t\tif job.Interval != 0 && m.jobs[i].Interval != job.Interval {\n\t\t\t\tm.jobs[i].Interval = job.Interval\n\t\t\t}\n\t\t\tif job.RunBatchLimit != 0 && m.jobs[i].RunBatchLimit != job.RunBatchLimit {\n\t\t\t\tm.jobs[i].RunBatchLimit = job.RunBatchLimit\n\t\t\t}\n\t\t\tif job.RequestSizeLimit != 0 && m.jobs[i].RequestSizeLimit != job.RequestSizeLimit {\n\t\t\t\tm.jobs[i].RequestSizeLimit = job.RequestSizeLimit\n\t\t\t}\n\t\t\tif job.MaxRetryAttempts != 0 && m.jobs[i].MaxRetryAttempts != job.MaxRetryAttempts {\n\t\t\t\tm.jobs[i].MaxRetryAttempts = job.MaxRetryAttempts\n\t\t\t}\n\t\t\tm.jobs[i].DataType = job.DataType\n\t\t\tm.jobs[i].Status = job.Status\n\t\t\tbreak\n\t\t}\n\t}\n\tif job.ID == uuid.Nil {\n\t\tjob.ID, err = uuid.NewV4()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tjob.rangeHolder, err = kline.CalculateCandleDateRanges(job.StartDate, job.EndDate, job.Interval, uint32(job.RequestSizeLimit))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif !toUpdate {\n\t\tm.jobs = append(m.jobs, job)\n\t}\n\n\tdbJob := m.convertJobToDBModel(job)\n\treturn m.jobDB.Upsert(dbJob)\n}", "func exchangeVersions(rw io.ReadWriter, versionLine []byte) (them 
[]byte, err error) {\n\t// Contrary to the RFC, we do not ignore lines that don't\n\t// start with \"SSH-2.0-\" to make the library usable with\n\t// nonconforming servers.\n\tfor _, c := range versionLine {\n\t\t// The spec disallows non US-ASCII chars, and\n\t\t// specifically forbids null chars.\n\t\tif c < 32 {\n\t\t\treturn nil, errors.New(\"ssh1: junk character in version line\")\n\t\t}\n\t}\n\tif _, err = rw.Write(append(versionLine, '\\r', '\\n')); err != nil {\n\t\treturn\n\t}\n\n\tthem, err = readVersion(rw)\n\treturn them, err\n}", "func (b *BulkLGTM) ServePRDiff(res http.ResponseWriter, req *http.Request) {\n\tprNumber, err := strconv.Atoi(req.URL.Query().Get(\"number\"))\n\tif err != nil {\n\t\tres.Header().Set(\"Content-type\", \"text/plain\")\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tobj := b.FindPR(prNumber)\n\tif obj == nil {\n\t\tres.Header().Set(\"Content-type\", \"text/plain\")\n\t\tres.WriteHeader(http.StatusNotFound)\n\t\treturn\n\t}\n\tpr, _ := obj.GetPR()\n\tresp, err := http.Get(*pr.DiffURL)\n\tif err != nil {\n\t\tres.Header().Set(\"Content-type\", \"text/plain\")\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\tdata, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tres.Header().Set(\"Content-type\", \"text/plain\")\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tres.Header().Set(\"Content-Type\", \"text/plain\")\n\tres.WriteHeader(http.StatusOK)\n\tres.Write(data)\n}", "func copyJobToHookQueueJob(src common.Job) (dst HookQueueOrderJobData) {\n\tdst.ID = src.UUID\n\tdst.Name = src.Name\n\tdst.Status = src.Status\n\tdst.Owner = src.Owner\n\tdst.StartTime = src.StartTime\n\tdst.CrackedHashes = src.CrackedHashes\n\tdst.TotalHashes = src.TotalHashes\n\tdst.Progress = src.Progress\n\n\treturn dst\n}", "func newTestRelease() *Release {\n\tv, _ := version.NewVersion(\"1.0.0\")\n\tt, _ := time.Parse(time.RFC1123Z, \"Fri, 13 May 2016 12:00:00 +0200\")\n\n\treturn &Release{\n\t\tversion: v,\n\t\tbuild: \"1000\",\n\t\ttitle: \"Test\",\n\t\tdescription: \"Test\",\n\t\tpublishedDateTime: NewPublishedDateTime(&t),\n\t\treleaseNotesLink: \"https://example.com/changelogs/1.0.0.html\",\n\t\tminimumSystemVersion: \"10.9\",\n\t\tdownloads: []Download{\n\t\t\t*NewDownload(\"https://example.com/1.0.0/one.dmg\", \"application/octet-stream\", 100000),\n\t\t\t*NewDownload(\"https://example.com/1.0.0/two.dmg\", \"application/octet-stream\", 100000),\n\t\t},\n\t\tisPreRelease: false,\n\t}\n}", "func Test4(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"4.5.6\"\n\tnewCollection.Spec.DesiredState = \"inactive\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tRepositoryUrl: \"https://github.com/some/collection/alternate-kabanero-index.yaml\",\n\t\tVersion: \"4.5.6\",\n\t\tDesiredState: \"inactive\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. 
New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n}", "func (s *DiffCmd) All(w io.Writer) error {\n\tdiffer, err := diff.New(s.releaseRepo, s.release1, s.release2)\n\tif err != nil {\n\t\treturn err\n\t}\n\td, err := differ.Diff()\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.printDiffResult(w, d)\n\treturn nil\n}", "func (ghp *GithubProvider) GenerateReleaseText(mainRepo []string, coreRepos [][]string, otherRepos [][]string, major *bool, minor *bool, patch *bool) ([]string, string) {\n\n\tchanges, lastRelease, GhpErr := ghp.getChangesSinceLastRelease(mainRepo[0], mainRepo[1])\n\ttu.CheckExit(GhpErr)\n\n\tcurrentVersion := lastRelease.TagName\n\tnextVersion := ghp.calculateNextVersion(currentVersion, major, minor, patch)\n\n\tvar releaseText []string\n\n\tfmt.Println(\"nextVersion :: \" + nextVersion)\n\tfmt.Println(\"currentVersion :: \" + *currentVersion)\n\treleaseText = append(releaseText, \"# Changes since last version:\\n\")\n\treleaseText = append(releaseText, \"## Changes to \"+mainRepo[0]+\"/\"+mainRepo[1]+\":\\n\")\n\tfor _, change := range changes {\n\t\treleaseText = append(releaseText, fmt.Sprintf(\" - %s see %s/%s/pull/%d \\n\", change.Title, mainRepo[0], mainRepo[1], change.PrNum))\n\t}\n\treleaseText = append(releaseText, \"# Changes to core repositories:\\n\")\n\tfor _, coreRepo := range coreRepos {\n\t\tcoreChanges, changeErr := ghp.getChangesSinceRelease(lastRelease, coreRepo[0], coreRepo[1])\n\t\ttu.CheckWarn(changeErr)\n\t\treleaseText = append(releaseText, \"## Changes to \"+coreRepo[0]+\"/\"+coreRepo[1]+\":\\n\")\n\t\tif len(coreChanges) == 0 {\n\t\t\treleaseText = append(releaseText, \" - No changes in this version\")\n\t\t}\n\t\tfor _, coreChange := range coreChanges {\n\t\t\treleaseText = append(releaseText, fmt.Sprintf(\" - %s see %s/%s/pull/%d \\n\", coreChange.Title, coreRepo[0], coreRepo[1], coreChange.PrNum))\n\t\t}\n\t}\n\tfmt.Println(\"--------------------------------------------------\")\n\tfmt.Println(\"The Folowing repos will also be tagged with \" + nextVersion)\n\tfmt.Println(\"--------------------------------------------------\")\n\tfmt.Println(mainRepo[0] + \"/\" + mainRepo[1])\n\tfor _, repo := range coreRepos {\n\t\tfmt.Println(repo[0] + \"/\" + repo[1])\n\t}\n\tfor _, repo := range otherRepos {\n\t\tfmt.Println(repo[0] + \"/\" + repo[1])\n\t}\n\n\treturn releaseText, nextVersion\n}", "func (q *Queue) updateQueue() {\n\tpurge := []int{}\n\t// Loop through jobs and get the status of running jobs\n\tfor i, _ := range q.stack {\n\t\tif q.stack[i].Status == common.STATUS_RUNNING {\n\t\t\t// Build status update call\n\t\t\tjobStatus := common.RPCCall{Job: q.stack[i]}\n\n\t\t\terr := q.pool[q.stack[i].ResAssigned].Client.Call(\"Queue.TaskStatus\", jobStatus, &q.stack[i])\n\t\t\t// we care about the errors, but only from a logging perspective\n\t\t\tif err != nil {\n\t\t\t\tlog.WithField(\"rpc error\", err.Error()).Error(\"Error during RPC call.\")\n\t\t\t}\n\n\t\t\t// Check if this is now no longer running\n\t\t\tif q.stack[i].Status != common.STATUS_RUNNING {\n\t\t\t\t// Release the resources from this change\n\t\t\t\tlog.WithField(\"JobID\", q.stack[i].UUID).Debug(\"Job has finished.\")\n\n\t\t\t\t// Call out to the registered hooks that the job is complete\n\t\t\t\tgo HookOnJobFinish(Hooks.JobFinish, q.stack[i])\n\n\t\t\t\tvar hw string\n\t\t\t\tfor _, v := range q.pool[q.stack[i].ResAssigned].Tools {\n\t\t\t\t\tif v.UUID == q.stack[i].ToolUUID {\n\t\t\t\t\t\thw = 
v.Requirements\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tq.pool[q.stack[i].ResAssigned].Hardware[hw] = true\n\n\t\t\t\t// Set a purge time\n\t\t\t\tq.stack[i].PurgeTime = time.Now().Add(time.Duration(q.jpurge*24) * time.Hour)\n\t\t\t\t// Log purge time\n\t\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\t\"JobID\": q.stack[i].UUID,\n\t\t\t\t\t\"PurgeTime\": q.stack[i].PurgeTime,\n\t\t\t\t}).Debug(\"Updated PurgeTime value\")\n\t\t\t}\n\t\t}\n\n\t\t// Check and delete jobs past their purge timer\n\t\tif q.stack[i].Status == common.STATUS_DONE || q.stack[i].Status == common.STATUS_FAILED || q.stack[i].Status == common.STATUS_QUIT {\n\t\t\tif time.Now().After(q.stack[i].PurgeTime) {\n\t\t\t\tpurge = append(purge, i)\n\t\t\t}\n\t\t}\n\t}\n\n\t// Do we need to purge?\n\tif len(purge) > 0 {\n\t\t// Let the purge begin\n\t\tnewStack := []common.Job{}\n\t\t// Loop on the stack looking for index values that patch a value in the purge\n\t\tfor i := range q.stack {\n\t\t\t// Check if our index is in the purge\n\t\t\tvar inPurge bool\n\t\t\tfor _, v := range purge {\n\t\t\t\tif i == v {\n\t\t\t\t\tinPurge = true\t\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// It is not in the purge so append to new stack\n\t\t\tif !inPurge {\n\t\t\t\tnewStack = append(newStack, q.stack[i])\n\t\t\t}\n\t\t}\n\t\tq.stack = newStack\n\t}\n}", "func (u *UpgradeFile) GetUpgradeFileBetweenVersions(prev, target string) (*UpgradeFile, error) {\n\tprevVersion, err := semver.NewVersion(prev)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// for each upgrade note, determine if it's geq than the previous version, leq the target\n\t// version\n\tresNotes := make([]*UpgradeNote, 0)\n\n\tfor _, note := range u.UpgradeNotes {\n\t\tnotePrevVersion, err := semver.NewVersion(note.PreviousVersion)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tnoteTargetVersion, err := semver.NewVersion(note.TargetVersion)\n\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// if note(prev) <= prev and note(next) >= prev, render the note\n\t\tif comp := notePrevVersion.Compare(prevVersion); comp != -1 {\n\t\t\tif comp := noteTargetVersion.Compare(prevVersion); comp != -1 {\n\t\t\t\tresNotes = append(resNotes, note)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn &UpgradeFile{\n\t\tUpgradeNotes: resNotes,\n\t}, nil\n}", "func (df *dsortFramework) job() string {\n\tif df.managerUUID == \"\" {\n\t\treturn \"dsort[-]\"\n\t}\n\treturn \"dsort[\" + df.managerUUID + \"]\"\n}", "func (u updateDownloadRevision) apply(data *journalPersist) {\n\tif len(u.NewRevisionTxn.FileContractRevisions) == 0 {\n\t\tbuild.Critical(\"updateDownloadRevision is missing its FileContractRevision\")\n\t\treturn\n\t}\n\trev := u.NewRevisionTxn.FileContractRevisions[0]\n\tc := data.Contracts[rev.ParentID.String()]\n\tc.LastRevisionTxn = u.NewRevisionTxn\n\tc.DownloadSpending = u.NewDownloadSpending\n\tdata.Contracts[rev.ParentID.String()] = c\n}", "func (r *Release) rollback(cs *currentState, p *plan) {\n\trs, ok := cs.releases[r.key()]\n\tif !ok {\n\t\treturn\n\t}\n\n\tif r.Namespace == rs.Namespace {\n\n\t\tcmd := helmCmd(concat([]string{\"rollback\", r.Name, rs.getRevision()}, r.getWait(), r.getTimeout(), r.getNoHooks(), flags.getRunFlags()), \"Rolling back release [ \"+r.Name+\" ] in namespace [ \"+r.Namespace+\" ]\")\n\t\tp.addCommand(cmd, r.Priority, r, []hookCmd{}, []hookCmd{})\n\t\tr.upgrade(p) // this is to reflect any changes in values file(s)\n\t\tp.addDecision(\"Release [ \"+r.Name+\" ] was deleted and is desired to be rolled back to \"+\n\t\t\t\"namespace [ \"+r.Namespace+\" ]\", r.Priority, 
create)\n\t} else {\n\t\tr.reInstall(p)\n\t\tp.addDecision(\"Release [ \"+r.Name+\" ] is deleted BUT from namespace [ \"+rs.Namespace+\n\t\t\t\" ]. Will purge delete it from there and install it in namespace [ \"+r.Namespace+\" ]\", r.Priority, create)\n\t\tp.addDecision(\"WARNING: rolling back release [ \"+r.Name+\" ] from [ \"+rs.Namespace+\" ] to [ \"+r.Namespace+\n\t\t\t\" ] might not correctly connect to existing volumes. Check https://github.com/Praqma/helmsman/blob/master/docs/how_to/apps/moving_across_namespaces.md\"+\n\t\t\t\" for details if this release uses PV and PVC.\", r.Priority, create)\n\t}\n}", "func CheckAndDownloadWebGUIRelease(checkUpdate bool, forceUpdate bool, fetchURL string, cacheDir string) (err error) {\n\tcachePath := filepath.Join(cacheDir, \"webgui\")\n\ttagPath := filepath.Join(cachePath, \"tag\")\n\textractPath := filepath.Join(cachePath, \"current\")\n\n\textractPathExist, extractPathStat, err := exists(extractPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif extractPathExist && !extractPathStat.IsDir() {\n\t\treturn errors.New(\"Web GUI path exists, but is a file instead of folder. Please check the path \" + extractPath)\n\t}\n\n\t// Get the latest release details\n\tWebUIURL, tag, size, err := GetLatestReleaseURL(fetchURL)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error checking for web gui release update, skipping update: %w\", err)\n\t}\n\tdat, err := os.ReadFile(tagPath)\n\ttagsMatch := false\n\tif err != nil {\n\t\tfs.Errorf(nil, \"Error reading tag file at %s \", tagPath)\n\t\tcheckUpdate = true\n\t} else if string(dat) == tag {\n\t\ttagsMatch = true\n\t}\n\tfs.Debugf(nil, \"Current tag: %s, Release tag: %s\", string(dat), tag)\n\n\tif !tagsMatch {\n\t\tfs.Infof(nil, \"A release (%s) for gui is present at %s. Use --rc-web-gui-update to update. Your current version is (%s)\", tag, WebUIURL, string(dat))\n\t}\n\n\t// if the old file exists does not exist or forced update is enforced.\n\t// TODO: Add hashing to check integrity of the previous update.\n\tif !extractPathExist || checkUpdate || forceUpdate {\n\n\t\tif tagsMatch {\n\t\t\tfs.Logf(nil, \"No update to Web GUI available.\")\n\t\t\tif !forceUpdate {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tfs.Logf(nil, \"Force update the Web GUI binary.\")\n\t\t}\n\n\t\tzipName := tag + \".zip\"\n\t\tzipPath := filepath.Join(cachePath, zipName)\n\n\t\tcachePathExist, cachePathStat, _ := exists(cachePath)\n\t\tif !cachePathExist {\n\t\t\tif err := file.MkdirAll(cachePath, 0755); err != nil {\n\t\t\t\treturn errors.New(\"Error creating cache directory: \" + cachePath)\n\t\t\t}\n\t\t}\n\n\t\tif cachePathExist && !cachePathStat.IsDir() {\n\t\t\treturn errors.New(\"Web GUI path is a file instead of folder. Please check it \" + extractPath)\n\t\t}\n\n\t\tfs.Logf(nil, \"A new release for gui (%s) is present at %s\", tag, WebUIURL)\n\t\tfs.Logf(nil, \"Downloading webgui binary. Please wait. 
[Size: %s, Path : %s]\\n\", strconv.Itoa(size), zipPath)\n\n\t\t// download the zip from latest url\n\t\terr = DownloadFile(zipPath, WebUIURL)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = os.RemoveAll(extractPath)\n\t\tif err != nil {\n\t\t\tfs.Logf(nil, \"No previous downloads to remove\")\n\t\t}\n\t\tfs.Logf(nil, \"Unzipping webgui binary\")\n\n\t\terr = Unzip(zipPath, extractPath)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = os.RemoveAll(zipPath)\n\t\tif err != nil {\n\t\t\tfs.Logf(nil, \"Downloaded ZIP cannot be deleted\")\n\t\t}\n\n\t\terr = os.WriteFile(tagPath, []byte(tag), 0644)\n\t\tif err != nil {\n\t\t\tfs.Infof(nil, \"Cannot write tag file. You may be required to redownload the binary next time.\")\n\t\t}\n\t} else {\n\t\tfs.Logf(nil, \"Web GUI exists. Update skipped.\")\n\t}\n\n\treturn nil\n}", "func TestJobTaskAndAllocationAPI(t *testing.T) {\n\trequire.NoError(t, etc.SetRootPath(RootFromDB))\n\tdb := MustResolveTestPostgres(t)\n\tMustMigrateTestPostgres(t, db, MigrationsFromDB)\n\n\t// Add a mock user.\n\tuser := RequireMockUser(t, db)\n\n\t// Add a job.\n\tjID := model.NewJobID()\n\tjIn := &model.Job{\n\t\tJobID: jID,\n\t\tJobType: model.JobTypeExperiment,\n\t\tOwnerID: &user.ID,\n\t\tQPos: decimal.New(0, 0),\n\t}\n\terr := db.AddJob(jIn)\n\trequire.NoError(t, err, \"failed to add job\")\n\n\t// Retrieve it back and make sure the mapping is exhaustive.\n\tjOut, err := db.JobByID(jID)\n\trequire.NoError(t, err, \"failed to retrieve job\")\n\trequire.True(t, reflect.DeepEqual(jIn, jOut), pprintedExpect(jIn, jOut))\n\n\t// Add a task.\n\ttID := model.NewTaskID()\n\ttIn := &model.Task{\n\t\tTaskID: tID,\n\t\tJobID: &jID,\n\t\tTaskType: model.TaskTypeTrial,\n\t\tStartTime: time.Now().UTC().Truncate(time.Millisecond),\n\t}\n\terr = db.AddTask(tIn)\n\trequire.NoError(t, err, \"failed to add task\")\n\n\t// Retrieve it back and make sure the mapping is exhaustive.\n\ttOut, err := db.TaskByID(tID)\n\trequire.NoError(t, err, \"failed to retrieve task\")\n\trequire.True(t, reflect.DeepEqual(tIn, tOut), pprintedExpect(tIn, tOut))\n\n\t// Complete it.\n\ttIn.EndTime = ptrs.Ptr(time.Now().UTC().Truncate(time.Millisecond))\n\terr = db.CompleteTask(tID, *tIn.EndTime)\n\trequire.NoError(t, err, \"failed to mark task completed\")\n\n\t// Re-retrieve it back and make sure the mapping is still exhaustive.\n\ttOut, err = db.TaskByID(tID)\n\trequire.NoError(t, err, \"failed to re-retrieve task\")\n\trequire.True(t, reflect.DeepEqual(tIn, tOut), pprintedExpect(tIn, tOut))\n\n\t// And an allocation.\n\tports := map[string]int{}\n\tports[\"dtrain_port\"] = 0\n\tports[\"inter_train_process_comm_port1\"] = 0\n\tports[\"inter_train_process_comm_port2\"] = 0\n\tports[\"c10d_port\"] = 0\n\n\taID := model.AllocationID(string(tID) + \"-1\")\n\taIn := &model.Allocation{\n\t\tAllocationID: aID,\n\t\tTaskID: tID,\n\t\tSlots: 8,\n\t\tResourcePool: \"somethingelse\",\n\t\tStartTime: ptrs.Ptr(time.Now().UTC().Truncate(time.Millisecond)),\n\t\tPorts: ports,\n\t}\n\terr = db.AddAllocation(aIn)\n\trequire.NoError(t, err, \"failed to add allocation\")\n\n\t// Update ports\n\tports[\"dtrain_port\"] = 0\n\tports[\"inter_train_process_comm_port1\"] = 0\n\tports[\"inter_train_process_comm_port2\"] = 0\n\tports[\"c10d_port\"] = 0\n\taIn.Ports = ports\n\terr = UpdateAllocationPorts(*aIn)\n\trequire.NoError(t, err, \"failed to update port offset\")\n\n\t// Retrieve it back and make sure the mapping is exhaustive.\n\taOut, err := db.AllocationByID(aIn.AllocationID)\n\trequire.NoError(t, err, 
\"failed to retrieve allocation\")\n\trequire.True(t, reflect.DeepEqual(aIn, aOut), pprintedExpect(aIn, aOut))\n\n\t// Complete it.\n\taIn.EndTime = ptrs.Ptr(time.Now().UTC().Truncate(time.Millisecond))\n\terr = db.CompleteAllocation(aIn)\n\trequire.NoError(t, err, \"failed to mark allocation completed\")\n\n\t// Re-retrieve it back and make sure the mapping is still exhaustive.\n\taOut, err = db.AllocationByID(aIn.AllocationID)\n\trequire.NoError(t, err, \"failed to re-retrieve allocation\")\n\trequire.True(t, reflect.DeepEqual(aIn, aOut), pprintedExpect(aIn, aOut))\n}", "func matchJobs(jobs []*Job, p *JobSearchParams) []*Job {\n\trv := []*Job{}\n\tfor _, j := range jobs {\n\t\t// Compare all attributes which are provided.\n\t\tif true &&\n\t\t\t!p.TimeStart.After(j.Created) &&\n\t\t\tj.Created.Before(p.TimeEnd) &&\n\t\t\tsearchStringEqual(p.Issue, j.Issue) &&\n\t\t\tsearchStringEqual(p.Patchset, j.Patchset) &&\n\t\t\tsearchStringEqual(p.Server, j.Server) &&\n\t\t\tsearchStringEqual(p.Repo, j.Repo) &&\n\t\t\tsearchStringEqual(p.Revision, j.Revision) &&\n\t\t\tsearchStringEqual(p.Name, j.Name) &&\n\t\t\tsearchStringEqual(string(p.Status), string(j.Status)) &&\n\t\t\tsearchBoolEqual(p.IsForce, j.IsForce) &&\n\t\t\tsearchInt64Equal(p.BuildbucketBuildId, j.BuildbucketBuildId) {\n\t\t\trv = append(rv, j)\n\t\t}\n\t}\n\treturn rv\n}", "func DownloadRelease(result string, osType string) error {\n\n\t// TODO: Figure out why we can download in a temporal folder and then move that file\n\t// Create the download file\n\ttemporalPath := fmt.Sprintf(\"%s/helm-%s\", tmpPath, result)\n\ttmpFile, err := os.Create(temporalPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer tmpFile.Close()\n\n\t// Perform request to get helm releases\n\turl := \"https://get.helm.sh/\"\n\thelm := fmt.Sprintf(\"%shelm-%s-%s.tar.gz\", url, result, osType)\n\tresp, err := http.Get(helm)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif resp.StatusCode != 200 {\n\t\terr := errors.New(string(fmt.Sprintf(\"Trying to download from %s\", url)))\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\t// Set progress bar\n\tvar progressBar *pb.ProgressBar\n\tcontentLength, err := strconv.Atoi(resp.Header.Get(\"Content-Length\"))\n\tif err != nil {\n\t\tprogressBar = pb.New(0)\n\t} else {\n\t\tprogressBar = pb.New(int(contentLength))\n\t}\n\n\t// Set progress bar settings\n\t// TOOD: Use writers to log into logrus\n\tprogressBar.ShowSpeed = true\n\tprogressBar.SetWidth(80)\n\tprogressBar.SetRefreshRate(time.Millisecond * 1000)\n\tprogressBar.SetUnits(pb.U_BYTES)\n\tprogressBar.Start()\n\n\t// Create Writer and read data transfered\n\twriter := io.MultiWriter(tmpFile, progressBar)\n\t_, err = io.Copy(writer, resp.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\tprogressBar.Finish()\n\n\treturn nil\n}", "func AvailableUpdates(repoOrg string, repoName string, currentVersion string) (avail bool, newVersion string, releaseURL string, err error) {\n\tnewVersion = \"\"\n\tctx := context.Background()\n\tclient := ddevgh.GetGithubClient(ctx)\n\topt := &github.ListOptions{Page: 1}\n\treleases, _, err := client.Repositories.ListReleases(ctx, repoOrg, repoName, opt)\n\tif err != nil {\n\t\treturn false, newVersion, \"\", err\n\t}\n\n\tif isReleaseVersion(currentVersion) {\n\t\tcv, err := semver.NewVersion(currentVersion)\n\t\tif err != nil {\n\t\t\treturn false, newVersion, \"\", err\n\t\t}\n\t\tfor _, release := range releases {\n\t\t\tif *release.Prerelease {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tnewReleaseVersion, err := 
semver.NewVersion(*release.TagName)\n\t\t\tif err != nil {\n\t\t\t\treturn false, newVersion, \"\", err\n\t\t\t}\n\t\t\tnewVersion = *release.TagName\n\n\t\t\tif cv.Compare(newReleaseVersion) == -1 {\n\t\t\t\treturn true, newVersion, *release.HTMLURL, nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn false, newVersion, \"\", nil\n}", "func Test6(t *testing.T) {\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection.Spec.RepositoryUrl = \"https://github.com/some/collection/other-alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Version = \"7.8.9\"\n\tnewCollection.Spec.DesiredState = \"active\"\n\tnewCollection.Spec.Versions[0].RepositoryUrl = \"https://github.com/some/collection/alternate-kabanero-index.yaml\"\n\tnewCollection.Spec.Versions[0].Version = \"4.5.6\"\n\tnewCollection.Spec.Versions[0].DesiredState = \"inactive\"\n\terr := processUpdate(&mutatingBaseCollection, newCollection)\n\tif err == nil {\n\t\tt.Fatal(\"An error condition should have been reported. New collection.Spec and new collection.Spec.versions[0] contain conflicting data.\", err)\n\t}\n}", "func Test12(t *testing.T) {\n\tcustommutatingBaseCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tnewCollection := mutatingBaseCollectionVersions.DeepCopy()\n\tcustommutatingBaseCollection.Spec.Version = \"1.2.3\"\n\tnewCollection.Spec.Version = \"1.2.4\"\n\tcustommutatingBaseCollection.Spec.Versions[0].Version = \"1.2.4\"\n\tnewCollection.Spec.Versions[0].Version = \"1.2.4\"\n\n\terr := processUpdate(custommutatingBaseCollection, newCollection)\n\tif err != nil {\n\t\tt.Fatal(\"Unexpected error during mutation.\", err)\n\t}\n\n\texpectedversion0 := kabanerov1alpha1.CollectionVersion{\n\t\tDesiredState: \"active\",\n\t\tRepositoryUrl: \"https://github.com/some/collection/kabanero-index.yaml\",\n\t\tVersion: \"1.2.4\"}\n\n\tif newCollection.Spec.Versions[0] != expectedversion0 {\n\t\tt.Fatal(\"New collection.Spec.Versions[0] values do not match expected collection.Spec.Versions[0] values. New versions[0]: \", newCollection.Spec.Versions[0], \"Expected versions[0]: \", expectedversion0)\n\t}\n\n\tif newCollection.Spec.RepositoryUrl != \"https://github.com/some/collection/kabanero-index.yaml\" {\n\t\tt.Fatal(\"New collection.Spec.RepositoryUrl values do not match expected value of https://github.com/some/collection/kabanero-index.yaml. RepositoryUrl found: \", newCollection.Spec.RepositoryUrl)\n\t}\n\tif newCollection.Spec.Version != \"1.2.4\" {\n\t\tt.Fatal(\"New collection.Spec.Version values do not match expected value of 1.2.3. Version found: \", newCollection.Spec.Version)\n\t}\n\tif newCollection.Spec.DesiredState != \"active\" {\n\t\tt.Fatal(\"New collection.Spec.DesiredState values do not match expected value of active. 
DesiredStateme found: \", newCollection.Spec.DesiredState)\n\t}\n}", "func (c *Client) ReleasesSince(t time.Time) ([]db.Release, error) {\n\tchanges := [][]interface{}{}\n\n\terr := c.client.Call(\"changelog\", []interface{}{t.Unix(), true}, &changes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treleases := []db.Release{}\n\tfor _, change := range changes {\n\t\tif change[3] == \"new release\" {\n\t\t\treleases = append(releases, db.Release{\n\t\t\t\tName: change[0].(string),\n\t\t\t\tVersion: change[1].(string),\n\t\t\t\tTime: time.Unix(change[2].(int64), 0),\n\t\t\t})\n\t\t}\n\n\t}\n\treturn releases, nil\n}", "func storeChangesAsRecordDeltasExpectations(ctx context.Context, db *pgxpool.Pool, changes map[groupingDigest]finalState, ts time.Time) error {\n\tctx, span := trace.StartSpan(ctx, \"storeChangesAsRecordDeltasExpectations\")\n\tdefer span.End()\n\tif len(changes) == 0 {\n\t\treturn nil\n\t}\n\t// We want to make one triage record for each user who triaged data on this CL. Those records\n\t// will represent the final state.\n\tbyUser := map[string][]schema.ExpectationDeltaRow{}\n\tfor gd, fs := range changes {\n\t\tif fs.labelBefore == fs.labelAfter {\n\t\t\tcontinue // skip \"no-op\" triages, where something was triaged in one way, then undone.\n\t\t}\n\t\tbyUser[fs.userWhoTriagedLast] = append(byUser[fs.userWhoTriagedLast], schema.ExpectationDeltaRow{\n\t\t\tGroupingID: sql.FromMD5Hash(gd.grouping),\n\t\t\tDigest: sql.FromMD5Hash(gd.digest),\n\t\t\tLabelBefore: fs.labelBefore,\n\t\t\tLabelAfter: fs.labelAfter,\n\t\t})\n\t}\n\tfor user, deltas := range byUser {\n\t\tif len(deltas) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\trecordID := uuid.New()\n\t\t// Write the record for this user\n\t\terr := crdbpgx.ExecuteTx(ctx, db, pgx.TxOptions{}, func(tx pgx.Tx) error {\n\t\t\t_, err := tx.Exec(ctx, `\nINSERT INTO ExpectationRecords (expectation_record_id, user_name, triage_time, num_changes)\nVALUES ($1, $2, $3, $4)`, recordID, user, ts, len(deltas))\n\t\t\treturn err // Don't wrap - crdbpgx might retry\n\t\t})\n\t\tif err != nil {\n\t\t\treturn skerr.Wrapf(err, \"storing record\")\n\t\t}\n\t\tif err := bulkWriteDeltas(ctx, db, recordID, deltas); err != nil {\n\t\t\treturn skerr.Wrapf(err, \"storing deltas\")\n\t\t}\n\t\tif err := bulkWriteExpectations(ctx, db, recordID, deltas); err != nil {\n\t\t\treturn skerr.Wrapf(err, \"storing expectations\")\n\t\t}\n\t}\n\treturn nil\n}", "func nextReleaseAfterGivenVersionFromVersionList(givenVersion *semver.Version, versionList []*spi.Version, releasesFromGivenVersion int) (*semver.Version, error) {\n\tversionBuckets := map[string]*semver.Version{}\n\n\t// Assemble a map that lists a release (x.y.0) to its latest version, with nightlies taking precedence over all else\n\tfor _, version := range versionList {\n\t\tversionSemver := version.Version()\n\t\tmajorMinor := createMajorMinorStringFromSemver(versionSemver)\n\n\t\tif _, ok := versionBuckets[majorMinor]; !ok {\n\t\t\tversionBuckets[majorMinor] = versionSemver\n\t\t} else {\n\t\t\tcurrentGreatestVersion := versionBuckets[majorMinor]\n\t\t\tversionIsNightly := strings.Contains(versionSemver.Prerelease(), \"nightly\")\n\t\t\tcurrentIsNightly := strings.Contains(currentGreatestVersion.Prerelease(), \"nightly\")\n\n\t\t\t// Make sure nightlies take precedence over other versions\n\t\t\tif versionIsNightly && !currentIsNightly {\n\t\t\t\tversionBuckets[majorMinor] = versionSemver\n\t\t\t} else if currentIsNightly && !versionIsNightly {\n\t\t\t\tcontinue\n\t\t\t} else if 
currentGreatestVersion.LessThan(versionSemver) {\n\t\t\t\tversionBuckets[majorMinor] = versionSemver\n\t\t\t}\n\t\t}\n\t}\n\n\t// Parse all major minor versions (x.y.0) into semver versions and place them in an array.\n\t// This is done explicitly so that we can utilize the semver library's sorting capability.\n\tmajorMinorList := []*semver.Version{}\n\tfor k := range versionBuckets {\n\t\tparsedMajorMinor, err := semver.NewVersion(k)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tmajorMinorList = append(majorMinorList, parsedMajorMinor)\n\t}\n\n\tsort.Sort(semver.Collection(majorMinorList))\n\n\t// Now that the list is sorted, we want to locate the major minor of the given version in the list.\n\tgivenMajorMinor, err := semver.NewVersion(createMajorMinorStringFromSemver(givenVersion))\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tindexOfGivenMajorMinor := -1\n\tfor i, majorMinor := range majorMinorList {\n\t\tif majorMinor.Equal(givenMajorMinor) {\n\t\t\tindexOfGivenMajorMinor = i\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif indexOfGivenMajorMinor == -1 {\n\t\treturn nil, fmt.Errorf(\"unable to find given version from list of available versions\")\n\t}\n\n\t// Next, we'll go the given version distance ahead of the given version. We want to do it this way instead of guessing\n\t// the next minor release so that we can handle major releases in the future, In other words, if the Openshift\n\t// 4.y line stops at 4.13, we'll still be able to pick 5.0 if it's the next release after 4.13.\n\tnextMajorMinorIndex := indexOfGivenMajorMinor + releasesFromGivenVersion\n\n\tif len(majorMinorList) <= nextMajorMinorIndex {\n\t\treturn nil, fmt.Errorf(\"there is no eligible next release from the list of available versions\")\n\t}\n\tnextMajorMinor := createMajorMinorStringFromSemver(majorMinorList[nextMajorMinorIndex])\n\n\tif _, ok := versionBuckets[nextMajorMinor]; !ok {\n\t\treturn nil, fmt.Errorf(\"no major/minor version found for %s\", nextMajorMinor)\n\t}\n\n\treturn versionBuckets[nextMajorMinor], nil\n}", "func runReleaseCmd(cmd *cobra.Command, args []string) {\n\tconfigFile, _ := cmd.Flags().GetString(\"config\")\n\tconfig := &config.Config{}\n\terr := config.Load(configFile)\n\tif err != nil {\n\t\tfmt.Printf(\"could not load config file: %v\\n\", err)\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\n\tspinner, err := initSpinner(fmt.Sprintf(\"Releasing v%s of %s\", args[0], config.Repository))\n\tif err != nil {\n\t\tfmt.Println(\"could not init spinner\")\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\tspinner.Start()\n\n\tnewRelease, err := github.NewRelease(config, args, spinner)\n\tif err != nil {\n\t\tspinner.StopFailMessage(fmt.Sprintf(\"%v\", err))\n\t\tspinner.StopFail()\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\n\tcl, err := changelog.HandleChangelog(newRelease.ProjectName, newRelease.Version, newRelease.Date, spinner)\n\tif err != nil {\n\t\tspinner.StopFailMessage(fmt.Sprintf(\"%v\", err))\n\t\tspinner.StopFail()\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\n\tnewRelease.Changelog = cl\n\n\tvar binaryPath string\n\tskipBinary, _ := cmd.Flags().GetBool(\"skipBinary\")\n\tif !skipBinary {\n\t\t// set project build path so we have a predictable location\n\t\tbinaryPath = fmt.Sprintf(binaryPathFmt, newRelease.ProjectName, newRelease.Version)\n\t\trunBuildCmd(cmd, []string{newRelease.Version, binaryPath})\n\t}\n\n\ttokenFile, _ := cmd.Flags().GetString(\"tokenFile\")\n\terr = newRelease.CreateGithubRelease(tokenFile, binaryPath, spinner)\n\tif err != nil {\n\t\tspinner.StopFailMessage(fmt.Sprintf(\"%v\", 
err))\n\t\tspinner.StopFail()\n\t\tos.Exit(1)\n\t\treturn\n\t}\n\n\tspinner.Suffix(\" Finished release\")\n\tspinner.Stop()\n}", "func mergeProllyTableData(ctx *sql.Context, tm *TableMerger, finalSch schema.Schema, mergeTbl *doltdb.Table, valueMerger *valueMerger) (*doltdb.Table, *MergeStats, error) {\n\titer, err := threeWayDiffer(ctx, tm, valueMerger)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tlr, err := tm.leftTbl.GetRowData(ctx)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tleftEditor := durable.ProllyMapFromIndex(lr).Rewriter(finalSch.GetKeyDescriptor(), finalSch.GetValueDescriptor())\n\n\tai, err := mergeTbl.GetArtifacts(ctx)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tartEditor := durable.ProllyMapFromArtifactIndex(ai).Editor()\n\n\tkeyless := schema.IsKeyless(tm.leftSch)\n\n\tpri, err := newPrimaryMerger(leftEditor, tm, valueMerger, finalSch)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tsec, err := newSecondaryMerger(ctx, tm, valueMerger, finalSch)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tconflicts, err := newConflictMerger(ctx, tm, artEditor)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tcheckValidator, err := newCheckValidator(ctx, tm, valueMerger, finalSch, artEditor)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// validator shares an artifact editor with conflict merge\n\tuniq, err := newUniqValidator(ctx, finalSch, tm, valueMerger, artEditor)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tnullChk, err := newNullValidator(ctx, finalSch, tm, valueMerger, artEditor, leftEditor, sec.leftMut)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\ts := &MergeStats{\n\t\tOperation: TableModified,\n\t}\n\tfor {\n\t\tdiff, err := iter.Next(ctx)\n\t\tif errors.Is(err, io.EOF) {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\tcnt, err := uniq.validateDiff(ctx, diff)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\ts.ConstraintViolations += cnt\n\n\t\tcnt, err = nullChk.validateDiff(ctx, diff)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\ts.ConstraintViolations += cnt\n\t\tif cnt > 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tcnt, err = checkValidator.validateDiff(ctx, diff)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\ts.ConstraintViolations += cnt\n\n\t\tswitch diff.Op {\n\t\tcase tree.DiffOpDivergentModifyConflict, tree.DiffOpDivergentDeleteConflict:\n\t\t\t// In this case, a modification or delete was made to one side, and a conflicting delete or modification\n\t\t\t// was made to the other side, so these cannot be automatically resolved.\n\t\t\ts.DataConflicts++\n\t\t\terr = conflicts.merge(ctx, diff, nil)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\tcase tree.DiffOpRightAdd:\n\t\t\ts.Adds++\n\t\t\terr = pri.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\terr = sec.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\tcase tree.DiffOpRightModify:\n\t\t\ts.Modifications++\n\t\t\terr = pri.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\terr = sec.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\tcase tree.DiffOpRightDelete:\n\t\t\ts.Deletes++\n\t\t\terr = pri.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\terr = sec.merge(ctx, diff, tm.rightSch)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, 
nil, err\n\t\t\t}\n\t\tcase tree.DiffOpDivergentModifyResolved:\n\t\t\t// In this case, both sides of the merge have made different changes to a row, but we were able to\n\t\t\t// resolve them automatically.\n\t\t\ts.Modifications++\n\t\t\terr = pri.merge(ctx, diff, nil)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\terr = sec.merge(ctx, diff, nil)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\tcase tree.DiffOpConvergentAdd, tree.DiffOpConvergentModify, tree.DiffOpConvergentDelete:\n\t\t\t// In this case, both sides of the merge have made the same change, so no additional changes are needed.\n\t\t\tif keyless {\n\t\t\t\ts.DataConflicts++\n\t\t\t\terr = conflicts.merge(ctx, diff, nil)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, nil, err\n\t\t\t\t}\n\t\t\t}\n\t\tdefault:\n\t\t\t// Currently, all changes are applied to the left-side of the merge, so for any left-side diff ops,\n\t\t\t// we can simply ignore them since that data is already in the destination (the left-side).\n\t\t}\n\t}\n\n\tfinalRows, err := pri.finalize(ctx)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tleftIdxs, rightIdxs, err := sec.finalize(ctx)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfinalIdxs, err := mergeProllySecondaryIndexes(ctx, tm, leftIdxs, rightIdxs, finalSch, finalRows, conflicts.ae)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfinalArtifacts, err := conflicts.finalize(ctx)\n\n\t// collect merged data in |finalTbl|\n\tfinalTbl, err := mergeTbl.UpdateRows(ctx, finalRows)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfinalTbl, err = finalTbl.SetIndexSet(ctx, finalIdxs)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfinalTbl, err = finalTbl.SetArtifacts(ctx, finalArtifacts)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn finalTbl, s, nil\n}", "func statsToUpdatePipeline(projectId string, start time.Time, end time.Time) []bson.M {\n\tpipeline := []bson.M{\n\t\t{\"$match\": bson.M{\n\t\t\ttask.ProjectKey: projectId,\n\t\t\ttask.FinishTimeKey: bson.M{\"$gte\": start, \"$lt\": end},\n\t\t}},\n\t\t{\"$project\": bson.M{\n\t\t\ttask.IdKey: 0,\n\t\t\t\"project\": taskProjectKeyRef,\n\t\t\t\"requester\": taskRequesterKeyRef,\n\t\t\t\"date\": bson.M{\"$dateToString\": bson.M{\"date\": taskCreateTimeKeyRef, \"format\": \"%Y-%m-%d %H\"}},\n\t\t\t\"day\": bson.M{\"$dateToString\": bson.M{\"date\": taskCreateTimeKeyRef, \"format\": \"%Y-%m-%d\"}},\n\t\t\t\"task_name\": taskDisplayNameKeyRef,\n\t\t}},\n\t\t{\"$group\": bson.M{\n\t\t\t\"_id\": bson.M{\n\t\t\t\t\"project\": \"$project\",\n\t\t\t\t\"requester\": \"$requester\",\n\t\t\t\t\"date\": \"$date\",\n\t\t\t\t\"day\": \"$day\",\n\t\t\t},\n\t\t\t\"task_names\": bson.M{\"$addToSet\": \"$task_name\"},\n\t\t}},\n\t\t{\"$project\": bson.M{\n\t\t\t\"_id\": 0,\n\t\t\t\"project\": \"$_id.project\",\n\t\t\t\"requester\": \"$_id.requester\",\n\t\t\t\"date\": bson.M{\"$dateFromString\": bson.M{\"dateString\": \"$_id.date\", \"format\": \"%Y-%m-%d %H\"}},\n\t\t\t\"day\": bson.M{\"$dateFromString\": bson.M{\"dateString\": \"$_id.day\", \"format\": \"%Y-%m-%d\"}},\n\t\t\t\"task_names\": 1,\n\t\t}},\n\t\t{\"$sort\": bson.M{\n\t\t\t\"project\": 1,\n\t\t\t\"date\": 1,\n\t\t\t\"requester\": 1,\n\t\t}},\n\t}\n\treturn pipeline\n}", "func releases(ctx context.Context, c *github.Client, org string, project string) ([]*release, error) {\n\tvar result []*release\n\n\topts := &github.ListOptions{PerPage: 100}\n\n\tklog.Infof(\"Downloading releases for %s/%s ...\", org, project)\n\n\tfor 
page := 1; page != 0; {\n\t\topts.Page = page\n\t\trs, resp, err := c.Repositories.ListReleases(ctx, org, project, opts)\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\n\t\tpage = resp.NextPage\n\t\tuntil := time.Now()\n\n\t\tfor _, r := range rs {\n\t\t\tname := r.GetName()\n\t\t\tif name == \"\" {\n\t\t\t\tname = r.GetTagName()\n\t\t\t}\n\n\t\t\trel := &release{\n\t\t\t\tName: name,\n\t\t\t\tDraft: r.GetDraft(),\n\t\t\t\tPrerelease: r.GetPrerelease(),\n\t\t\t\tPublishedAt: r.GetPublishedAt().Time,\n\t\t\t\tActiveUntil: until,\n\t\t\t\tDownloads: map[string]int{},\n\t\t\t\tDownloadRatios: map[string]float64{},\n\t\t\t}\n\n\t\t\tfor _, a := range r.Assets {\n\t\t\t\tif ignoreAssetRe.MatchString(a.GetName()) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\trel.Downloads[a.GetName()] = a.GetDownloadCount()\n\t\t\t\trel.DownloadsTotal += int64(a.GetDownloadCount())\n\t\t\t}\n\n\t\t\tif !rel.Draft && !rel.Prerelease {\n\t\t\t\tuntil = rel.PublishedAt\n\t\t\t}\n\n\t\t\tresult = append(result, rel)\n\t\t}\n\t}\n\n\tfor _, r := range result {\n\t\tr.DaysActive = r.ActiveUntil.Sub(r.PublishedAt).Hours() / 24\n\t\tr.DownloadsPerDay = float64(r.DownloadsTotal) / r.DaysActive\n\n\t\tfor k, v := range r.Downloads {\n\t\t\tr.DownloadRatios[k] = float64(v) / float64(r.DownloadsTotal)\n\t\t}\n\t}\n\n\treturn result, nil\n}", "func filterJobs(jobs []tc.Job, maxReval time.Duration, minTTL time.Duration) []Job {\n\tjobMap := map[string]time.Time{}\n\tfor _, job := range jobs {\n\t\tif job.DeliveryService == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif !strings.HasPrefix(job.Parameters, `TTL:`) {\n\t\t\tcontinue\n\t\t}\n\t\tif !strings.HasSuffix(job.Parameters, `h`) {\n\t\t\tcontinue\n\t\t}\n\n\t\tttlHoursStr := job.Parameters\n\t\tttlHoursStr = strings.TrimPrefix(ttlHoursStr, `TTL:`)\n\t\tttlHoursStr = strings.TrimSuffix(ttlHoursStr, `h`)\n\t\tttlHours, err := strconv.Atoi(ttlHoursStr)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"job %+v has unexpected parameters ttl format, config generation skipping!\\n\", job)\n\t\t\tcontinue\n\t\t}\n\n\t\tttl := time.Duration(ttlHours) * time.Hour\n\t\tif ttl > maxReval {\n\t\t\tttl = maxReval\n\t\t} else if ttl < minTTL {\n\t\t\tttl = minTTL\n\t\t}\n\n\t\tjobStartTime, err := time.Parse(tc.JobTimeFormat, job.StartTime)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"job %+v has unexpected time format, config generation skipping!\\n\", job)\n\t\t\tcontinue\n\t\t}\n\n\t\tif jobStartTime.Add(maxReval).Before(time.Now()) {\n\t\t\tcontinue\n\t\t}\n\n\t\tif jobStartTime.Add(ttl).Before(time.Now()) {\n\t\t\tcontinue\n\t\t}\n\t\tif job.Keyword != JobKeywordPurge {\n\t\t\tcontinue\n\t\t}\n\n\t\tpurgeEnd := jobStartTime.Add(ttl)\n\n\t\tif existingPurgeEnd, ok := jobMap[job.AssetURL]; !ok || purgeEnd.After(existingPurgeEnd) {\n\t\t\tjobMap[job.AssetURL] = purgeEnd\n\t\t}\n\t}\n\n\tnewJobs := []Job{}\n\tfor assetURL, purgeEnd := range jobMap {\n\t\tnewJobs = append(newJobs, Job{AssetURL: assetURL, PurgeEnd: purgeEnd})\n\t}\n\tsort.Sort(Jobs(newJobs))\n\n\treturn newJobs\n}", "func updateProvisions(t *testing.T, keeper Keeper, pool Pool, ctx sdk.Context, hr int) (sdk.Rat, sdk.Int, Pool) {\n\texpInflation := keeper.nextInflation(ctx)\n\texpProvisions := (expInflation.Mul(sdk.NewRatFromInt(pool.TokenSupply())).Quo(hrsPerYrRat)).EvaluateInt()\n\tstartTotalSupply := pool.TokenSupply()\n\tpool = keeper.processProvisions(ctx)\n\tkeeper.setPool(ctx, pool)\n\n\t//check provisions were added to pool\n\trequire.Equal(t, startTotalSupply.Add(expProvisions).Int64(), pool.TokenSupply().Int64())\n\treturn 
expInflation, expProvisions, pool\n}", "func (p *planner) writeNonDropDatabaseChange(\n\tctx context.Context, desc *dbdesc.Mutable, jobDesc string,\n) error {\n\tif err := p.createNonDropDatabaseChangeJob(ctx, desc.ID, jobDesc); err != nil {\n\t\treturn err\n\t}\n\tb := p.Txn().NewBatch()\n\tif err := p.writeDatabaseChangeToBatch(ctx, desc, b); err != nil {\n\t\treturn err\n\t}\n\treturn p.Txn().Run(ctx, b)\n}", "func version() {\n fmt.Printf(\"v%s\\ncommit=%s\\n\", versionNumber, commitId)\n}", "func TestMeta_DoubleBuffering_Even(t *testing.T) {\n\ttargetVersion := 4\n\n\tt.Run(\"restore works\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\trequire.Equal(t, 4, int(nw.meta.Version))\n\t\trequire.Equal(t, w.meta, nw.meta)\n\t})\n\n\t// simulate when we failed during commiting a newer update\n\t// in slot A - so falls back to previous meta in slot B\n\tt.Run(\"metaA corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.Version--\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val3\"),\n\t\t}\n\t\trequire.Equal(t, 3, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n\n\t// simulate a failure in commiting this latest update\n\t// falls back to previous state in slot A\n\tt.Run(\"metaB corrupt\", func(t *testing.T) {\n\t\tw, f, cleanup := testMeta(t, targetVersion)\n\t\tdefer cleanup()\n\n\t\tf.WriteAt([]byte(\"randomcorruption\"), meta_header_size+meta_page_size+2)\n\n\t\tnw := &wal{}\n\t\terr := nw.restoreMetaPage(f.Name())\n\t\trequire.NoError(t, err)\n\n\t\tem := &meta{}\n\t\t*em = *w.meta\n\n\t\tem.BytesStore = map[string][]byte{\n\t\t\t\"foo\": []byte(\"val4\"),\n\t\t}\n\t\trequire.Equal(t, 4, int(nw.meta.Version))\n\t\trequire.Equal(t, em, nw.meta)\n\t})\n\n}", "func (a *Agent) UpgradeRelease(\n\tctx context.Context,\n\tconf *UpgradeReleaseConfig,\n\tvalues string,\n\tdoAuth *oauth2.Config,\n\tdisablePullSecretsInjection bool,\n\tignoreDependencies bool,\n) (*release.Release, error) {\n\tctx, span := telemetry.NewSpan(ctx, \"helm-upgrade-release\")\n\tdefer span.End()\n\n\ttelemetry.WithAttributes(span,\n\t\ttelemetry.AttributeKV{Key: \"project-id\", Value: conf.Cluster.ProjectID},\n\t\ttelemetry.AttributeKV{Key: \"cluster-id\", Value: conf.Cluster.ID},\n\t\ttelemetry.AttributeKV{Key: \"name\", Value: conf.Name},\n\t\ttelemetry.AttributeKV{Key: \"stack-name\", Value: conf.StackName},\n\t\ttelemetry.AttributeKV{Key: \"stack-revision\", Value: conf.StackRevision},\n\t)\n\n\tvaluesYaml, err := chartutil.ReadValues([]byte(values))\n\tif err != nil {\n\t\treturn nil, telemetry.Error(ctx, span, err, \"Values could not be parsed\")\n\t}\n\n\tconf.Values = valuesYaml\n\n\treturn a.UpgradeReleaseByValues(ctx, conf, doAuth, disablePullSecretsInjection, ignoreDependencies)\n}" ]
[ "0.50827223", "0.5070301", "0.5035704", "0.49518195", "0.49108112", "0.48721832", "0.4842326", "0.47435385", "0.46941632", "0.4652561", "0.4616511", "0.46026367", "0.45869935", "0.45795777", "0.45576334", "0.45538297", "0.45508382", "0.45336103", "0.4529273", "0.4494805", "0.44873166", "0.4477706", "0.44459233", "0.44257137", "0.4422091", "0.44100416", "0.44071853", "0.4407149", "0.43963534", "0.43914318", "0.438749", "0.4386898", "0.43773848", "0.4372562", "0.43500206", "0.4345885", "0.43431586", "0.4335534", "0.43303663", "0.43258205", "0.43205124", "0.42999762", "0.42955112", "0.42812943", "0.42807192", "0.4278466", "0.4278043", "0.42743596", "0.42734697", "0.42678297", "0.42582685", "0.42536518", "0.425201", "0.42504245", "0.4244853", "0.42435867", "0.4239743", "0.4236815", "0.42284787", "0.42192796", "0.42184752", "0.42168966", "0.42146656", "0.42139384", "0.42105252", "0.42075124", "0.42069265", "0.4194602", "0.41942275", "0.41903847", "0.41883516", "0.4179325", "0.41791126", "0.41749623", "0.41748703", "0.417366", "0.41701266", "0.41674876", "0.41667974", "0.4166569", "0.41658577", "0.41652086", "0.41636038", "0.4160361", "0.4156264", "0.41510186", "0.41498965", "0.41447112", "0.41441733", "0.41414323", "0.4134354", "0.4124247", "0.41190726", "0.411818", "0.411437", "0.4107167", "0.41056958", "0.41046712", "0.41017157", "0.4095775" ]
0.5763628
0
Get returns env var `name`, or `defvalue` if it's missing.
func Get(name, defvalue string) string {
	if s := os.Getenv(strings.ToUpper(name)); s == "" {
		return defvalue
	} else {
		return s
	}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func getEnv(name, def string) (value string) {\n\tif value = os.Getenv(name); value == \"\" {\n\t\tvalue = def\n\t}\n\treturn\n}", "func GetStr(name string, defaultValue ...string) string {\n\tvalue, ok := os.LookupEnv(name)\n\tif !ok && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok {\n\t\tvalue = defaultValue[0]\n\t}\n\n\treturn value\n}", "func Get(name string) interface{} {\n\tlock.Lock()\n\tvar v, found = env.variables[name]\n\tlock.Unlock()\n\n\tif found {\n\t\tv.mutex.Lock()\n\t\tif v.cachedValue != nil {\n\t\t\tdefer v.mutex.Unlock()\n\t\t\treturn v.cachedValue.value\n\t\t}\n\t\tv.mutex.Unlock()\n\t}\n\n\tvar value interface{}\n\tif !found {\n\t\t// it's for an ad-hoc value, let's go through the default chain\n\t\tfor _, source := range env.settings.DefaultSources {\n\t\t\tvalue = source.Provider().Get(name, source.Config())\n\t\t\tif value != nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t} else {\n\t\t// it's a variable\n\t\tfor _, s := range v.sources {\n\t\t\tsourceValue := s.source.Provider().Get(name, s.source.Config())\n\t\t\ts.cachedValue = &valuePlaceholder{value: sourceValue} // cache the given value to identify if there were changes in a refresh\n\t\t\tif value == nil && sourceValue != nil {\n\t\t\t\tvalue = sourceValue\n\t\t\t\tif v.converter != nil {\n\t\t\t\t\tvalue = v.converter(value)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif value == nil {\n\t\t\tvalue = v.defaultValue\n\t\t}\n\t\tv.cachedValue = &valuePlaceholder{value: value}\n\t}\n\n\treturn value\n}", "func Get(key string, defaultVal string) string {\n\tval, ok := os.LookupEnv(key)\n\tif !ok {\n\t\tval = defaultVal\n\t}\n\treturn val\n}", "func Get(envKey, defaultVal string) string {\n\t// Check for an environment variable\n\tenvVal, envPresent := os.LookupEnv(envKey)\n\tif envPresent && envVal != \"\" {\n\t\treturn envVal\n\t}\n\t// Check the loaded vars\n\tif val, ok := envVars[envKey]; ok && val != \"\" {\n\t\treturn val\n\t}\n\treturn defaultVal\n}", "func GetStringEnvWithDefault(name, def string) string {\n\tvar val string\n\t\n\tif val = os.Getenv(name); val == \"\" {\n\t\tlog.Printf(\"Env variant %s not found, using default value: %s\", name, def)\n\t\treturn def\n\t}\n\t\n\tlog.Printf(\"Env variant %s found, using env value: %s\", name, val)\n\treturn val\n}", "func (e EnvConfig) Get(name string, defaultValue string) string {\n\tv := os.Getenv(strings.ToUpper(name))\n\tif v == \"\" {\n\t\tv = defaultValue\n\t}\n\treturn v\n}", "func Getenv(name string, def ...string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" && len(def) > 0 {\n\t\tval = def[0]\n\t}\n\n\treturn val\n}", "func Get(key, defaultValue string) string {\n\tvalue, ok := os.LookupEnv(key)\n\tif ok {\n\t\treturn value\n\t}\n\n\treturn defaultValue\n}", "func Get(key, defaultValue string) string {\n\tif v, ok := os.LookupEnv(key); ok {\n\t\treturn v\n\t}\n\treturn defaultValue\n}", "func GetFromEnvOrDefault(name, defaultValue string) string {\n\tfromEnv := os.Getenv(name)\n\tif fromEnv == \"\" {\n\t\treturn defaultValue\n\t}\n\treturn fromEnv\n}", "func GetEnvStr(name, def string) (res string) {\n\tres = def\n\n\ts := os.Getenv(name)\n\tif len(s) > 0 {\n\t\tres = s\n\t}\n\n\treturn\n}", "func getStringOpt(name, dfault string) string {\n if value := os.Getenv(name); value != \"\" {\n return value\n }\n return dfault\n}", "func ReadEnvVar(name string) (string, error) {\n value := os.Getenv(name)\n if value == \"\" {\n return \"\", errors.New(\"Missing \" + name + \" env 
variable.\")\n }\n return value, nil\n}", "func getenv(key, def string) string {\n\tif value, ok := os.LookupEnv(key); ok {\n\t\treturn value\n\t}\n\treturn def\n}", "func GetString(name string, defaultVal string) string {\n\tval, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\n\treturn val\n}", "func getEnvOr(name, defaultValue string) (out string) {\n\tout = os.Getenv(name)\n\tif out == \"\" {\n\t\tout = defaultValue\n\t}\n\treturn\n}", "func MustGet(name string) string {\n\tenvVar, ok := os.LookupEnv(name)\n\tif !ok {\n\t\tlog.Fatalf(\"environment variable (%s) has not been set\", name)\n\t}\n\tif envVar == \"\" {\n\t\tlog.Fatalf(\"environment variable (%s) is empty\", name)\n\t}\n\treturn envVar\n}", "func getEnvOrDefault(name string, fallback string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" {\n\t\treturn fallback\n\t}\n\n\treturn val\n}", "func Get(envKey, defaultVal string) string {\n\tval := os.Getenv(envKey)\n\tif val == \"\" {\n\t\tval = defaultVal\n\t}\n\treturn val\n}", "func GetEnv(name string, def ...string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" && len(def) > 0 {\n\t\tval = def[0]\n\t}\n\treturn val\n}", "func getOrElse(key, standard string) string {\n\tif val := os.Getenv(key); val != \"\" {\n\t\treturn val\n\t} else if standard == \"\" {\n\t\tlog.Fatalf(\"ERROR: The environment variable, %s, must be set\", key)\n\t}\n\treturn standard\n}", "func (e *echo) Val(name string) string {\n\tif val, ok := e.vars[name]; ok {\n\t\treturn val\n\t}\n\treturn os.Getenv(name)\n}", "func (e *Env) Get(k string) (interface{}, error) {\n\t// Builtin\n\tif e.isBuiltin(k) {\n\t\tbv := reflect.ValueOf(e.builtin).Elem()\n\t\tbt := reflect.TypeOf(e.builtin).Elem()\n\t\tif _, ok := bt.FieldByName(k); ok {\n\t\t\tfv := bv.FieldByName(k)\n\t\t\treturn fv.Interface(), nil\n\t\t}\n\t}\n\n\t// function arg variable\n\tif v, ok := e.funcArg[k]; ok {\n\t\treturn v, nil\n\t}\n\n\t// global variable\n\tif v, ok := e.global[k]; ok {\n\t\treturn v, nil\n\t}\n\n\t// local variable\n\treturn e.getLocalVar(k)\n}", "func ConfigVal(name, defval string) string {\n\tif path := *options.String[name]; path != \"\" {\n\t\treturn path\n\t} else if path := os.Getenv(strings.ToUpper(name)); path != \"\" {\n\t\treturn path\n\t} else if defval != \"\" {\n\t\treturn defval\n\t} else {\n\t\toptions.Usage(\"Must supply -%s or set $%s\", name, strings.ToUpper(name))\n\t\treturn \"\" // not reached\n\t}\n}", "func (f *Facade) Get(name string) string {\n\tif val, ok := f.env[name]; ok {\n\t\treturn *val\n\t}\n\treturn \"\"\n}", "func getenv(key string, def ...string) string {\n\tif v, ok := os.LookupEnv(key); ok {\n\t\treturn v\n\t}\n\tif len(def) == 0 {\n\t\tlog.Fatalf(\"%s not defined in environment\", key)\n\t}\n\treturn def[0]\n}", "func getEnvString(key string, defaultVal string) string {\n if value, exists := os.LookupEnv(key); exists {\n\t return value\n }\n\n return defaultVal\n}", "func getStringEnvVar(envVar string, def string) string {\n\tif val := os.Getenv(envVar); val != \"\" {\n\t\treturn val\n\t}\n\treturn def\n}", "func getEnvOrDefault(envVar string, defaultValue string) string {\n\tvalue := os.Getenv(envVar)\n\tif value == \"\" {\n\t\treturn defaultValue\n\t}\n\treturn value\n}", "func GetEnv(k, def string) string {\n\tv, found := os.LookupEnv(k)\n\tif !found {\n\t\tv = def\n\t}\n\treturn v\n}", "func getEnvString(env string, def string) string {\n\tval := os.Getenv(env)\n\tif len(val) == 0 {\n\t\treturn def\n\t}\n\treturn val\n}", "func envOrDefault(key string, 
defaultVal string) (string, error) {\n\tif envVal, ok := os.LookupEnv(key); ok {\n\t\tlog.Debugf(\"ENV variable %s. Value %s\", key, envVal)\n\t\treturn envVal, nil\n\t}\n\tlog.Debugf(\"ENV variable %s undefined, set default value: %s\", key, defaultVal)\n\treturn defaultVal, nil\n}", "func GetStringVal(envVar string, defaultValue string) string {\n\tif val := os.Getenv(envVar); val != \"\" {\n\t\treturn val\n\t} else {\n\t\treturn defaultValue\n\t}\n}", "func GetEnvVariableOrDefault(key string, defaultVal string) string {\n\tval := os.Getenv(key)\n\tif val == \"\" {\n\t\tval = defaultVal\n\t}\n\treturn val\n}", "func getRequiredEnv(name string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" {\n\t\tlog.Fatalf(\"Missing required environment variable %s\\n\", name)\n\t}\n\treturn val\n}", "func GetEnvOr(envVar, defValue string) string {\n\tif val, ok := getEnv(envVar, false); ok {\n\t\treturn val\n\t} else {\n\t\treturn defValue\n\t}\n}", "func reqEnv(name string) string {\n\tval := os.Getenv(name)\n\tif len(val) == 0 {\n\t\tlog.Fatalf(\"no %s environment variable set\", name)\n\t}\n\treturn val\n}", "func GetEnv(name string, defaultValue string) string {\n\tif strVal, ok := os.LookupEnv(name); ok && len(strVal) > 0 {\n\t\treturn strVal\n\t}\n\n\treturn defaultValue\n}", "func MustGetenv(varname string, defaultValue string) string {\n\tlog.Debugf(\"Reading environment variable %v\", varname)\n\n\tvalue := os.Getenv(varname)\n\tif value == \"\" {\n\t\tif defaultValue == \"\" {\n\t\t\tlog.Fatalf(\"Missing env variable %v\", varname)\n\t\t}\n\t\tvalue = defaultValue\n\t}\n\n\treturn value\n}", "func MustGetEnv(name string) string {\n\tvalue, found := os.LookupEnv(name)\n\tif !found {\n\t\tlog.Fatalf(\"environment variable %s is missing\", name)\n\t}\n\treturn value\n}", "func GetInt(name string, defvalue int) int {\n\tenv := os.Getenv(strings.ToUpper(name))\n\tif env == \"\" {\n\t\treturn defvalue\n\t}\n\tv, err := strconv.Atoi(env)\n\tif err != nil {\n\t\tfmt.Println(fmt.Sprintf(\"%s Error: os.Getenv %s <-> %d\", \"GetInt\", strings.ToUpper(name), defvalue))\n\t\tos.Exit(0)\n\t}\n\treturn int(v)\n}", "func getEnv(key string, defaultVal string) string {\n\tif value, exists := os.LookupEnv(key); exists {\n\t\treturn value\n\t}\n\n\treturn defaultVal\n}", "func getEnv(key string, defaultVal string) string {\n\tif value, exists := os.LookupEnv(key); exists {\n\t\treturn value\n\t}\n\n\treturn defaultVal\n}", "func getEnv(key string, defaultVal string) string {\n\tif value, exists := os.LookupEnv(key); exists {\n\t\treturn value\n\t}\n\n\treturn defaultVal\n}", "func getEnv(env, defaultValue string) string {\n\tval := os.Getenv(env)\n\tif val == \"\" {\n\t\tval = defaultValue\n\t}\n\treturn val\n}", "func Get(key string) (string, error) {\n\tval := os.Getenv(key)\n\n\tif val == \"\" {\n\t\treturn \"\", ErrNotFound\n\t}\n\n\treturn val, nil\n}", "func getEnvValue(w http.ResponseWriter, req *http.Request) {\n\tenvVar, ok := req.URL.Query()[\"var\"]\n\tif !ok {\n\t\terrMessage := \"variable \" + envVar[0] + \" not found\"\n\t\tfmt.Fprintf(w, errMessage)\n\t}\n\n\tvalue := os.Getenv(envVar[0])\n\tfmt.Fprintf(w, value+\"\\n\")\n}", "func (e *Environment) Get(t token.Token) interface{} {\n\tvalue, found := e.Values[t.Lexeme]\n\tif found {\n\t\treturn value\n\t}\n\treturn &parseerror.RunTimeError{Token: t, Message: fmt.Sprintf(\"Undefined variable '%s'.\", t.Lexeme)}\n}", "func Get(variable string) string {\n\n\tvar config = map[string]Options{\n\t\t\"EnvAPIPort\": {\n\t\t\tDefault: 
DefaultAPIPort,\n\t\t\tEnvironment: EnvAPIPort,\n\t\t},\n\t\t\"EnvAPIIP\": {\n\t\t\tDefault: DefaultAPIIP,\n\t\t\tEnvironment: EnvAPIIP,\n\t\t},\n\t\t\"EnvBasicAuth\": {\n\t\t\tDefault: strconv.FormatBool(DefaultBasicAuthentication),\n\t\t\tEnvironment: EnvBasicAuth,\n\t\t},\n\t\t\"EnvDefaultConsulAddr\": {\n\t\t\tDefault: structs.DefaultConsulAddr,\n\t\t\tEnvironment: EnvConsulAddr,\n\t\t},\n\t\t\"EnvDatabasePath\": {\n\t\t\tDefault: structs.DefaultDatabasePath,\n\t\t\tEnvironment: EnvDatabasePath,\n\t\t},\n\t}\n\n\tfor k, v := range config {\n\t\tif k == variable {\n\t\t\tif os.Getenv(v.Environment) != \"\" {\n\t\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\t\"key\": k,\n\t\t\t\t\t\"value\": v.Environment,\n\t\t\t\t}).Debug(\"config: setting configuration\")\n\t\t\t\treturn os.Getenv(v.Environment)\n\t\t\t}\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"key\": k,\n\t\t\t\t\"value\": v.Default,\n\t\t\t}).Debug(\"config: setting configuration\")\n\t\t\treturn v.Default\n\n\t\t}\n\t}\n\treturn \"\"\n}", "func GetEnvStr(key, def string) string {\n\tif value := os.Getenv(key); value != \"\" {\n\t\treturn value\n\t}\n\treturn def\n}", "func (e *Environment) Get(name string) (Object, bool) {\n\tv, ok := e.store[name]\n\tif !ok && e.outer != nil {\n\t\tv, ok = e.outer.GetVar(name)\n\t}\n\treturn v.value, ok\n}", "func (s envValueStore) get(key StoreKey) (string, error) {\n\tenvValueStoreKey := strings.ToUpper(s.prefix + string(key))\n\tval, found := os.LookupEnv(strings.ToUpper(envValueStoreKey))\n\tif !found {\n\t\treturn \"\", ParameterNotFoundError{key: key}\n\t}\n\n\treturn val, nil\n}", "func String(name, defaultValue string) string {\n\tif val, ok := os.LookupEnv(name); ok {\n\t\treturn val\n\t}\n\treturn defaultValue\n}", "func GetOSEnv(name string, defaultValue string) string {\n\tvalue := os.Getenv(name)\n\tif len(value) == 0 {\n\t\tvalue = defaultValue\n\t}\n\treturn value\n}", "func getOrDefaultEnv(arg, envKey string) string {\n\tif arg != \"\" {\n\t\treturn arg\n\t}\n\treturn os.Getenv(envKey)\n}", "func getEnvOrFail(envVar string) string {\n\tvalue := os.Getenv(envVar)\n\tif value == \"\" {\n\t\tfmt.Fprintf(os.Stderr, \"Error: environment variable %s doesn't exist or it's empty, set it and try it again\", envVar)\n\t\tos.Exit(1)\n\t}\n\treturn value\n}", "func getEnv(env, devaultValue string) string {\n\tvalue := os.Getenv(env)\n\tif value == \"\" {\n\t\treturn devaultValue\n\t}\n\treturn value\n}", "func getEnv(env, devaultValue string) string {\n\tvalue := os.Getenv(env)\n\tif value == \"\" {\n\t\treturn devaultValue\n\t}\n\treturn value\n}", "func getEnv(key, defaultVal string) string {\n\tval := os.Getenv(key)\n\tif val != \"\" {\n\t\treturn val\n\t}\n\treturn defaultVal\n}", "func getEnv(envVar string, require bool) (val string, ok bool) {\n\tif val, ok = os.LookupEnv(envVar); !ok {\n\t\tif require {\n\t\t\tpanic(fmt.Sprintf(\"env: missing required environment variable: %s\", envVar))\n\t\t}\n\t}\n\n\treturn\n}", "func mustGetEnv(name string) string {\n\tenv, ok := os.LookupEnv(name)\n\tif !ok {\n\t\tlog.WithField(\"env\", name).Fatalf(\"missing required environment variable for configuration\")\n\t}\n\tlog.WithField(name, env).Info(\"using env variable\")\n\treturn env\n}", "func (e *Environment) Get(name token.Token, index int) (interface{}, error) {\n\tif index == -1 {\n\t\tv, prs := e.values[name.Lexeme]\n\t\tif prs {\n\t\t\tif v == needsInitialization {\n\t\t\t\treturn nil, runtimeerror.Make(name, fmt.Sprintf(\"Uninitialized variable access: '%s'\", 
name.Lexeme))\n\t\t\t}\n\t\t\treturn v, nil\n\t\t}\n\t\tif e.enclosing != nil {\n\t\t\treturn e.enclosing.Get(name, index)\n\t\t}\n\t\treturn nil, runtimeerror.Make(name, fmt.Sprintf(\"Undefined variable '%v'\", name.Lexeme))\n\t}\n\treturn e.indexedValues[index], nil\n}", "func (es *EnvStorage) Get(name string) (string, error) {\n\tsecret, ok := os.LookupEnv(es.makeEnvName(name))\n\tif !ok {\n\t\treturn \"\", &SecretNotFound{name}\n\t}\n\treturn secret, nil\n}", "func GetUserSetVarFromString(cmd *cobra.Command, flagName, envKey string, isOptional bool) (string, error) {\n\tif cmd.Flags().Changed(flagName) {\n\t\tvalue, err := cmd.Flags().GetString(flagName)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(flagName+\" flag not found: %s\", err)\n\t\t}\n\n\t\tif value == \"\" {\n\t\t\treturn \"\", fmt.Errorf(\"%s value is empty\", flagName)\n\t\t}\n\n\t\treturn value, nil\n\t}\n\n\tvalue, isSet := os.LookupEnv(envKey)\n\n\tif isOptional || isSet {\n\t\tif !isOptional && value == \"\" {\n\t\t\treturn \"\", fmt.Errorf(\"%s value is empty\", envKey)\n\t\t}\n\n\t\treturn value, nil\n\t}\n\n\treturn \"\", errors.New(\"Neither \" + flagName + \" (command line flag) nor \" + envKey +\n\t\t\" (environment variable) have been set.\")\n}", "func env(key string, defaultValue string) string {\n\tif value, exists := os.LookupEnv(key); exists {\n\t\treturn value\n\t}\n\treturn defaultValue\n}", "func getValue(sf reflect.StructField) (string, error) {\n\tvar (\n\t\tvalue string\n\t\terr error\n\t)\n\n\tkey, options := parseKeyForOption(sf.Tag.Get(\"env\"))\n\n\t// Get default value if exists\n\tdefaultValue := sf.Tag.Get(\"envDefault\")\n\tvalue = getValueOrDefault(key, defaultValue)\n\n\tif len(options) > 0 {\n\t\tfor _, option := range options {\n\t\t\t// TODO: Implement others options supported\n\t\t\t// For now only option supported is \"required\".\n\t\t\tswitch option {\n\t\t\tcase \"\":\n\t\t\t\tbreak\n\t\t\tcase required:\n\t\t\t\tvalue, err = getRequired(key)\n\t\t\tdefault:\n\t\t\t\terr = errTagOptionNotSupported\n\t\t\t}\n\t\t}\n\t}\n\n\treturn value, err\n}", "func getEnvVar(v string) (string, error) {\n\tvar err error\n\tenvVar := os.Getenv(v)\n\tif len(envVar) < 1 {\n\t\terr = errors.New(\"Could not retrieve Environment variable, or it had no content. 
\" + v)\n\t}\n\treturn envVar, err\n}", "func mustGetenv(name string) string {\n\tvalue := os.Getenv(name)\n\tif value == \"\" {\n\t\tpanic(name + \" not defined in environment\")\n\t}\n\treturn value\n}", "func getEnvWithDefault(key, fallback string) string {\n\tvalue := os.Getenv(key)\n\tif value == \"\" {\n\t\treturn fallback\n\t}\n\treturn value\n}", "func (src *EnvSource) Get(ctx context.Context, name string) (string, error) {\n\tsecret, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn \"\", ErrNotFound\n\t}\n\treturn secret, nil\n}", "func GetEnvString(v string, def string) string {\n\tr := os.Getenv(v)\n\tif r == \"\" {\n\t\treturn def\n\t}\n\n\treturn r\n}", "func getEnvOrElse(name string, other string) string {\n\tenv, ok := os.LookupEnv(name)\n\tif ok {\n\t\tlog.WithField(name, env).Info(\"using env variable\")\n\t} else {\n\t\tenv = other\n\t\tlog.WithField(name, env).Info(\"using default setting\")\n\t}\n\treturn env\n}", "func (c *CmdEnv) get(name string) (string, *failures.Failure) {\n\tkey, err := c.openKeyFn(getEnvironmentPath(c.userScope))\n\tif err != nil {\n\t\treturn \"\", failures.FailOS.Wrap(err, locale.T(\"err_windows_registry\"))\n\t}\n\tdefer key.Close()\n\n\t// Return the backup version if it exists\n\toriginalValue, _, err := key.GetStringValue(envBackupName(name))\n\tif err != nil && !IsNotExistError(err) {\n\t\treturn \"\", failures.FailOS.Wrap(err, locale.T(\"err_windows_registry\"))\n\t} else if err == nil {\n\t\treturn originalValue, nil\n\t}\n\n\tv, _, err := key.GetStringValue(name)\n\tif err != nil && !IsNotExistError(err) {\n\t\treturn v, failures.FailOS.Wrap(err, locale.T(\"err_windows_registry\"))\n\t}\n\treturn v, nil\n}", "func GetInt(name string, defaultValue ...int) int {\n\tvalue, ok := os.LookupEnv(name)\n\tintValue, err := strconv.Atoi(value)\n\n\tif (!ok || err != nil) && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing or invalid %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok || err != nil {\n\t\tintValue = defaultValue[0]\n\t}\n\n\treturn intValue\n}", "func MustGet(k string) string {\n\tv := os.Getenv(k)\n\tif v == \"\" {\n\t\tlog.Panicln(\"ENV missing, key: \" + k)\n\t}\n\n\tv = *stringUtil.RemoveEmptyQuote(&v)\n\n\treturn v\n}", "func (c mockK8sClient) GetEnvValue(podSpec corev1.PodSpec, namespace string, envName string) (string, error) {\n\tenv := kubernetes.GetEnvVar(podSpec, envName)\n\tif env == nil {\n\t\treturn \"\", fmt.Errorf(\"Could not retrieve environment variable %s\", envName)\n\t}\n\n\tif env.Value != \"\" {\n\t\treturn env.Value, nil\n\t}\n\treturn \"\", fmt.Errorf(\"Error getting value for environment variable %s\", env.Name)\n}", "func getEnv(key, fallback string) string {\n\tvalue := os.Getenv(key)\n\tif len(value) == 0 {\n\t\treturn fallback\n\t}\n\n\treturn value\n}", "func getEnv(key, fallback string) string {\n\tvalue := os.Getenv(key)\n\tif len(value) == 0 {\n\t\treturn fallback\n\t}\n\n\treturn value\n}", "func getEnv(key, fallback string) string {\n\tvalue := os.Getenv(key)\n\tif len(value) == 0 {\n\t\treturn fallback\n\t}\n\n\treturn value\n}", "func getEnv(key, fallback string) string {\n\tvalue := os.Getenv(key)\n\tif len(value) == 0 {\n\t\treturn fallback\n\t}\n\n\treturn value\n}", "func getConfig(envVars []string) (value string) {\n\tvalue = \"\"\n\tfor _, v := range envVars {\n\t\tvalue = os.Getenv(v)\n\t\tif value != \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn\n}", "func Getenv(key, defaultValue string) string {\n\tvalue := os.Getenv(key)\n\tif len(value) == 0 {\n\t\treturn 
defaultValue\n\t}\n\treturn value\n}", "func (c *Cli) Getenv(name string) string {\n\tvar s string\n\te, err := c.EnvAttribute(name)\n\tif err != nil {\n\t\treturn s\n\t}\n\tswitch e.Type {\n\tcase \"bool\":\n\t\treturn fmt.Sprintf(\"%t\", e.BoolValue)\n\tcase \"int\":\n\t\treturn fmt.Sprintf(\"%d\", e.IntValue)\n\tcase \"int64\":\n\t\treturn fmt.Sprintf(\"%d\", e.Int64Value)\n\tcase \"uint\":\n\t\treturn fmt.Sprintf(\"%d\", e.UintValue)\n\tcase \"uint64\":\n\t\treturn fmt.Sprintf(\"%d\", e.Uint64Value)\n\tcase \"float64\":\n\t\treturn fmt.Sprintf(\"%f\", e.Float64Value)\n\tcase \"time.Duration\":\n\t\treturn fmt.Sprintf(\"%s\", e.DurationValue)\n\t}\n\treturn e.StringValue\n}", "func GetDuration(name string, defaultVal time.Duration) time.Duration {\n\tvalStr, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\n\tval, err := time.ParseDuration(valStr)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn val\n}", "func getEnvKeyValue(match string, partial bool) (string, string, error) {\n\tfor _, e := range os.Environ() {\n\t\tpair := strings.Split(e, \"=\")\n\t\tif len(pair) != 2 {\n\t\t\tcontinue\n\t\t}\n\n\t\tkey := pair[0]\n\t\tvalue := pair[1]\n\n\t\tif partial && strings.Contains(key, match) {\n\t\t\treturn key, value, nil\n\t\t}\n\n\t\tif strings.Compare(key, match) == 0 {\n\t\t\treturn key, value, nil\n\t\t}\n\t}\n\n\tmatchType := \"match\"\n\tif partial {\n\t\tmatchType = \"partial match\"\n\t}\n\n\treturn \"\", \"\", fmt.Errorf(\"Failed to find %s with %s\", matchType, match)\n}", "func ReadEnv(v string, defaultValue string) string {\n\tif p := os.Getenv(v); p != \"\" {\n\t\treturn p\n\t}\n\n\treturn defaultValue\n}", "func Getenv(key string, defaultValue string) string {\n\tvalue := os.Getenv(key)\n\tif value == \"\" {\n\t\tvalue = defaultValue\n\t}\n\treturn value\n}", "func getEnv(key string) string {\n\tif value, exists := os.LookupEnv(key); exists {\n\t\treturn value\n\t}\n\tpanic(fmt.Errorf(\"Env Variable is not defined %v\", key))\n}", "func env(key string, defaultValue string) string {\n\tvalue := os.Getenv(key)\n\n\tif len(value) > 0 {\n\t\treturn value\n\t}\n\treturn defaultValue\n\n}", "func (c *ConfigDefault) GetValue(key string) (interface{}, bool) {\n\tfound := false\n\tvar result interface{}\n\n\t// try first default value\n\tif nil != c.values {\n\t\tkeys := strings.Split(key, \".\")\n\t\tsection := keys[:len(keys)-1]\n\t\t// name is last part\n\t\tname := keys[len(keys)-1]\n\n\t\tm := subMap(&c.values, section, true)\n\t\tif nil != m {\n\t\t\tsmap := *m\n\t\t\tresult, found = smap[name]\n\t\t}\n\t}\n\tif !found {\n\t\t// try Env vars\n\t\tname := c.prefix + key\n\t\t// Convert to UpperCase but first replace '.' 
with '_'\n\t\tname = strings.ToUpper(strings.Replace(name, \".\", \"_\", -1))\n\t\tresult, found = os.LookupEnv(name)\n\t\tif found {\n\t\t\treturn result, true\n\t\t}\n\t\treturn nil, false\n\t}\n\n\treturn result, found\n}", "func getEnv(env, value string) string {\n\tif v := os.Getenv(env); v != \"\" {\n\t\treturn v\n\t}\n\n\treturn value\n}", "func GetInt(name string, defaultVal int) int {\n\tvalStr, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\n\tval, err := strconv.Atoi(valStr)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn val\n}", "func getEnv(key string, fallback string) string {\n\tif value, ok := os.LookupEnv(key); ok {\n\t\tif value != \"\" {\n\t\t\treturn value\n\t\t}\n\t}\n\treturn fallback\n}", "func (c *AppConfig) getString(key string, defaultValue string) (val string) {\n\tif val = os.Getenv(key); val != \"\" {\n\t\treturn val\n\t} else {\n\t\tos.Setenv(key, defaultValue)\n\t}\n\n\treturn defaultValue\n}", "func getEnv(key, fallback string) string {\n\tvalue, exists := os.LookupEnv(key)\n\tif !exists {\n\t\tvalue = fallback\n\t}\n\treturn value\n}", "func LookupStringEnv(envName string, defVal string) string {\n\tif envVal, exists := os.LookupEnv(envName); exists {\n\t\treturn envVal\n\t}\n\n\treturn defVal\n}", "func getFlagOrEnvVar(flag, envVar string) string {\n\tif flag != \"\" {\n\t\treturn flag\n\t}\n\treturn os.Getenv(envVar)\n}", "func getUintOpt(name string, dfault uint64) uint64 {\n if result, err := strconv.ParseUint(os.Getenv(name), 10, 64); err == nil {\n return result\n }\n return dfault\n}", "func GetConfigValue(configEnv *viper.Viper, configFile *viper.Viper, key string) (string) {\n\tvalue := configEnv.GetString(key)\n\tif value == \"\" {\n\t\tvalue = configFile.GetString(key)\n\t}\n\n\treturn value\n}" ]
[ "0.7250722", "0.6961752", "0.68624806", "0.6787202", "0.67663264", "0.6746546", "0.67455167", "0.6664482", "0.66006225", "0.6589658", "0.65601677", "0.6507941", "0.6504305", "0.6479081", "0.64308596", "0.64306384", "0.642578", "0.64254993", "0.6404898", "0.63927317", "0.63921237", "0.63628423", "0.63494146", "0.63287646", "0.6321352", "0.6312993", "0.62541384", "0.6156997", "0.6153288", "0.61466885", "0.61269194", "0.60988855", "0.6091179", "0.6059008", "0.6055434", "0.6045896", "0.6043559", "0.6032732", "0.59983575", "0.5986433", "0.5983346", "0.5951956", "0.5950241", "0.5950241", "0.5950241", "0.5906166", "0.58987147", "0.5897258", "0.58634317", "0.5862985", "0.58613527", "0.5850064", "0.5846837", "0.5845677", "0.5842614", "0.5837463", "0.58324933", "0.58260715", "0.58260715", "0.58196765", "0.58178544", "0.5810893", "0.58004475", "0.5800378", "0.5796861", "0.577822", "0.57777745", "0.57541454", "0.57435066", "0.5734673", "0.5732583", "0.57274973", "0.5710872", "0.57088995", "0.570876", "0.5705872", "0.5705709", "0.56986874", "0.56986874", "0.56986874", "0.56986874", "0.5683159", "0.564118", "0.563993", "0.5636118", "0.5635867", "0.5633561", "0.56160325", "0.5615067", "0.5610967", "0.5609212", "0.560652", "0.56034535", "0.55915946", "0.55851245", "0.55842793", "0.5571075", "0.55577046", "0.55571723", "0.5545163" ]
0.7629492
0
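The Get helper in the record above is a thin wrapper over os.Getenv: it upper-cases the key and falls back to defvalue when the variable is unset. The standalone sketch below is not part of the dataset row; it copies the helper so it compiles on its own, and the variable names (LISTEN_ADDR, DB_HOST) are invented purely for illustration.

package main

import (
	"fmt"
	"os"
	"strings"
)

// Get is copied from the record above so this sketch compiles on its own:
// it returns the upper-cased env var `name`, or defvalue when it is unset.
func Get(name, defvalue string) string {
	if s := os.Getenv(strings.ToUpper(name)); s == "" {
		return defvalue
	} else {
		return s
	}
}

func main() {
	os.Setenv("LISTEN_ADDR", ":9090")        // simulate a variable being present
	fmt.Println(Get("listen_addr", ":8080")) // prints ":9090" (the env var wins)
	fmt.Println(Get("db_host", "localhost")) // prints "localhost" (unset, so the fallback is used)
}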
GetInt returns env var `name` as an int, or `defvalue` if it's missing.
func GetInt(name string, defvalue int) int {
	env := os.Getenv(strings.ToUpper(name))
	if env == "" {
		return defvalue
	}
	v, err := strconv.Atoi(env)
	if err != nil {
		fmt.Println(fmt.Sprintf("%s Error: os.Getenv %s <-> %d", "GetInt", strings.ToUpper(name), defvalue))
		os.Exit(0)
	}
	return int(v)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetInt(name string, defaultVal int) int {\n\tvalStr, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\n\tval, err := strconv.Atoi(valStr)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn val\n}", "func GetInt(name string, defaultValue ...int) int {\n\tvalue, ok := os.LookupEnv(name)\n\tintValue, err := strconv.Atoi(value)\n\n\tif (!ok || err != nil) && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing or invalid %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok || err != nil {\n\t\tintValue = defaultValue[0]\n\t}\n\n\treturn intValue\n}", "func Int(name string, defaultValue int) int {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tif i64, err := strconv.ParseInt(strVal, 10, 0); err == nil {\n\t\t\treturn int(i64)\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func getEnvInt(key string, defaultVal int) int {\n valueStr := getEnvString(key, \"\")\n if value, err := strconv.Atoi(valueStr); err == nil {\n\t return value\n }\n return defaultVal\n}", "func getEnvInt(env string, def int) int {\n\tvar (\n\t\terr error\n\t\tval = os.Getenv(env)\n\t\tret int\n\t)\n\n\tif len(val) == 0 {\n\t\treturn def\n\t}\n\n\tif ret, err = strconv.Atoi(val); err != nil {\n\t\tlog.Fatal(env + \" environment variable is not numeric\")\n\t}\n\n\treturn ret\n}", "func Integer(name string, def int) int {\n\tif v := os.Getenv(name); v != \"\" {\n\t\tif a, err := strconv.Atoi(v); err == nil {\n\t\t\treturn a\n\t\t}\n\t}\n\treturn def\n}", "func GetIntEnv(name string, defaultValue int) int {\n\tif strVal, ok := os.LookupEnv(name); ok && len(strVal) > 0 {\n\t\tif intVal, err := strconv.Atoi(strVal); err == nil {\n\t\t\treturn intVal\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func GetEnvInt(key string, def int) int {\n\tif value := os.Getenv(key); value != \"\" {\n\t\ti, err := strconv.Atoi(value)\n\t\tif err != nil {\n\t\t\treturn def\n\t\t}\n\t\treturn i\n\t}\n\treturn def\n}", "func getEnvAsInt(name string, defaultVal int) int {\n\tvalueStr := getEnv(name, \"\")\n\tif value, err := strconv.Atoi(valueStr); err == nil {\n\t\treturn value\n\t}\n\n\treturn defaultVal\n}", "func Int(key string, def int) int {\n\tif env, ok := os.LookupEnv(key); ok {\n\t\ti, err := strconv.Atoi(env)\n\t\tif err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"env: parse int from flag: %s\\n\", err)\n\t\t\tos.Exit(1)\n\t\t}\n\t\treturn i\n\t}\n\treturn def\n}", "func GetInt(envKey string, defaultVal int) int {\n\tstr := os.Getenv(envKey)\n\tif str == \"\" {\n\t\treturn defaultVal\n\t}\n\tval, err := strconv.ParseInt(str, 10, 64)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn int(val)\n}", "func LookupIntEnv(envName string, defVal int) int {\n\tif envVal, exists := os.LookupEnv(envName); exists {\n\t\tif intVal, err := strconv.Atoi(envVal); err == nil {\n\t\t\treturn intVal\n\t\t}\n\t}\n\n\treturn defVal\n}", "func (c *AppConfig) getInt(key string, defaultValue int) (val int) {\n\tp, _ := strconv.ParseInt(os.Getenv(key), 10, 32)\n\tif val = int(p); val != 0 {\n\t\treturn val\n\t} else {\n\t\tos.Setenv(key, strconv.Itoa(defaultValue))\n\t}\n\n\treturn defaultValue\n}", "func GetEnvVariableIntValue(envVarName string, defValue int64) int64 {\n\tenvVal := GetEnvValueFromOs(envVarName)\n\n\tretVal, err := strconv.ParseInt(envVal, 10, 64)\n\n\tif err != nil {\n\t\treturn defValue\n\t}\n\n\treturn retVal\n}", "func envInt(name string, fallback int) int {\n\ts := os.Getenv(name)\n\tif s == \"\" {\n\t\tlog.Printf(\"[ERROR] environment variable \\\"%s\\\" isn't set\", name)\n\t\treturn 
fallback\n\t}\n\tn, err := strconv.Atoi(s)\n\tif err != nil {\n\t\tlog.Printf(\"[ERROR] environment variable \\\"%s\\\" is not a number: %s\", name, s)\n\t\treturn fallback\n\t}\n\tlog.Printf(\"[env] %s=%d\", name, n)\n\treturn n\n}", "func GetEnvAsInt(name string, fallback int) int {\n\tif value, ok := os.LookupEnv(name); ok && len(value) > 0 {\n\t\tif intvalue, err := strconv.Atoi(value); err == nil {\n\t\t\treturn intvalue\n\t\t}\n\t}\n\treturn fallback\n}", "func GetEnvInt(key string, fallback int) int {\n\tif value, ok := os.LookupEnv(key); ok {\n\t\t// linter liked this better than if/else\n\t\tvar err error\n\t\tvar i int\n\t\tif i, err = strconv.Atoi(value); err != nil {\n\t\t\tlogrus.WithError(err).WithFields(logrus.Fields{\"string\": value, \"environment_key\": key}).Fatal(\"Failed to convert string to int\")\n\t\t}\n\t\treturn i\n\t} else if value, ok := os.LookupEnv(key + \"_FILE\"); ok {\n\t\tdat, err := ioutil.ReadFile(filepath.Clean(value))\n\t\tif err == nil {\n\t\t\tvar err error\n\t\t\tvar i int\n\t\t\tif i, err = strconv.Atoi(strings.TrimSpace(string(dat))); err != nil {\n\t\t\t\tlogrus.WithError(err).WithFields(logrus.Fields{\"string\": dat, \"environment_key\": key}).Fatal(\"Failed to convert string to int\")\n\t\t\t}\n\t\t\treturn i\n\t\t}\n\t}\n\treturn fallback\n}", "func GetenvInt(key string, fallback int) int {\n\tif v, ok := syscall.Getenv(key); ok {\n\t\tb, err := strconv.Atoi(v)\n\t\tif err != nil {\n\t\t\treturn fallback\n\t\t}\n\t\treturn b\n\t}\n\treturn fallback\n}", "func (ev Vars) Int(envVar string, defaults ...int) int {\n\tif value, hasValue := ev[envVar]; hasValue {\n\t\treturn util.String.ParseInt(value)\n\t}\n\tif len(defaults) > 0 {\n\t\treturn defaults[0]\n\t}\n\treturn 0\n}", "func GetEnvInt(key string, defaultVal int) int {\n\tif envVal, ok := os.LookupEnv(key); ok {\n\t\tif val, ok := strconv.ParseInt(envVal, 0, 0); ok == nil {\n\t\t\treturn int(val)\n\t\t}\n\t}\n\treturn defaultVal\n}", "func varInt(s *testing.State, name string, defaultVal int) int {\n\tif str, ok := s.Var(name); ok {\n\t\tval, err := strconv.Atoi(str)\n\t\tif err == nil {\n\t\t\treturn val\n\t\t}\n\t\ts.Logf(\"Cannot parse argument %s %s: %v\", name, str, err)\n\t}\n\treturn defaultVal\n}", "func GetIntEnv(key string, fallback int) int {\n\tif value, ok := os.LookupEnv(key); ok {\n\t\tret, err := strconv.Atoi(value)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\treturn ret\n\t}\n\treturn fallback\n}", "func GetInt(key string) int {\n\ts, contains := getFromMode(key)\n\tif !contains {\n\t\ts, contains = getFromGlobal(key)\n\t}\n\tif contains {\n\t\ti, err := strconv.Atoi(s)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"GetInt error:\", err)\n\t\t\treturn 0\n\t\t}\n\t\treturn i\n\t}\n\tfmt.Println(\"value of\", key, \"is not set!\")\n\treturn 0\n}", "func GetIntOrDefault(ctx context.Context, key interface{}, def int) int {\n\tval := ctx.Value(key)\n\tif val == nil {\n\t\treturn def\n\t}\n\n\treturn val.(int)\n}", "func flagEnvInt(name string, defaultValue int, usage string) *int {\n\tval, err := strconv.Atoi(os.Getenv(strings.ToUpper(strings.ReplaceAll(name, \"-\", \"_\"))))\n\tif err != nil {\n\t\tval = defaultValue\n\t}\n\n\treturn flag.Int(\n\t\tname,\n\t\tval,\n\t\tusage,\n\t)\n}", "func GetIntEnv(key string, fallback int) int {\n\tvar intResult = fallback\n\tvar stringResult = os.Getenv(key)\n\n\tif stringResult != \"\" {\n\t\tv, _ := strconv.Atoi(stringResult)\n\t\tintResult = v\n\t}\n\n\treturn intResult\n}", "func GetInt(key string, defaultValue int) int 
{\n\tresult, err := DefaultConf.ConfigFactory.GetValue(key).ToInt()\n\tif err != nil {\n\t\treturn defaultValue\n\t}\n\treturn result\n}", "func (env Env) GetInt(key string) int {\n\tif val := env.GetValue(key); val != nil {\n\t\tif intValue, ok := val.(int); ok {\n\t\t\treturn intValue\n\t\t}\n\t}\n\treturn 0\n}", "func GetInt(key string, defaultValue int) int {\n\treturn archaius.GetInt(key, defaultValue)\n}", "func GetConfigInt(configEnv *viper.Viper, configFile *viper.Viper, key string) int {\n\tvalue := configEnv.GetInt(key)\n\tif value == 0 {\n\t\tvalue = configFile.GetInt(key)\n\t}\n\n\tif value == 0 {\n\t\tlog.Fatalf(\"Variable '%s' missing.\", strings.ToUpper(key))\n\t}\n\n\treturn value\n}", "func (c *Conf) GetIntWithDef(path string, defVal int) int {\n\tc.mutex.RLock()\n\tdefer c.mutex.RUnlock()\n\tvalue, err := c.root.getValue(path)\n\tif err != nil {\n\t\treturn defVal\n\t}\n\tiValue, err := strconv.Atoi(value)\n\tif err != nil {\n\t\treturn defVal\n\t}\n\treturn iValue\n}", "func (c *Command) GetInt(name string) (int, error) {\n\tvalue := c.Flagset.Lookup(name).Value.String()\n\tif value == \"\" {\n\t\treturn 0, ErrParameterNotFound\n\t}\n\treturn strconv.Atoi(value)\n}", "func (e EnvVars) Int(ctx context.Context) (*int, error) {\n\tvars := e.vars(ctx)\n\n\tif vars.Has(e.Key) {\n\t\tvalue, err := vars.Int(e.Key)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &value, nil\n\t}\n\treturn nil, nil\n}", "func intFromEnvVar(envvar string, deflt int) int {\n\t// Note that we're using os instead of syscall because we'll be parsing the int anyway, so there is no need to check if the envvar was found.\n\tenvVarStr := os.Getenv(envvar)\n\tenvVarInt, err := strconv.ParseInt(envVarStr, 10, 0)\n\tif err != nil {\n\t\treturn deflt\n\t}\n\treturn int(envVarInt)\n}", "func GetInt(key string) int {\n\treturn defaultViper.GetInt(key)\n}", "func Int(name string, defs ...int) int {\n\tvar def int\n\tfor _, d := range defs {\n\t\tdef = d\n\t\tbreak\n\t}\n\tv := Raw(name)\n\ti, err := strconv.Atoi(v)\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn i\n}", "func Int(key string, d int) int {\n\tv := os.Getenv(key)\n\tif v == \"\" {\n\t\treturn d\n\t}\n\tret, err := strconv.Atoi(v)\n\tif err != nil {\n\t\treturn d\n\t}\n\treturn ret\n}", "func (c *Configure) ReadIntValue(key string, def int) int {\n\tv := c.Get(key)\n\tif v == \"\" {\n\t\treturn def\n\t}\n\tvalue, err := strconv.Atoi(v)\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn value\n}", "func (conf blah) Intval(key string) int {\n\treturn viper.GetInt(key)\n}", "func (p Configuration) GetInt(name string) (value int, err error) {\n\tvar v float64\n\tv, err = p.GetFloat64(name)\n\tif err != nil {\n\t\treturn value, err\n\t}\n\tvalue = int(v)\n\treturn value, nil\n}", "func Int64(name string, defaultValue int64) int64 {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tif i64, err := strconv.ParseInt(strVal, 10, 64); err == nil {\n\t\t\treturn i64\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func getIntEnv(key string, fallback int) int {\n\tenvStrValue := getEnv(key, \"\")\n\tif envStrValue == \"\" {\n\t\treturn fallback\n\t}\n\tenvIntValue, err := strconv.Atoi(envStrValue)\n\tif err != nil {\n\t\tpanic(\"Env Var \" + key + \" must be an integer\")\n\t}\n\treturn envIntValue\n}", "func (c *ConfigImpl) GetInt(key string, defaultValue ...interface{}) (int64, error) {\n\t// Get raw value\n\traw, err := c.getExpand(key, defaultValue...)\n\t// If not exists,\n\tif nil != raw {\n\t\tswitch val := raw.(type) {\n\t\tcase 
int:\n\t\t\treturn int64(val), nil\n\t\tcase uint:\n\t\t\treturn int64(val), nil\n\t\tcase int8:\n\t\t\treturn int64(val), nil\n\t\tcase uint8:\n\t\t\treturn int64(val), nil\n\t\tcase int16:\n\t\t\treturn int64(val), nil\n\t\tcase uint16:\n\t\t\treturn int64(val), nil\n\t\tcase int32:\n\t\t\treturn int64(val), nil\n\t\tcase uint32:\n\t\t\treturn int64(val), nil\n\t\tcase int64:\n\t\t\treturn int64(val), nil\n\t\tcase uint64:\n\t\t\treturn int64(val), nil\n\t\tcase float32:\n\t\t\treturn int64(val), nil\n\t\tcase float64:\n\t\t\treturn int64(val), nil\n\t\tcase string:\n\t\t\treturn strconv.ParseInt(val, 0, 64)\n\t\tdefault:\n\t\t\t// Convert to string\n\t\t\tstrval := fmt.Sprint(val)\n\t\t\treturn strconv.ParseInt(strval, 0, 64)\n\t\t}\n\t}\n\treturn 0, err\n}", "func (c *Controller) GetInt(key string, def ...int) (int, error) {\n\tstrv := c.Ctx.Input.Query(key)\n\tif len(strv) == 0 && len(def) > 0 {\n\t\treturn def[0], nil\n\t}\n\treturn strconv.Atoi(strv)\n}", "func (jc *JuiceConfig) GetInt(path string, dflt ...int64) (int64, error) {\n\tif jc.hadError {\n\t\treturn 0, errors.New(\"Already had error\")\n\t}\n\tjvalue, ok := (*jc.config)[path]\n\tif ok {\n\t\tvalue, err := jvalue.Int64()\n\t\tif err == nil {\n\t\t\treturn value, nil\n\t\t}\n\t\treturn 0, jc.setError(\"Value is not int64 [\" + path + \"]\")\n\t}\n\n\t// Value not found. Is there a default?\n\tif len(dflt) > 0 {\n\t\treturn dflt[0], nil\n\t}\n\treturn 0, jc.setError(\"Value not found [\" + path + \"]\")\n}", "func (c *Configuration) GetInt(name string) (int, error) {\n\tv, ok := c.data[name].(float64)\n\tif !ok {\n\t\treturn 0, errors.New(fmt.Sprintf(\"no existe el campo %s o no se puede convertir en int\", name))\n\t}\n\n\treturn int(v), nil\n}", "func (c *Cli) EnvInt(name string, value int, usage string) *int {\n\tc.env[name] = &EnvAttribute{\n\t\tName: name,\n\t\tType: fmt.Sprintf(\"%T\", value),\n\t\tIntValue: value,\n\t\tUsage: usage,\n\t}\n\tvar p *int\n\tp = &c.env[name].IntValue\n\t_, ok := c.env[name]\n\tif ok == false {\n\t\treturn nil\n\t}\n\treturn p\n}", "func getPort(key string, def int) int {\n\tport := os.Getenv(key)\n\tportnum, err := strconv.Atoi(port)\n\tif err != nil {\n\t\tportnum = def\n\t\t// print key is not a number\n\t\tlogrus.Warnf(\"%s is not a number, using default value %d\", key, portnum)\n\t}\n\tif portnum >= 65535 || portnum <= 0 {\n\t\tportnum = def\n\t\tlogrus.Warnf(\"Error: %s is not within 0-65535, using default port %d.\", key, portnum)\n\t}\n\treturn portnum\n}", "func IntStrict(name string, defaultValue int) (int, error) {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\ti64, err := strconv.ParseInt(strVal, 10, 0)\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\n\t\treturn int(i64), nil\n\t}\n\n\treturn defaultValue, nil\n}", "func GetInt(section, option string) int {\n\treturn cfg.GetInt(section, option)\n}", "func (c *Config) GetInt(pattern string, def ...interface{}) int {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetInt(pattern, def...)\n\t}\n\treturn 0\n}", "func (c *Validator) GetInt(key string, def ...int) (int, error) {\n\tstrv := c.Input.Query(key)\n\tif len(strv) == 0 && len(def) > 0 {\n\t\treturn def[0], nil\n\t}\n\treturn strconv.Atoi(strv)\n}", "func (handler Handler) GetIntOrDefault(r *http.Request, key string, defaultValue int) int {\n\n\tv := handler.GetParameter(r, key)\n\ts, ok := util.ToInt(v)\n\tif ok != nil {\n\t\treturn defaultValue\n\t}\n\treturn s\n\n}", "func (c *Command) GetInt(key string, def ...int) (int, error) {\n\tv := c.Query(key)\n\t\n\tif v 
!= nil && len(v.CommandValue) > 0 {\n\t\ti64, err := strconv.ParseInt(v.CommandValue, 10, 10)\n\t\treturn int(i64), err\n\t} else if len(def) > 0 {\n\t\treturn def[0], nil\n\t} else if v.DefaultValue != nil {\n\t\treturn v.DefaultValue.(int), nil\n\t}\n\n\treturn 0, nil\n}", "func MustGetenvInt(varname string) int {\n\ts := MustGetenv(varname)\n\n\tif i, err := strconv.Atoi(s); err != nil {\n\t\tpanic(fmt.Errorf(\"Expected int value for env var %v - got: %v\",\n\t\t\tvarname, s))\n\t} else {\n\t\treturn i\n\t}\n}", "func getUintOpt(name string, dfault uint64) uint64 {\n if result, err := strconv.ParseUint(os.Getenv(name), 10, 64); err == nil {\n return result\n }\n return dfault\n}", "func Get(name, defvalue string) string {\n\tif s := os.Getenv(strings.ToUpper(name)); s == \"\" {\n\t\treturn defvalue\n\t} else {\n\t\treturn s\n\t}\n}", "func getIntValue(i *ini.File, section, key string, vdefault int) int {\n\treturn i.Section(section).Key(key).MustInt(vdefault)\n}", "func GetInt(key string) int { return viper.GetInt(key) }", "func (a *Arg) Int(def int) int {\n\tif a.value == \"\" {\n\t\treturn def\n\t}\n\ti, err := strconv.Atoi(a.value)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn i\n}", "func GetIntOrDefault(dict map[string]interface{}, key string, defaultValue int) (int, error) {\n\treturn getInt(dict, key, defaultValue, nil)\n}", "func (c *Config) GetInt(k string) (int, error) {\n\tv, err := c.GetString(k)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\ti, err := strconv.Atoi(v)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn i, nil\n}", "func (u *EtcdUtil) GetInt(key string, def int) (int, uint64, error) {\n\ts, i, err := u.Get(key, \"\")\n\tif err != nil {\n\t\treturn def, i, err\n\t}\n\tv, err := strconv.ParseInt(s, 0, strconv.IntSize)\n\tif err != nil {\n\t\treturn def, i, err\n\t}\n\treturn int(v), i, nil\n}", "func GetStr(name string, defaultValue ...string) string {\n\tvalue, ok := os.LookupEnv(name)\n\tif !ok && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok {\n\t\tvalue = defaultValue[0]\n\t}\n\n\treturn value\n}", "func (v *ValueGetter) Int(defVal ...int) int {\n\tdef := 0\n\tif len(defVal) == 1 {\n\t\tdef = defVal[0]\n\t}\n\n\tif v.Value == nil || v.Arrayed {\n\t\treturn def\n\t}\n\n\tif str, ok := v.Value.(string); ok {\n\t\tval, err := strconv.Atoi(str)\n\t\tif err != nil {\n\t\t\treturn val\n\t\t}\n\t}\n\n\treturn def\n}", "func GetInt(path string, dflt ...int64) (int64, error) {\n\tcheckDefaultConfigIsLoaded()\n\treturn defaultConfig.GetInt(path, dflt...)\n}", "func GetI(name string, defvals ...int) int {\n\tif global == nil {\n\t\tif len(defvals) == 0 {\n\t\t\treturn 0\n\t\t}\n\n\t\treturn defvals[0]\n\t}\n\n\treturn global.GetI(name, defvals...)\n}", "func Int(key string, def int) int {\n\tif s := String(key, \"\"); s != \"\" {\n\t\tif d, err := strconv.Atoi(s); err == nil {\n\t\t\treturn d\n\t\t} else {\n\t\t\tLog(key, err)\n\t\t}\n\t}\n\treturn def\n}", "func (c Claims) GetInt(name string) (int, error) {\n\tif !c.Has(name) {\n\t\treturn 0, ErrNotFound\n\t}\n\n\tswitch val := c[name].(type) {\n\tcase string:\n\t\tv, err := strconv.ParseInt(val, 10, 64)\n\t\tif err != nil {\n\t\t\treturn 0, ErrClaimValueInvalid\n\t\t}\n\t\treturn int(v), nil\n\tcase float32:\n\t\treturn int(val), nil\n\tcase float64:\n\t\treturn int(val), nil\n\tcase uint:\n\t\treturn int(val), nil\n\tcase uint8:\n\t\treturn int(val), nil\n\tcase uint16:\n\t\treturn int(val), nil\n\tcase uint32:\n\t\treturn 
int(val), nil\n\tcase uint64:\n\t\treturn int(val), nil\n\tcase int:\n\t\treturn int(val), nil\n\tcase int8:\n\t\treturn int(val), nil\n\tcase int16:\n\t\treturn int(val), nil\n\tcase int32:\n\t\treturn int(val), nil\n\tcase int64:\n\t\treturn int(val), nil\n\t}\n\n\treturn 0, ErrClaimValueInvalid\n}", "func (s *Session) GetInt(key interface{}, def ...int) int {\n\tif v := s.Get(key); v != nil {\n\t\treturn v.(int)\n\t}\n\tif len(def) > 0 {\n\t\treturn def[0]\n\t}\n\treturn 0\n}", "func (r *vpr) GetInt(s string) int {\n\tret, _ := r.Get(s).(int)\n\n\treturn ret\n}", "func (key *EnvironmentKey) GetInt() int {\n\tkey.registerBinding()\n\treturn key.helper.GetInt(key.Name)\n}", "func GetInt(fixControlMap map[uint64]string, key uint64) (value int64, exists bool, parseErr error) {\n\tif fixControlMap == nil {\n\t\treturn 0, false, nil\n\t}\n\trawValue, ok := fixControlMap[key]\n\tif !ok {\n\t\treturn 0, false, nil\n\t}\n\t// The same as TidbOptInt64 in sessionctx/variable.\n\tvalue, parseErr = strconv.ParseInt(rawValue, 10, 64)\n\treturn value, true, parseErr\n}", "func (c *Conf) GetInt(path string) int {\n\treturn c.GetIntWithDef(path, 0)\n}", "func GetIntWithDefault(fixControlMap map[uint64]string, key uint64, defaultVal int64) int64 {\n\tvalue, exists, err := GetInt(fixControlMap, key)\n\tif !exists || err != nil {\n\t\treturn defaultVal\n\t}\n\treturn value\n}", "func GetInt(key string) int {\n\treturn viper.GetInt(key)\n}", "func GetEnvVar(envName string, env []corev1.EnvVar) int {\n\tfor pos, v := range env {\n\t\tif v.Name == envName {\n\t\t\treturn pos\n\t\t}\n\t}\n\treturn -1\n}", "func (p *Parser) GetInt(path string) int {\n\treturn p.Viper.GetInt(config.ProcessPath(p, path))\n}", "func (cr *ConfigReader) GetIntValue(name string, defaultValue int) int {\n\tvalue := cr.config.Get(name)\n\tif value != nil {\n\t\treturn int(value.(float64))\n\t}\n\treturn defaultValue\n}", "func (f *FlagSet) Int(name string) int {\n\tvalue := f.String(name)\n\tif value != \"\" {\n\t\tval, err := strconv.Atoi(value)\n\t\tif err != nil {\n\t\t\treturn 0\n\t\t}\n\t\treturn val\n\t}\n\treturn 0\n}", "func SysInt(name string) int {\r\n\treturn converter.StrToInt(SysString(name))\r\n}", "func (c *Config) GetI(name string, defvals ...int) int {\n\tif len(defvals) != 0 {\n\t\treturn int(c.GetI64(name, int64(defvals[0])))\n\t}\n\n\treturn int(c.GetI64(name))\n}", "func (nvp *NameValues) Int(name string) (int, bool) {\n\n\tif !nvp.prepared {\n\t\tnvp.prepare()\n\t}\n\n\tvar value int\n\n\tname = strings.ToLower(name)\n\ttmp, exists := nvp.Pair[name]\n\tif exists {\n\t\tval, _ := strconv.ParseInt(tmp.(string), 10, 32)\n\t\tvalue = int(val)\n\t}\n\n\treturn value, exists\n}", "func (c *Config) GetInt(key string) int64 {\n\tv := c.Get(key)\n\tif v != \"\" {\n\t\ti, err := strconv.ParseInt(v, 10, 64)\n\t\tif err == nil {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn 0\n}", "func (this *Section) GetInt(key string, args ... 
int) (int, error) {\n\tvalue, ok := this.Params[key]\n\tif ok {\n\t\tretVal, err := strconv.Atoi(value)\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\treturn retVal, nil\n\t}\n\tif len(args) > 0 {\n\t\treturn args[0], nil\n\t}\n\treturn 0, fmt.Errorf(\"The param named %s does not exist.\", key)\n}", "func GetEnvOrDefaultUInt32(env, defaultValue string) uint32 {\n\treturn envParseUInt32(env, GetEnvOrDefault(env, defaultValue))\n}", "func Int(name string) (int, error) {\n\ti, err := strconv.Atoi(String(name))\n\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to decode input %q as int: %w\", name, err)\n\t}\n\n\treturn i, nil\n}", "func (m *Metadata) GetInt(value string) (int64, error) {\n\tif err := validInt(value); err != nil {\n\t\treturn 0, err\n\t}\n\tm.mu.Lock()\n\tv, ok := m.valuesInt[value]\n\tm.mu.Unlock()\n\tif !ok {\n\t\treturn 0, ErrUnsetValue\n\t}\n\treturn v, nil\n}", "func Get(name string) interface{} {\n\tlock.Lock()\n\tvar v, found = env.variables[name]\n\tlock.Unlock()\n\n\tif found {\n\t\tv.mutex.Lock()\n\t\tif v.cachedValue != nil {\n\t\t\tdefer v.mutex.Unlock()\n\t\t\treturn v.cachedValue.value\n\t\t}\n\t\tv.mutex.Unlock()\n\t}\n\n\tvar value interface{}\n\tif !found {\n\t\t// it's for an ad-hoc value, let's go through the default chain\n\t\tfor _, source := range env.settings.DefaultSources {\n\t\t\tvalue = source.Provider().Get(name, source.Config())\n\t\t\tif value != nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t} else {\n\t\t// it's a variable\n\t\tfor _, s := range v.sources {\n\t\t\tsourceValue := s.source.Provider().Get(name, s.source.Config())\n\t\t\ts.cachedValue = &valuePlaceholder{value: sourceValue} // cache the given value to identify if there were changes in a refresh\n\t\t\tif value == nil && sourceValue != nil {\n\t\t\t\tvalue = sourceValue\n\t\t\t\tif v.converter != nil {\n\t\t\t\t\tvalue = v.converter(value)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif value == nil {\n\t\t\tvalue = v.defaultValue\n\t\t}\n\t\tv.cachedValue = &valuePlaceholder{value: value}\n\t}\n\n\treturn value\n}", "func (s RouteVars) GetInt(key string) (int, bool) {\n\tval, ok := s[key]\n\tif !ok {\n\t\treturn 0, false\n\t}\n\n\tintVal, err := strconv.Atoi(val)\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\n\treturn intVal, true\n}", "func (c *conf) GetInt(key string) int {\n\treturn c.v.GetInt(key)\n}", "func GetenvInt64(key string, dflt ...int64) int64 {\n\tif val := strings.TrimSpace(os.Getenv(key)); val != \"\" {\n\t\tif b, err := strconv.ParseInt(val, 0, 64); err == nil {\n\t\t\treturn b\n\t\t}\n\t}\n\n\tif len(dflt) > 0 {\n\t\treturn dflt[0]\n\t}\n\n\treturn 0\n}", "func GetInt32(key string) int32 { return viper.GetInt32(key) }", "func GetInt(section, key string) int {\n\tif str := GetStr(section, key); \"\" != str {\n\t\tif v, err := strconv.ParseInt(str, 10, 64); nil == err {\n\t\t\treturn int(v)\n\t\t}\n\t}\n\n\treturn 0\n}", "func (p Properties) GetInt(name string) int {\n\tfor _, property := range p {\n\t\tif property.Name == name && property.Type == \"int\" {\n\t\t\tv, err := strconv.Atoi(property.Value)\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn v\n\t\t}\n\t}\n\treturn 0\n}", "func (d *Data) GetInt(key string, defaultValue int) int {\n\tval, err := d.Get(key)\n\tif err != nil {\n\t\treturn defaultValue\n\t}\n\n\tres, ok := val.(int)\n\tif !ok {\n\t\treturn defaultValue\n\t}\n\n\treturn res\n}", "func (item Item) GetInt(name string) int64 {\n\ti, _ := strconv.ParseInt(fmt.Sprintf(\"%v\", item[name]), 10, 64)\n\treturn i\n}", "func Intval(s string) int 
{\n\n\tif s == \"\" {\n\t\treturn 0\n\t}\n\n\ti, err := strconv.Atoi(s)\n\tif err != nil {\n\t\treturn 0\n\t}\n\treturn i\n}", "func GetIntDefault(key string, defaultValue int) int {\n\tv := GetInt(key)\n\tif v != 0 {\n\t\treturn v\n\t}\n\treturn defaultValue\n}", "func Get(key string, defaultVal string) string {\n\tval, ok := os.LookupEnv(key)\n\tif !ok {\n\t\tval = defaultVal\n\t}\n\treturn val\n}" ]
[ "0.7986519", "0.7857246", "0.75847757", "0.7222349", "0.72123957", "0.7165278", "0.70656717", "0.7026597", "0.6992428", "0.69578", "0.69111544", "0.68092704", "0.6761588", "0.67093635", "0.6700677", "0.6666582", "0.663473", "0.6605895", "0.657694", "0.6516577", "0.64769053", "0.6453791", "0.6436697", "0.64362955", "0.6425268", "0.64208454", "0.6356361", "0.6327881", "0.62645316", "0.6221551", "0.6201738", "0.6185549", "0.6173262", "0.6128501", "0.6117407", "0.61011624", "0.60939693", "0.60757744", "0.60654646", "0.60648", "0.6052861", "0.6025893", "0.6020677", "0.6014976", "0.59952307", "0.59859717", "0.59778416", "0.5969444", "0.596396", "0.59561604", "0.5951497", "0.59423554", "0.59388024", "0.59367865", "0.5935148", "0.5879686", "0.58723", "0.5865104", "0.58645767", "0.58634406", "0.5860624", "0.5850414", "0.5830297", "0.5802419", "0.5802125", "0.58016235", "0.5797226", "0.5795985", "0.57941324", "0.5790302", "0.5779903", "0.575825", "0.5737998", "0.5735994", "0.57350767", "0.5730169", "0.5729709", "0.5679602", "0.56525475", "0.5567716", "0.5567115", "0.5560117", "0.55538154", "0.55455583", "0.5530461", "0.5527205", "0.5526821", "0.54950964", "0.54940635", "0.54891014", "0.5487764", "0.54705983", "0.54609454", "0.5445202", "0.54430664", "0.5441622", "0.5429677", "0.5422097", "0.5402514", "0.5398328" ]
0.81530017
0
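GetInt from the record above has an asymmetric failure mode worth noting: a missing variable silently yields defvalue, but a value that strconv.Atoi cannot parse prints a message and calls os.Exit(0). The sketch below is not part of the dataset row; it copies the helper verbatim, and the env var names (MAX_RETRIES, WORKER_COUNT) are invented purely for illustration.

package main

import (
	"fmt"
	"os"
	"strconv"
	"strings"
)

// GetInt is copied from the record above so this sketch compiles on its own.
// A missing variable yields defvalue; an unparsable value prints a message
// and terminates the process with status 0.
func GetInt(name string, defvalue int) int {
	env := os.Getenv(strings.ToUpper(name))
	if env == "" {
		return defvalue
	}
	v, err := strconv.Atoi(env)
	if err != nil {
		fmt.Println(fmt.Sprintf("%s Error: os.Getenv %s <-> %d", "GetInt", strings.ToUpper(name), defvalue))
		os.Exit(0)
	}
	return int(v)
}

func main() {
	os.Setenv("MAX_RETRIES", "5")
	fmt.Println(GetInt("max_retries", 3))  // prints 5 (parsed from the env var)
	fmt.Println(GetInt("worker_count", 4)) // prints 4 (unset in this sketch, fallback returned)
	// Setting MAX_RETRIES to "five" would print the error line and terminate
	// the program via os.Exit(0) instead of returning.
}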
GetBool returns env var `name` as a bool, or `defvalue` if it's missing.
func GetBool(name string, defvalue bool) bool {
	env := os.Getenv(strings.ToUpper(name))
	if env == "" {
		return defvalue
	}
	if strings.ToLower(env) == "true" {
		return true
	}
	return false
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetBool(name string, defaultVal bool) bool {\n\tvalStr, ok := os.LookupEnv(name)\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\n\tval, err := strconv.ParseBool(valStr)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn val\n}", "func Bool(name string, defaultValue bool) bool {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tif res, err := strconv.ParseBool(strVal); err == nil {\n\t\t\treturn res\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func GetBool(name string, defaultValue ...bool) bool {\n\tvalue, ok := os.LookupEnv(name)\n\tboolValue, err := strconv.ParseBool(value)\n\n\tif (!ok || err != nil) && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing or invalid %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok || err != nil {\n\t\tboolValue = defaultValue[0]\n\t}\n\n\treturn boolValue\n}", "func GetBoolVal(envVar string, defaultValue bool) bool {\n\tif val := os.Getenv(envVar); val != \"\" {\n\t\tif strings.ToLower(val) == \"true\" {\n\t\t\treturn true\n\t\t} else if strings.ToLower(val) == \"false\" {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn defaultValue\n}", "func getEnvAsBool(name string, defaultVal bool) bool {\n\tvalStr := getEnv(name, \"\")\n\tif val, err := strconv.ParseBool(valStr); err == nil {\n\t\treturn val\n\t}\n\n\treturn defaultVal\n}", "func getEnvAsBool(name string, defaultVal bool) bool {\n\tvalStr := getEnv(name, \"\")\n\tif val, err := strconv.ParseBool(valStr); err == nil {\n\t\treturn val\n\t}\n\n\treturn defaultVal\n}", "func Get(name, defvalue string) string {\n\tif s := os.Getenv(strings.ToUpper(name)); s == \"\" {\n\t\treturn defvalue\n\t} else {\n\t\treturn s\n\t}\n}", "func LookupBoolEnv(envName string, defVal bool) bool {\n\tif envVal, exists := os.LookupEnv(envName); exists {\n\t\tif boolVal, err := strconv.ParseBool(envVal); err == nil {\n\t\t\treturn boolVal\n\t\t}\n\t}\n\n\treturn defVal\n}", "func Bool(key string, def bool) bool {\n\tenv, ok := os.LookupEnv(key)\n\tif !ok {\n\t\treturn def\n\t}\n\n\tswitch env {\n\tcase \"true\", \"T\", \"TRUE\", \"1\":\n\t\treturn true\n\tcase \"false\", \"F\", \"FALSE\", \"0\":\n\t\treturn false\n\tdefault:\n\t\treturn def\n\t}\n}", "func (c *AppConfig) getBool(key string, defaultValue bool) (val bool) {\n\tif v := os.Getenv(key); v != \"\" {\n\t\tif v == \"true\" {\n\t\t\treturn true\n\t\t} else {\n\t\t\treturn false\n\t\t}\n\t} else {\n\t\tos.Setenv(key, strconv.FormatBool(defaultValue))\n\t}\n\n\treturn defaultValue\n}", "func getEnvBool(env string, def bool) bool {\n\tvar (\n\t\terr error\n\t\tval = os.Getenv(env)\n\t\tret bool\n\t)\n\n\tif len(val) == 0 {\n\t\treturn def\n\t}\n\n\tif ret, err = strconv.ParseBool(val); err != nil {\n\t\tlog.Fatal(val + \" environment variable is not boolean\")\n\t}\n\n\treturn ret\n}", "func MustGetBool(k string) bool {\n\tv := os.Getenv(k)\n\tif v == \"\" {\n\t\tlog.Panicln(\"ENV missing, key: \" + k)\n\t}\n\tb, err := strconv.ParseBool(v)\n\tif err != nil {\n\t\tlog.Panicln(\"ENV err: [\" + k + \"]\\n\" + err.Error())\n\t}\n\treturn b\n}", "func GetEnvBool(v string, def bool) bool {\n\tr := os.Getenv(v)\n\tif r == \"\" {\n\t\treturn def\n\t}\n\n\tswitch strings.ToLower(r[0:1]) {\n\tcase \"t\", \"y\", \"1\":\n\t\treturn true\n\t}\n\n\treturn false\n}", "func GetBool(envKey string, defaultValue bool) bool {\n\tstr := os.Getenv(envKey)\n\tif str == \"\" {\n\t\treturn defaultValue\n\t}\n\tval, err := strconv.ParseBool(str)\n\tif err != nil {\n\t\treturn defaultValue\n\t}\n\treturn val\n}", "func Bool(name string) (bool, error) {\n\tv, err := 
getenv(name)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\treturn strconv.ParseBool(v)\n}", "func GetenvBool(name string) bool {\n\tv := strings.ToLower(os.Getenv(name))\n\treturn v == \"true\" || v == \"t\" || v == \"1\"\n}", "func (c *Config) GetB(name string, defvals ...bool) bool {\n\tif c == nil || c.mx == nil {\n\t\tif len(defvals) == 0 {\n\t\t\treturn false\n\t\t}\n\n\t\treturn defvals[0]\n\t}\n\n\tc.mx.RLock()\n\tval := c.data[strings.ToLower(name)]\n\tc.mx.RUnlock()\n\n\tif val == \"\" {\n\t\tif len(defvals) == 0 {\n\t\t\treturn false\n\t\t}\n\n\t\treturn defvals[0]\n\t}\n\n\tswitch val {\n\tcase \"\", \"0\", \"false\", \"no\":\n\t\treturn false\n\tdefault:\n\t\treturn true\n\t}\n}", "func boolValue(vars resource.PropertyMap, prop resource.PropertyKey, envs []string) bool {\n\tval, ok := vars[prop]\n\tif ok && val.IsBool() {\n\t\treturn val.BoolValue()\n\t}\n\tfor _, env := range envs {\n\t\tval, ok := os.LookupEnv(env)\n\t\tif ok && val == \"true\" {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "func GetEnvBool(key string, def bool) bool {\n\tif value := os.Getenv(key); value == \"true\" {\n\t\treturn true\n\t}\n\treturn def\n}", "func GetEnvAsBool(name string, fallback bool) bool {\n\tif value, ok := os.LookupEnv(name); ok && len(value) > 0 {\n\t\treturn strings.Contains(\"1onyestrue\", strings.ToLower(value))\n\t}\n\treturn fallback\n}", "func (ev Vars) Bool(envVar string, defaults ...bool) bool {\n\tif value, hasValue := ev[envVar]; hasValue {\n\t\tif len(value) > 0 {\n\t\t\treturn util.String.CaseInsensitiveEquals(value, \"true\") || util.String.CaseInsensitiveEquals(value, \"yes\") || value == \"1\"\n\t\t}\n\t}\n\tif len(defaults) > 0 {\n\t\treturn defaults[0]\n\t}\n\treturn false\n}", "func BoolStrict(name string, defaultValue bool) (bool, error) {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tres, err := strconv.ParseBool(strVal)\n\t\tif err != nil {\n\t\t\treturn false, err\n\t\t}\n\n\t\treturn res, nil\n\t}\n\n\treturn defaultValue, nil\n}", "func (c *Conf) GetBoolWithDef(path string, defVal bool) bool {\n\tc.mutex.RLock()\n\tdefer c.mutex.RUnlock()\n\tvalue, err := c.root.getValue(path)\n\tif err != nil {\n\t\treturn defVal\n\t}\n\tbValue, err := strconv.ParseBool(value)\n\tif err != nil {\n\t\treturn defVal\n\t}\n\treturn bValue\n}", "func GetenvBool(key string, fallback bool) bool {\n\tif v, ok := syscall.Getenv(key); ok {\n\t\tb, err := strconv.ParseBool(v)\n\t\tif err != nil {\n\t\t\treturn fallback\n\t\t}\n\t\treturn b\n\t}\n\treturn fallback\n}", "func getEnv(name, def string) (value string) {\n\tif value = os.Getenv(name); value == \"\" {\n\t\tvalue = def\n\t}\n\treturn\n}", "func getBoolEnv(key string, fallback bool) bool {\n\tenvStrValue := getEnv(key, \"\")\n\tif envStrValue == \"\" {\n\t\treturn fallback\n\t}\n\tenvBoolValue, err := strconv.ParseBool(envStrValue)\n\tif err != nil {\n\t\tpanic(\"Env Var \" + key + \" must be either true or false\")\n\t}\n\treturn envBoolValue\n}", "func evaluateENVBool(envVar string, defaultValue bool) bool {\n\tenvValue, isSet := os.LookupEnv(envVar)\n\n\tif isSet {\n\n\t\tswitch strings.ToLower(envValue) {\n\n\t\tcase \"false\", \"0\", \"no\", \"n\", \"f\":\n\t\t\tlog.Infof(\"%s is %t through environment variable\", envVar, false)\n\t\t\treturn false\n\t\t}\n\t\tlog.Infof(\"%s is %t through environment variable\", envVar, true)\n\t\treturn true\n\t}\n\tlog.Infof(\"%s is %t (defaulted) through environment variable\", envVar, defaultValue)\n\treturn defaultValue\n}", "func GetBool(key string, defaultValue bool) 
bool {\n\tb, err := DefaultConf.ConfigFactory.GetValue(key).ToBool()\n\tif err != nil {\n\t\treturn defaultValue\n\t}\n\treturn b\n}", "func GetEnvVarBool(name string) (bool, error) {\n\tstr := os.Getenv(name)\n\tif len(str) == 0 {\n\t\treturn false, nil\n\t}\n\tval, isBool := ParseBool(str)\n\tif !isBool {\n\t\treturn false, fmt.Errorf(\"invalid %s env var boolean value: [%s]\", name, str)\n\t}\n\treturn val, nil\n}", "func GetB(name string, defvals ...bool) bool {\n\tif global == nil {\n\t\tif len(defvals) == 0 {\n\t\t\treturn false\n\t\t}\n\n\t\treturn defvals[0]\n\t}\n\n\treturn global.GetB(name, defvals...)\n}", "func GetBoolEnvVar(envVarName string, defaultValue bool) bool {\n\tvalue := os.Getenv(envVarName)\n\tif value == \"\" {\n\t\treturn defaultValue\n\t}\n\n\tif boolValue, err := strconv.ParseBool(value); err == nil {\n\t\treturn boolValue\n\t}\n\treturn defaultValue\n}", "func GetBoolean(vars map[string]string, key string, def bool) bool {\n\tval, ok := vars[key]\n\tif ok {\n\t\tif b, err := strconv.ParseBool(val); err == nil {\n\t\t\treturn b\n\t\t} else {\n\t\t\tlog.Printf(\"failed to convert config[%v]=%v to boolean: %v\", key, val, err)\n\t\t}\n\t}\n\treturn def\n}", "func GetenvBool(key string, dflt ...bool) bool {\n\tif val := strings.TrimSpace(os.Getenv(key)); val != \"\" {\n\t\tif b, err := strconv.ParseBool(val); err == nil {\n\t\t\treturn b\n\t\t}\n\t}\n\n\tif len(dflt) > 0 {\n\t\treturn dflt[0]\n\t}\n\n\treturn false\n}", "func (kv KeyValueStore) GetBoolDef(key string) bool {\n\tval, _ := kv.Get(key)\n\treturn stringToBool(val)\n}", "func (c *Config) GetBool(pattern string, def ...interface{}) bool {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetBool(pattern, def...)\n\t}\n\treturn false\n}", "func GetBoolEnv(key string, fallback bool) bool {\n\tvar boolResult = fallback\n\tvar stringResult = os.Getenv(key)\n\n\tif stringResult != \"\" {\n\t\tb, _ := strconv.ParseBool(stringResult)\n\t\tboolResult = b\n\t}\n\n\treturn boolResult\n}", "func Bool(v string) (bool, bool, error) {\n\tswitch os.Getenv(v) {\n\tcase \"true\":\n\t\treturn true, true, nil\n\tcase \"false\":\n\t\treturn false, true, nil\n\tcase \"\":\n\t\treturn false, false, nil\n\tdefault:\n\t\treturn false, false, fmt.Errorf(\"%s must be 'true' or 'false'\", v)\n\t}\n}", "func GetBool(key string, defaultValue bool) bool {\n\treturn archaius.GetBool(key, defaultValue)\n}", "func getStringOpt(name, dfault string) string {\n if value := os.Getenv(name); value != \"\" {\n return value\n }\n return dfault\n}", "func GetBool(key string) bool {\n\ts, contains := getFromMode(key)\n\tif !contains {\n\t\ts, contains = getFromGlobal(key)\n\t}\n\tif contains {\n\t\tb, err := strconv.ParseBool(s)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"GetBool error:\", err)\n\t\t\treturn false\n\t\t}\n\t\treturn b\n\t}\n\tfmt.Println(\"value of\", key, \"is not set!\")\n\treturn false\n}", "func getBoolValue(i *ini.File, section, key string, vdefault bool) bool {\n\treturn i.Section(section).Key(key).MustBool(vdefault)\n}", "func getBoolVal(input string) bool {\n\tinput = strings.ToLower(input)\n\tif input == \"yes\" || input == \"true\" {\n\t\treturn true\n\t}\n\treturn false\n}", "func (c *Configure) ReadBoolValue(key string, def bool) bool {\n\tv := c.Get(key)\n\tif v == \"\" {\n\t\treturn def\n\t}\n\tvalue, err := strconv.ParseBool(v )\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn value\n\n}", "func GetBoolWithDefault(fixControlMap map[uint64]string, key uint64, defaultVal bool) bool {\n\tvalue, exists := 
GetBool(fixControlMap, key)\n\tif !exists {\n\t\treturn defaultVal\n\t}\n\treturn value\n}", "func GetBool(key string) bool { return viper.GetBool(key) }", "func Bool(name string, defs ...bool) bool {\n\tvar def bool\n\tfor _, d := range defs {\n\t\tdef = d\n\t\tbreak\n\t}\n\tswitch v := strings.ToLower(Raw(name)); {\n\tcase v == \"true\":\n\t\treturn true\n\tcase v == \"false\":\n\t\treturn false\n\tdefault:\n\t\treturn def\n\t}\n}", "func GetBool(name string) bool {\n\t//params, err := url.ParseQuery(r.URL.RawQuery)\n\t//if err != nil {\n\t//\treturn false\n\t//}\n\n\t//value, ok := params[name]\n\t//if !ok {\n\t//\treturn false\n\t//}\n\n\tstrValue := strings.Join([]string{\"\", \"\"}, \"\")\n\tif strValue == \"\" {\n\t\treturn true\n\t}\n\n\tboolValue, err := strconv.ParseBool(strValue)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn boolValue\n}", "func BoolVar(p *bool, name string, value bool) {\n\t*p = value\n\tfuncs = append(funcs, func() bool {\n\t\tif s := os.Getenv(name); s != \"\" {\n\t\t\tv, err := strconv.ParseBool(s)\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(name, err)\n\t\t\t\treturn false\n\t\t\t}\n\t\t\t*p = v\n\t\t}\n\t\treturn true\n\t})\n}", "func GetBool(path string, dflt ...bool) (bool, error) {\n\tcheckDefaultConfigIsLoaded()\n\treturn defaultConfig.GetBool(path, dflt...)\n}", "func GetStringEnvWithDefault(name, def string) string {\n\tvar val string\n\t\n\tif val = os.Getenv(name); val == \"\" {\n\t\tlog.Printf(\"Env variant %s not found, using default value: %s\", name, def)\n\t\treturn def\n\t}\n\t\n\tlog.Printf(\"Env variant %s found, using env value: %s\", name, val)\n\treturn val\n}", "func Bool(key string, d bool) bool {\n\tv := os.Getenv(key)\n\tv = strings.ToLower(v)\n\tif v == \"1\" ||\n\t\t(v) == \"true\" {\n\t\treturn true\n\t}\n\tif v == \"0\" ||\n\t\t(v) == \"false\" {\n\t\treturn false\n\t}\n\treturn d\n}", "func (c *ConfigImpl) GetBool(key string, defaultValue ...interface{}) (bool, error) {\n\t// Get raw value\n\traw, err := c.getExpand(key, defaultValue...)\n\t// If not exists,\n\tif nil != raw {\n\t\t// Convert to string....\n\t\tswitch v := raw.(type) {\n\t\tcase bool:\n\t\t\treturn v, err\n\t\tcase string:\n\t\t\treturn strconv.ParseBool(v)\n\t\tdefault:\n\t\t\t// Convert to string\n\t\t\tstrval := fmt.Sprint(v)\n\t\t\treturn strconv.ParseBool(strval)\n\t\t}\n\t}\n\treturn false, err\n}", "func (e EnvConfig) Get(name string, defaultValue string) string {\n\tv := os.Getenv(strings.ToUpper(name))\n\tif v == \"\" {\n\t\tv = defaultValue\n\t}\n\treturn v\n}", "func (c *Cli) EnvBool(name string, value bool, usage string) *bool {\n\tc.env[name] = &EnvAttribute{\n\t\tName: name,\n\t\tType: fmt.Sprintf(\"%T\", value),\n\t\tBoolValue: value,\n\t\tUsage: usage,\n\t}\n\t// FIXME: make sure i am creating a point to the boolean value in the map.\n\tvar p *bool\n\tp = &c.env[name].BoolValue\n\t_, ok := c.env[name]\n\tif ok == false {\n\t\treturn nil\n\t}\n\treturn p\n}", "func (el *EnvVars) Bool(key string) bool {\n\tval, found := el.load(key)\n\tif !found {\n\t\treturn false\n\t}\n\n\trv, err := strconv.ParseBool(val)\n\tif err != nil {\n\t\t// invalid values will now return an error\n\t\t// previous behavior defaulted to false\n\t\tpanic(err)\n\t}\n\n\treturn rv\n}", "func getEnvBool(key string) (bool, error) {\n\tv, ok := os.LookupEnv(key)\n\tif !ok {\n\t\treturn false, nil\n\t}\n\n\tb, err := strconv.ParseBool(v)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"%s: %v\", key, err)\n\t}\n\n\treturn b, nil\n}", "func (e EnvVars) Bool(ctx 
context.Context) (*bool, error) {\n\tvars := e.vars(ctx)\n\n\tif vars.Has(e.Key) {\n\t\tvalue := vars.Bool(e.Key)\n\t\treturn &value, nil\n\t}\n\treturn nil, nil\n}", "func (c *Controller) GetBool(key string, def ...bool) (bool, error) {\n\tstrv := c.Ctx.Input.Query(key)\n\tif len(strv) == 0 && len(def) > 0 {\n\t\treturn def[0], nil\n\t}\n\treturn strconv.ParseBool(strv)\n}", "func (c Controller) GetBool(key string, def ...bool) bool {\n\tif v := string(c.QueryArgs().Peek(key)); v != \"\" {\n\t\ttmp, _ := strconv.ParseBool(v)\n\t\treturn tmp\n\t}\n\tif len(def) > 0 {\n\t\treturn def[0]\n\t}\n\treturn false\n}", "func GetBool(fixControlMap map[uint64]string, key uint64) (value bool, exists bool) {\n\tif fixControlMap == nil {\n\t\treturn false, false\n\t}\n\trawValue, ok := fixControlMap[key]\n\tif !ok {\n\t\treturn false, false\n\t}\n\t// The same as TiDBOptOn in sessionctx/variable.\n\tvalue = strings.EqualFold(rawValue, \"ON\") || rawValue == \"1\"\n\treturn value, true\n}", "func (c *Validator) GetBool(key string, def ...bool) (bool, error) {\n\tstrv := c.Input.Query(key)\n\tif len(strv) == 0 && len(def) > 0 {\n\t\treturn def[0], nil\n\t}\n\treturn strconv.ParseBool(strv)\n}", "func getEnvOrDefault(name string, fallback string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" {\n\t\treturn fallback\n\t}\n\n\treturn val\n}", "func AskBoolDef(question string, def bool) bool {\n\tdefAns := \"n\"\n\n\tif def {\n\t\tdefAns = \"y\"\n\t}\n\n\tfmt.Fprintf(Out, \"%s (y/n) (%s):\\n\", question, defAns)\n\tans := prompt()\n\n\tif ans == \"\" {\n\t\treturn def\n\t}\n\n\tif strings.ToLower(ans) != \"y\" {\n\t\treturn false\n\t}\n\n\treturn true\n}", "func GetStr(name string, defaultValue ...string) string {\n\tvalue, ok := os.LookupEnv(name)\n\tif !ok && len(defaultValue) == 0 {\n\t\tfmt.Printf(\"ERROR: missing %s environment variable!\\n\", name)\n\t\tos.Exit(1)\n\t}\n\n\tif !ok {\n\t\tvalue = defaultValue[0]\n\t}\n\n\treturn value\n}", "func (h *Helpers) BoolEnv(s string) bool {\n\ts = strings.ToUpper(s)\n\tif v, ok := os.LookupEnv(s); ok {\n\t\tobool, err := strconv.ParseBool(v)\n\t\tif err != nil {\n\t\t\tlogrus.Errorln(err)\n\t\t\treturn false\n\t\t}\n\n\t\treturn obool\n\t}\n\n\treturn false\n}", "func GetEnvBool(key string) (envValBool bool) {\n\tif envVal, ok := os.LookupEnv(key); ok {\n\t\tenvValBool, _ = strconv.ParseBool(envVal)\n\t}\n\treturn\n}", "func Bool(key string, def bool) bool {\n\tif s := String(key, \"\"); s != \"\" {\n\t\tif d, err := strconv.ParseBool(s); err == nil {\n\t\t\treturn d\n\t\t} else {\n\t\t\tLog(key, err)\n\t\t}\n\t}\n\treturn def\n}", "func GetBoolEnv(varName string) (bool, error) {\n\tvarValue := os.Getenv(varName)\n\n\t// no value given\n\tif varValue == \"\" {\n\t\treturn false, BooleanNoValueError\n\t}\n\n\ttruthy := regexp.MustCompile(booleanTruthyMatch)\n\tfalsy := regexp.MustCompile(booleanFalsyMatch)\n\n\tif truthy.FindString(varValue) != \"\" {\n\t\treturn true, nil\n\t}\n\tif falsy.FindString(varValue) != \"\" {\n\t\treturn false, nil\n\t}\n\n\treturn false, BooleanFormatError\n}", "func getEnvOr(name, defaultValue string) (out string) {\n\tout = os.Getenv(name)\n\tif out == \"\" {\n\t\tout = defaultValue\n\t}\n\treturn\n}", "func (jc *JuiceConfig) GetBool(path string, dflt ...bool) (bool, error) {\n\tif jc.hadError {\n\t\treturn false, errors.New(\"Already had error\")\n\t}\n\tjvalue, ok := (*jc.config)[path]\n\tif ok {\n\t\tvalue, err := jvalue.Boolean()\n\t\tif err == nil {\n\t\t\treturn value, nil\n\t\t}\n\t\treturn false, jc.setError(\"Value 
is not bool [\" + path + \"]\")\n\t}\n\n\t// Value not found. Is there a default?\n\tif len(dflt) > 0 {\n\t\treturn dflt[0], nil\n\t}\n\treturn false, jc.setError(\"Value not found [\" + path + \"]\")\n}", "func (ctx *Ctx) BoolEnv(k string) bool {\n\tv := os.Getenv(ctx.DSPrefix + k)\n\treturn StringToBool(v)\n}", "func (c *Command) GetBool(name string) (bool, error) {\n\tvalue := c.Flagset.Lookup(name).Value.String()\n\tif value == \"\" {\n\t\treturn false, ErrParameterNotFound\n\t}\n\treturn strconv.ParseBool(value)\n}", "func Get(key string, defaultVal string) string {\n\tval, ok := os.LookupEnv(key)\n\tif !ok {\n\t\tval = defaultVal\n\t}\n\treturn val\n}", "func GetEnvStr(name, def string) (res string) {\n\tres = def\n\n\ts := os.Getenv(name)\n\tif len(s) > 0 {\n\t\tres = s\n\t}\n\n\treturn\n}", "func GetFromEnvOrDefault(name, defaultValue string) string {\n\tfromEnv := os.Getenv(name)\n\tif fromEnv == \"\" {\n\t\treturn defaultValue\n\t}\n\treturn fromEnv\n}", "func (c Claims) GetBool(name string) (bool, error) {\n\tif !c.Has(name) {\n\t\treturn false, ErrNotFound\n\t}\n\n\t// Type check\n\tswitch val := c[name].(type) {\n\tcase string:\n\t\tv, _ := strconv.ParseBool(val)\n\t\treturn v, nil\n\tcase bool:\n\t\treturn val, nil\n\t}\n\n\treturn false, ErrClaimValueInvalid\n}", "func (c *Command) GetBool(key string, def ...bool) (bool, error) {\n\tv := c.Query(key)\n\n\tif v != nil && len(v.CommandValue) > 0 {\n\t\treturn strconv.ParseBool(v.CommandValue)\n\t} else if len(def) > 0 {\n\t\treturn def[0], nil\n\t} else if v.DefaultValue != nil {\n\t\treturn v.DefaultValue.(bool), nil\n\t}\n\n\treturn false, nil\n}", "func GetDefaultBool(in bool) bool {\n\treturn in\n}", "func MustGetenv(varname string, defaultValue string) string {\n\tlog.Debugf(\"Reading environment variable %v\", varname)\n\n\tvalue := os.Getenv(varname)\n\tif value == \"\" {\n\t\tif defaultValue == \"\" {\n\t\t\tlog.Fatalf(\"Missing env variable %v\", varname)\n\t\t}\n\t\tvalue = defaultValue\n\t}\n\n\treturn value\n}", "func MustGet(name string) string {\n\tenvVar, ok := os.LookupEnv(name)\n\tif !ok {\n\t\tlog.Fatalf(\"environment variable (%s) has not been set\", name)\n\t}\n\tif envVar == \"\" {\n\t\tlog.Fatalf(\"environment variable (%s) is empty\", name)\n\t}\n\treturn envVar\n}", "func (key *EnvironmentKey) GetBool() bool {\n\tkey.registerBinding()\n\treturn key.helper.GetBool(key.Name)\n}", "func Getenv(name string, def ...string) string {\n\tval := os.Getenv(name)\n\tif val == \"\" && len(def) > 0 {\n\t\tval = def[0]\n\t}\n\n\treturn val\n}", "func getOrElse(key, standard string) string {\n\tif val := os.Getenv(key); val != \"\" {\n\t\treturn val\n\t} else if standard == \"\" {\n\t\tlog.Fatalf(\"ERROR: The environment variable, %s, must be set\", key)\n\t}\n\treturn standard\n}", "func mustGetenv(name string) string {\n\tvalue := os.Getenv(name)\n\tif value == \"\" {\n\t\tpanic(name + \" not defined in environment\")\n\t}\n\treturn value\n}", "func NamedBoolDefault(name string, def bool) func(http.ResponseWriter, url.Values, martini.Context) {\n\treturn func(w http.ResponseWriter, query url.Values, m martini.Context) {\n\t\tvalue_string := query.Get(name)\n\t\tvalue, err := strconv.ParseBool(value_string)\n\n\t\tif \"\" == value_string {\n\t\t\tm.Map(NamedBoolParameter(def))\n\t\t\treturn\n\t\t}\n\n\t\tif nil != err {\n\t\t\thttp.Error(w, fmt.Sprintf(\"\\\"%s\\\" is not a boolean\"), 422)\n\t\t}\n\n\t\tm.Map(NamedBoolParameter(value))\n\t}\n}", "func ConfigVal(name, defval string) string {\n\tif path := 
*options.String[name]; path != \"\" {\n\t\treturn path\n\t} else if path := os.Getenv(strings.ToUpper(name)); path != \"\" {\n\t\treturn path\n\t} else if defval != \"\" {\n\t\treturn defval\n\t} else {\n\t\toptions.Usage(\"Must supply -%s or set $%s\", name, strings.ToUpper(name))\n\t\treturn \"\" // not reached\n\t}\n}", "func (c *Configuration) GetBool(name string) (bool, error) {\n\tv, ok := c.data[name].(bool)\n\tif !ok {\n\t\treturn false, errors.New(fmt.Sprintf(\"no existe el campo %s o no se puede convertir en bool\", name))\n\t}\n\n\treturn v, nil\n}", "func Get(key, defaultValue string) string {\n\tif v, ok := os.LookupEnv(key); ok {\n\t\treturn v\n\t}\n\treturn defaultValue\n}", "func mustEnv(key string, value *string, defaultVal string) {\r\n\tif *value = os.Getenv(key); *value == \"\" {\r\n\t\t*value = defaultVal\r\n\t\tfmt.Printf(\"%s env variable not set, using default value.\\n\", key)\r\n\t}\r\n}", "func (a Attributes) GetAsBoolWithDefault(key string, defaultValue bool) bool {\n\tswitch v := a[key].(type) {\n\tcase bool:\n\t\treturn v\n\tcase string:\n\t\tif result, err := strconv.ParseBool(v); err == nil {\n\t\t\treturn result\n\t\t}\n\t}\n\treturn defaultValue\n}", "func GetBool(key string) bool { return c.GetBool(key) }", "func Get(key, defaultValue string) string {\n\tvalue, ok := os.LookupEnv(key)\n\tif ok {\n\t\treturn value\n\t}\n\n\treturn defaultValue\n}", "func getenv(key, def string) string {\n\tif value, ok := os.LookupEnv(key); ok {\n\t\treturn value\n\t}\n\treturn def\n}", "func Bool(name string, value string, usage string, aliases ...string) *Value {\n\treturn newBool(flag.Var, name, value, usage, aliases...)\n}", "func (cfg *Config) Bool(name string) (bool, error) {\n\tv, ok := cfg.findLast(name)\n\tif !ok {\n\t\treturn false, fmt.Errorf(\"config %s: not found\", name)\n\t}\n\tif v == nil {\n\t\t// No equals sign, which implies true.\n\t\treturn true, nil\n\t}\n\tb, ok := parseBool(v)\n\tif !ok {\n\t\treturn false, fmt.Errorf(\"config %s: cannot parse %q as a bool\", name, v)\n\t}\n\treturn b, nil\n}", "func Get(envKey, defaultVal string) string {\n\tval := os.Getenv(envKey)\n\tif val == \"\" {\n\t\tval = defaultVal\n\t}\n\treturn val\n}", "func (s *StressFlag) Bool(name string, def bool, usage string) *bool {\n\tv := def\n\n\tswitch name {\n\tcase \"top\":\n\t\tv = true\n\t}\n\n\treturn &v\n}", "func GetBool(key string) bool {\n\treturn viper.GetBool(key)\n}", "func BoolSet(flg *flag.FlagSet, name string, value string, usage string, aliases ...string) *Value {\n\treturn newBool(flg.Var, name, value, usage, aliases...)\n}", "func RequiredWithDefaultEnvVar(flag *string, envVarName, errMsg string) *string {\n\tif *flag != \"\" {\n\t\treturn flag\n\t}\n\tnewFlag := os.Getenv(envVarName)\n\tif newFlag != \"\" {\n\t\treturn &newFlag\n\t}\n\tFatal(CLI_INPUT_ERROR, errMsg)\n\treturn flag // won't ever happen, here just to make intellij happy\n}" ]
[ "0.71697176", "0.71646243", "0.7045053", "0.6848531", "0.6728741", "0.6728741", "0.6679044", "0.6624117", "0.6597611", "0.65082514", "0.6471463", "0.63924235", "0.6352383", "0.6341439", "0.6332916", "0.6249553", "0.6247445", "0.61819255", "0.6120278", "0.611434", "0.6100827", "0.60934883", "0.6068827", "0.60515887", "0.6045113", "0.601753", "0.5991733", "0.5985655", "0.5958519", "0.5950357", "0.5950189", "0.59360474", "0.59048396", "0.58806676", "0.58361363", "0.58328366", "0.5812019", "0.5801058", "0.5781167", "0.57583034", "0.5755419", "0.5745706", "0.5735016", "0.5730968", "0.5726633", "0.5723111", "0.571526", "0.5714853", "0.56985736", "0.5695938", "0.5690585", "0.5665587", "0.56268346", "0.5626269", "0.5612916", "0.56078035", "0.55593824", "0.5549492", "0.5531417", "0.55299175", "0.5526008", "0.5518673", "0.55146843", "0.55135995", "0.550194", "0.5493048", "0.5458458", "0.54401946", "0.54344827", "0.54205096", "0.5406087", "0.5401685", "0.53994626", "0.53828144", "0.53739893", "0.5369882", "0.5369017", "0.53642327", "0.53612864", "0.53505397", "0.5336283", "0.533478", "0.5324919", "0.53058636", "0.53040606", "0.5297836", "0.5285283", "0.52849084", "0.527285", "0.5267141", "0.52614266", "0.5256203", "0.52472824", "0.5246217", "0.5237758", "0.52357036", "0.5232443", "0.5231442", "0.5218684", "0.52103496" ]
0.7630428
0
parse from server:port/prefix server/prefix /prefix
func ParseParams(urlString string) (Params, error) { p := NewParams() if !strings.HasPrefix(urlString, "http") { urlString = "https://" + urlString } u, err := url.Parse(urlString) if err != nil { return p, err } p.Query = u.Query() if u.Scheme == "" { u.Scheme = "https" } if u.Path != "" { p.Prefix = strings.Trim(u.Path, "/") } u.RawQuery = "" u.Fragment = "" u.Path = "" u.RawPath = "" p.Server = u.String() return p, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ParsePrefix(line string) *Prefix {\n\t// Start by creating an Prefix with nothing but the host\n\tid := &Prefix{\n\t\tName: line,\n\t}\n\n\tuh := strings.SplitN(id.Name, \"@\", 2)\n\tif len(uh) == 2 {\n\t\tid.Name, id.Host = uh[0], uh[1]\n\t}\n\n\tnu := strings.SplitN(id.Name, \"!\", 2)\n\tif len(nu) == 2 {\n\t\tid.Name, id.User = nu[0], nu[1]\n\t}\n\n\treturn id\n}", "func ParsePrefix(in string) *Prefix {\n\tif len(in) == 0 {\n\t\treturn nil\n\t}\n\tif in[0] == '!' || in[0] == '@' {\n\t\treturn nil\n\t}\n\tdpos := strings.Index(in, \".\") + 1\n\tupos := strings.Index(in, \"!\") + 1\n\thpos := strings.Index(in[upos:], \"@\") + 1 + upos\n\tp := &Prefix{Raw: in}\n\tif upos > 0 {\n\t\tp.Nickname = in[:upos-1]\n\t\tif hpos > 0 && upos < hpos {\n\t\t\tp.User = in[upos : hpos-1]\n\t\t\tp.Host = in[hpos:]\n\t\t} else {\n\t\t\tp.User = in[upos:]\n\t\t}\n\t} else if hpos > 0 {\n\t\tp.Nickname = in[:hpos-1]\n\t\tp.Host = in[hpos:]\n\t} else if dpos > 0 {\n\t\tp.Host = in\n\t\tp.IsServer = true\n\t} else {\n\t\tp.Nickname = in\n\t}\n\treturn p\n}", "func resolvePrefix(prefix string) (nick, ident, host, src string, err error) {\n\tsrc = prefix\n\tif prefix == \"\" {\n\t\tnick = \"<Server>\"\n\t\treturn\n\t}\n\n\tnickEnd := strings.Index(prefix, \"!\")\n\tuserEnd := strings.Index(prefix, \"@\")\n\tif nickEnd != -1 && userEnd != -1 {\n\t\tnick = prefix[0:nickEnd]\n\t\tident = prefix[nickEnd+1 : userEnd]\n\t\thost = prefix[userEnd+1:]\n\t} else {\n\t\tnick = src\n\t}\n\n\treturn\n}", "func parse(target, protocol, port string) string {\n\tline := target\n\n\t// if no protocol, no port was specified\n\tif len(protocol) == 0 && len(port) == 0 {\n\t\t// scope only http/https\n\t\tline = `http(s)?://` + line\n\n\t\t// if port was specified but no protocol\n\t} else if len(protocol) == 0 && len(port) > 0 {\n\t\t// scope any protocol\n\t\tline = `\\w+://` + line\n\t} else {\n\n\t}\n\n\t// escape '.'\n\tline = strings.Replace(line, \".\", `\\.`, -1)\n\t// escape '/'\n\tline = strings.Replace(line, \"/\", `\\/`, -1)\n\t// replace wildcard\n\tline = strings.Replace(line, \"*\", `[\\S]*`, -1)\n\t// Zap needs this to scope URL params\n\tline = `^` + line + `[\\S]*$`\n\n\treturn line\n}", "func parseListenString(s string) (string, string, error) {\n\tif s == \"\" {\n\t\treturn \"\", \"\", fmt.Errorf(\"patroni configuration option 'restapi.listen' not found\")\n\t}\n\n\tif s == \"::\" {\n\t\treturn \"[::1]\", \"8008\", nil\n\t}\n\n\tparts := strings.Split(s, \":\")\n\n\tvar addr, port string\n\tvar ip net.IP\n\n\tif len(parts) != 1 {\n\t\tip = net.ParseIP(strings.Join(parts[0:len(parts)-1], \":\"))\n\t\tport = parts[len(parts)-1]\n\t} else {\n\t\tip = net.ParseIP(parts[0])\n\t\tport = \"8008\"\n\t}\n\n\t// Convert 'unspecified' address to loopback. 
Wraps IPv6 addresses into square brackets (required for net/http).\n\tif ip.Equal(net.IPv4zero) {\n\t\taddr = \"127.0.0.1\"\n\t} else if ip.Equal(net.IPv6unspecified) {\n\t\taddr = fmt.Sprintf(\"[%s]\", net.IPv6loopback.String())\n\t} else {\n\t\tif ip.To4() != nil {\n\t\t\taddr = ip.String()\n\t\t} else {\n\t\t\taddr = fmt.Sprintf(\"[%s]\", ip.String())\n\t\t}\n\t}\n\n\treturn addr, port, nil\n}", "func (sock *Server) parse(line string) {\n\tsplit := strings.SplitN(line, \" \", 4)\n\tsplit = append(split, make([]string, 4-len(split), 4-len(split))...)\n\n\tswitch true {\n\tcase split[0] == \"PING\":\n\t\tsock.pong(split[1]) //Ping e.g.: PING :B97B6379\n\tcase split[1] == \"JOIN\":\n\t\teventOnJoin(sock, split[2][1:], getNick(split[0]))\n\tcase split[1] == \"PART\":\n\t\tif len(split[3]) == 0 {\n\t\t\tsplit[3] = \" \"\n\t\t}\n\t\teventOnPart(sock, split[2], getNick(split[0]), split[3][1:])\n\tcase split[1] == \"QUIT\":\n\t\tif split[3] != \"\" {\n\t\t\tsplit[2] += \" \" + split[3]\n\t\t}\n\t\tif len(split[2]) == 0 {\n\t\t\tsplit[2] = \" \"\n\t\t}\t\n\t\teventOnQuit(sock, getNick(split[0]), split[2][1:])\n\tcase split[1] == \"PRIVMSG\":\n\t\tnick := getNick(split[0])\n\t\tchannel := split[2]\n\t\tif channel == sock.Nickname {\n\t\t\tchannel = nick\n\t\t}\n\t\tif len(split[3]) == 0 {\n\t\t\tsplit[3] = \" \"\n\t\t}\n\t\teventOnPrivmsg(sock, channel, nick, split[3][1:])\n\tcase split[1] == \"NOTICE\":\n\t\tnick := getNick(split[0])\n\t\tchannel := split[2]\n\t\tif channel == sock.Nickname {\n\t\t\tchannel = nick\n\t\t}\n\t\tif len(split[3]) == 0 {\n\t\t\tsplit[3] = \" \"\n\t\t}\t\n\t\teventOnNotice(sock, channel, nick, split[3][1:])\n\tcase isNum(split[1]):\n\t\teventOnReply(sock, split[1], split[2], split[3])\n\t}\n}", "func parseEndpoint(endpoint string) (listener, host string, port int, err error) {\n\tre := regexp.MustCompile(`([A-Za-z_]+)://([^:]+):(\\d+)`)\n\tmatches := re.FindStringSubmatch(endpoint)\n\tif matches == nil {\n\t\treturn \"\", \"\", 0, errors.New(\"regex pattern did not match endpoint\")\n\t}\n\n\tport, err = strconv.Atoi(matches[3])\n\tif err != nil {\n\t\treturn \"\", \"\", 0, fmt.Errorf(\"failed parsing port as int: %s\", err)\n\t}\n\n\treturn matches[1], matches[2], port, nil\n}", "func parseSchemeAndPort(services []byte, scheme, port string) (string, string) {\n\tre := regexp.MustCompile(`([a-zA-Z0-9-]+)\\s+(\\d+)`) // for configs/services\n\t// re groups: 0. full match - [ftp 21]\n\t// 1. service - [ftp] 21\n\t// 2. 
port - ftp [21]\n\n\tif isVar(scheme) && !isVar(port) {\n\t\t// set corresponding port from configs/services\n\t\tscanner := bufio.NewScanner(strings.NewReader(string(services[:])))\n\t\tfor scanner.Scan() {\n\t\t\tmatch := re.FindStringSubmatch(scanner.Text())\n\t\t\tif scheme == match[1] {\n\t\t\t\tport = \"^\" + match[2] + \"$\"\n\t\t\t}\n\t\t}\n\t} else if !isVar(scheme) && !isVar(port) {\n\t\t// set port to 80, 443\n\t\tport = \"^(80|443)$\"\n\t} else if isVar(scheme) && isVar(port) {\n\t\t// set whatever port + service port\n\t\tif scheme == \"http\" {\n\t\t\tport = \"^(80|\" + port + \")$\"\n\t\t} else if scheme == \"https\" {\n\t\t\tport = \"^(443|\" + port + \")$\"\n\t\t} else {\n\t\t\tport = \"^\" + port + \"$\"\n\t\t}\n\t} else if isVar(port) {\n\t\tport = \"^\" + port + \"$\"\n\t}\n\n\t// set \"Any\" when not http(s)\n\tif scheme != \"http\" && scheme != \"https\" {\n\t\tscheme = \"Any\"\n\t}\n\n\treturn scheme, port\n}", "func ParseWithPrefix(conf interface{}, prefix string) (*ConfInfo, error) {\n\treturn ParseWithOptions(conf, Options{Prefix: prefix})\n}", "func (t *Target) parse(fn string) error {\n\tdata, err := ioutil.ReadFile(fn)\n\tif err != nil {\n\t\treturn err\n\t}\n\tconnections := connectionset{}\n\txml.Unmarshal(data, &connections)\n\tfor _, conn := range connections.Conns {\n\t\tif conn.Id == t.Name {\n\t\t\tt.dest = strings.Replace(conn.URI, \"ipbusudp-2.0://\", \"\", 1)\n\t\t\t//ns := nodes{}\n\t\t\taddr := strings.Replace(conn.Address, \"file://\", \"\", 1)\n\t\t\tif err := t.parseregfile(addr, \"\", uint32(0)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn error(nil)\n}", "func parseUpstream(upstream string) (result Upstream, err error) {\n\tif strings.Trim(upstream, \" \") == \"\" {\n\t\treturn Upstream{}, nil\n\t}\n\n\tparts := strings.Split(upstream, \":\")\n\n\tif len(parts) < 2 || len(parts) > 3 {\n\t\terr = fmt.Errorf(\"wrong configuration, couldn't parse input '%s', please enter net:host[:port]\", upstream)\n\t\treturn\n\t}\n\n\tnet := strings.TrimSpace(parts[0])\n\n\tif _, ok := netDefaultPort[net]; !ok {\n\t\terr = fmt.Errorf(\"wrong configuration, couldn't parse net '%s', please user one of %s\",\n\t\t\tnet, reflect.ValueOf(netDefaultPort).MapKeys())\n\t\treturn\n\t}\n\n\tvar port uint16\n\n\thost := strings.TrimSpace(parts[1])\n\n\tif len(parts) == 3 {\n\t\tvar p int\n\t\tp, err = strconv.Atoi(strings.TrimSpace(parts[2]))\n\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"can't convert port to number %v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif p < 1 || p > 65535 {\n\t\t\terr = fmt.Errorf(\"invalid port %d\", p)\n\t\t\treturn\n\t\t}\n\n\t\tport = uint16(p)\n\t} else {\n\t\tport = netDefaultPort[net]\n\t}\n\n\treturn Upstream{Net: net, Host: host, Port: port}, nil\n}", "func parseArgs() (host, port string) {\n\targs := os.Args[1:]\n\tif len(args) != 2 {\n\t\tlog.Fatalln(\"Error: dashert requires two arguments <host> and <port>\\n\")\n\t}\n\th := args[0]\n\tp := args[1]\n\treturn h, p\n}", "func parseConnSpec(connStr string) (out connSpec, err error) {\n\tpartMatcher := regexp.MustCompile(`((.*):\\/\\/)?(([^\\/?:]*)(:([^\\/?:@]*))?@)?([^\\/?]*)(\\/([^\\?]*))?(\\?(.*))?`)\n\thostMatcher := regexp.MustCompile(`([^;\\,\\:]+)(:([0-9]*))?(;\\,)?`)\n\tparts := partMatcher.FindStringSubmatch(connStr)\n\n\tif parts[2] != \"\" {\n\t\t(&out.Scheme).load(parts[2])\n\t\tif out.Scheme == csInvalid {\n\t\t\terr = fmt.Errorf(\"Unknown scheme '%s'\", parts[2])\n\t\t\treturn\n\t\t}\n\t\tout.hasExplicitScheme = true\n\t} else {\n\t\tout.Scheme = 
csPlainMcd\n\t}\n\n\tif parts[7] != \"\" {\n\t\thosts := hostMatcher.FindAllStringSubmatch(parts[7], -1)\n\t\tfor _, hostInfo := range hosts {\n\t\t\tport := 0\n\t\t\tif hostInfo[3] != \"\" {\n\t\t\t\tport, err = strconv.Atoi(hostInfo[3])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tout.hasExplicitPort = true\n\t\t\t}\n\t\t\terr = out.addRawHost(hostInfo[1], port)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(out.HttpHosts) == 0 && len(out.MemcachedHosts) == 0 {\n\t\terr = out.addRawHost(\"127.0.0.1\", 0)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif parts[9] != \"\" {\n\t\tout.Bucket, err = url.QueryUnescape(parts[9])\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif parts[11] != \"\" {\n\t\tout.Options, err = url.ParseQuery(parts[11])\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}", "func getPrefix(line string) string {\n\tif strings.Contains(line, \":\") {\n\t\tres := strings.Split(line, \":\")\n\t\treturn res[0]\n\t}\n\treturn \"\"\n}", "func (parser *Parser) parseNamespace(start bool) {\n\tprefixIdx := parser.getLEWord(parser.ParserOffset + (4 * WORD_SIZE))\n\turiIdx := parser.getLEWord(parser.ParserOffset + (5 * WORD_SIZE))\n\n\turi := parser.getString(uriIdx)\n\tprefix := parser.getString(prefixIdx)\n\n\tif start {\n\t\tparser.listener.StartPrefixMapping(prefix, uri)\n\t\tparser.Namespaces[uri] = prefix\n\t} else {\n\t\tparser.listener.EndPrefixMapping(prefix, uri)\n\t\tdelete(parser.Namespaces, uri)\n\t}\n\n\t// Offset to first tag\n\tparser.ParserOffset += (6 * WORD_SIZE)\n}", "func parseAddr(addr string) (string, string) {\n\tparsed := strings.SplitN(addr, \":\", 2)\n\treturn parsed[0], parsed[1]\n}", "func parseHost(addr string) string {\n\tvar (\n\t\thost, port string\n\t\tdefaultAssigned bool\n\t)\n\n\tv := strings.Split(addr, \":\")\n\n\tswitch len(v) {\n\tcase 2:\n\t\thost = v[0]\n\t\tport = v[1]\n\n\t\tif host == \"\" {\n\t\t\thost = _DEFAULT_HOST\n\t\t\tdefaultAssigned = true\n\t\t}\n\n\t\tif port == \"\" {\n\t\t\tport = _DEFAULT_PORT\n\t\t\tdefaultAssigned = true\n\t\t}\n\n\t\tif defaultAssigned == false {\n\t\t\treturn addr // addr is already in required format\n\t\t}\n\t\tbreak\n\n\tcase 1:\n\t\thost = v[0]\n\t\tif host == \"\" {\n\t\t\thost = _DEFAULT_HOST\n\t\t}\n\t\tport = _DEFAULT_PORT\n\tcase 0:\n\t\tfallthrough\n\tdefault:\n\t\thost = _DEFAULT_HOST\n\t\tport = _DEFAULT_PORT\n\t\tbreak\n\t}\n\treturn strings.Join([]string{host, port}, \":\")\n}", "func parseConnectionStr(connectionStr string) (string, error) {\n\tdbPos := strings.Index(connectionStr, \":\")\n\tif dbPos == -1 {\n\t\treturn \"\", errorList[\"err.db.conf.separator.missing\"]\n\t}\n\tdbName := strings.TrimSpace(connectionStr[:dbPos])\n\n\treturn dbName, nil\n}", "func (d *dataHandler) parsePORTcommand(line string) error {\n\t// PORT command format : \"PORT h1,h2,h3,h4,p1,p2\\r\\n\"\n\tvar err error\n\n\td.clientConn.remoteIP, d.clientConn.remotePort, err = parseLineToAddr(strings.Split(strings.Trim(line, \"\\r\\n\"), \" \")[1])\n\n\t// if received ip is not public IP, ignore it\n\tif !isPublicIP(net.ParseIP(d.clientConn.remoteIP)) {\n\t\td.clientConn.remoteIP = d.clientConn.originalRemoteIP\n\t}\n\n\treturn err\n}", "func prefix(k string) *goraptor.Uri {\n\tvar pref string\n\trest := k\n\tif i := strings.Index(k, \":\"); i >= 0 {\n\t\tpref = k[:i+1]\n\t\trest = k[i+1:]\n\t}\n\tif long, ok := rdfPrefixes[pref]; ok {\n\t\tpref = long\n\t}\n\turi := goraptor.Uri(pref + rest)\n\treturn &uri\n}", "func parseHostPort(str string) 
(string, string) {\n\tvar (\n\t\thost string\n\t\tport string\n\n\t\ti = strings.Index(str, \":\")\n\t)\n\tif i == -1 {\n\t\treturn str, \"\"\n\t}\n\n\thost = str[:i]\n\tport = str[i+1:]\n\n\treturn host, port\n}", "func ServiceAndPort(host string) (string, string) {\n\tsp := strings.Split(host, \":\")\n\tif len(sp) <= 1 {\n\t\treturn host, \"0\"\n\t}\n\tss := strings.Split(sp[0], \".\")\n\tif len(ss) <= 1 {\n\t\treturn sp[0], sp[1]\n\t}\n\treturn ss[0], sp[1]\n}", "func parseHostAndPath(s string) (string, string) {\n\ttoks := strings.SplitN(s, \":\", 2)\n\n\tif len(toks) > 1 {\n\t\treturn toks[0], toks[1]\n\t}\n\n\treturn s, \"./\"\n}", "func (p *parser) parseLine(line string) {\n\tfor _, option := range p.options {\n\t\tif strings.HasPrefix(line, option.prefix) {\n\t\t\t// 1st condiition expect to parse a single line where the prefix is match\n\t\t\tif option.count == 0 {\n\t\t\t\tp.ips = append(p.ips, strings.Fields(line)[option.index])\n\t\t\t}\n\n\t\t\t// look for the IPs in lines after prefix are matched\n\t\t\tfor i := 0; i < option.count; i++ {\n\t\t\t\tif !p.scanner.Scan() {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tline := p.scanner.Text()\n\t\t\t\tp.ips = append(p.ips, strings.Fields(line)[option.index])\n\t\t\t}\n\t\t}\n\t}\n}", "func parseNameServer() ([]net.IP, error) {\n\tfile, err := os.Open(\"/etc/resolv.conf\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error opening /etc/resolv.conf: %v\", err)\n\t}\n\tdefer file.Close()\n\n\tscan := bufio.NewScanner(file)\n\tscan.Split(bufio.ScanLines)\n\n\tip := make([]net.IP, 0)\n\n\tfor scan.Scan() {\n\t\tserverString := scan.Text()\n\t\tif strings.Contains(serverString, \"nameserver\") {\n\t\t\ttmpString := strings.Replace(serverString, \"nameserver\", \"\", 1)\n\t\t\tnameserver := strings.TrimSpace(tmpString)\n\t\t\tsip := net.ParseIP(nameserver)\n\t\t\tif sip != nil && !sip.Equal(config.Config.ListenIP) {\n\t\t\t\tip = append(ip, sip)\n\t\t\t}\n\t\t}\n\t}\n\tif len(ip) == 0 {\n\t\treturn nil, fmt.Errorf(\"there is no nameserver in /etc/resolv.conf\")\n\t}\n\treturn ip, nil\n}", "func prefixHandler(query string) (string, string) {\n\tif strings.HasPrefix(query, \"<=\") ||\n\t\tstrings.HasPrefix(query, \">=\") {\n\t\treturn query[:2], query[2:]\n\t} else if strings.HasPrefix(query, \"<\") ||\n\t\tstrings.HasPrefix(query, \">\") {\n\t\treturn query[:1], query[1:]\n\t} else if strings.HasPrefix(query, \"(\") {\n\t\treturn \"startsWith\", query[1:]\n\t} else if strings.HasPrefix(query, \")\") {\n\t\treturn \"endsWith\", query[1:]\n\t} else if strings.HasPrefix(query, \"=\") {\n\t\treturn \"=\", query[1:]\n\t} else if strings.HasPrefix(query, \"!\") {\n\t\treturn \"!=\", query[1:]\n\t} else if strings.HasPrefix(query, \"~\") {\n\t\treturn \"interface\", query[1:]\n\t} else {\n\t\treturn \"default\", query\n\t}\n}", "func (s *serverCGI) extractHost(method string) (string, string, int) {\n\treg := regexp.MustCompile(\"^\" + method + `/([^\\+]*)(\\+.*)`)\n\tm := reg.FindStringSubmatch(s.path())\n\tif m == nil {\n\t\tlog.Println(\"illegal url\")\n\t\treturn \"\", \"\", 0\n\t}\n\tpath := m[2]\n\thost, portstr, err := net.SplitHostPort(m[1])\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn \"\", \"\", 0\n\t}\n\tport, err := strconv.Atoi(portstr)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn \"\", \"\", 0\n\t}\n\treturn host, path, port\n}", "func parseReplicationInfo(m map[string]string) map[string]string 
{\n\t/*\n\t\trole:master\n\t\tconnected_slaves:1\n\t\tslave0:ip=10.1.1.228,port=7004,state=online,offset=3689968249,lag=1\n\t\tmaster_replid:17270cf205f7c98c4c8e80c348fd0564132e6643\n\t\tmaster_replid2:0000000000000000000000000000000000000000\n\t\t...\n\t\t...\n\t*/\n\tif len(m) < 1 {\n\t\treturn nil\n\t}\n\tslaveReg, _ := regexp.Compile(\"^slave([0-9]*)\")\n\tslaveMapping := make(map[string]string)\n\n\ttmpInfolines := make([]string, 0)\n\tfor key, value := range m {\n\t\tif !slaveReg.MatchString(key) {\n\t\t\tcontinue\n\t\t}\n\t\tinfoss := strings.Split(value, \",\")\n\t\tif len(infoss) < 2 {\n\t\t\treturn nil\n\t\t}\n\t\tfor _, info := range infoss {\n\t\t\t// ip=10.1.1.228,...\n\t\t\tinfoLine := strings.Split(info, \"=\")\n\t\t\ttmpInfolines = append(tmpInfolines, infoLine...)\n\t\t}\n\t\targs := sliceStr2Dict(tmpInfolines)\n\t\tslaveMapping[key] =\n\t\t\tstrings.Join(\n\t\t\t\tstrings.Split(fieldSplicing(args, \"ip\", \"port\"), \",\"),\n\t\t\t\t\":\",\n\t\t\t)\n\t}\n\treturn slaveMapping\n}", "func configureServer(s *http.Server, scheme, addr string) {\n\n}", "func parse(b []byte) *Message {\n var servername, nick, user, host string\n var command, target, msg string\n words := bytes.Split(b, bytes.NewBufferString(\" \").Bytes())\n\n if len(words) >= 4 {\n if match, _ := regexp.Match(\"^:\", words[0]); match {\n if match, _ := regexp.Match(\"!|@\", words[0]); match {\n i := 1\n for words[0][i] != '!' { i++ }\n nick = bytes.NewBuffer(words[0][1:i]).String()\n j := i+1\n for words[0][j] != '@' { j++ }\n var wordstart int = i + 1\n if words[0][i+1] == '~' {\n wordstart = i+2\n }\n\n user = bytes.NewBuffer(words[0][wordstart:j]).String()\n k := j+1\n host = bytes.NewBuffer(words[0][k:len(words[0])]).String()\n } else {\n servername = bytes.NewBuffer(words[0][1:len(words[0])]).String()\n }\n }\n command = bytes.NewBuffer(words[1]).String()\n target = bytes.NewBuffer(words[2]).String()\n str := bytes.Join(words[3:len(words)], bytes.NewBufferString(\" \").Bytes())\n msg = bytes.NewBuffer(str[1:len(str)]).String()\n } else {\n if match, _ := regexp.Match(\"PING\", words[0]); match {\n command = \"PING\"\n host= bytes.NewBuffer(words[1][1:len(words[1])]).String()\n fmt.Println(host)\n }\n }\n\n return &Message{\n Servername: servername,\n Nickname: nick,\n Username: user,\n Hostname: host,\n Command: command,\n Target: target,\n Message: msg,\n }\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func configureServer(s *graceful.Server, scheme, addr string) {\n}", "func (uri *URI) Parse(isConnect bool, reqURI []byte) {\n\turi.Reset()\n\turi.isConnect = isConnect\n\turi.full = reqURI\n\tif len(reqURI) == 0 {\n\t\treturn\n\t}\n\tfragmentIndex := bytes.IndexByte(reqURI, '#')\n\tif fragmentIndex >= 0 {\n\t\turi.fragments = reqURI[fragmentIndex:]\n\t\turi.parseWithoutFragments(reqURI[:fragmentIndex])\n\t} else {\n\t\turi.parseWithoutFragments(reqURI)\n\t}\n\tif !isConnect && len(uri.path) == 0 {\n\t\turi.path = 
[]byte(\"/\")\n\t}\n\tif isConnect {\n\t\turi.scheme = uri.scheme[:0]\n\t\turi.path = uri.path[:0]\n\t\turi.queries = uri.queries[:0]\n\t\turi.fragments = uri.fragments[:0]\n\t}\n\turi.hostInfo.ParseHostWithPort(string(uri.host), isConnect)\n}", "func parseLine(l string) (*Event, error) {\n\tev := &Event{}\n\tws := strings.Split(l, \" \") // split args on \" \"\n\tvar paramIndex int // the argument index where the parameters are\n\n\t// Make sure we have at least two params (PREFIX and COMMAND)\n\tif len(ws) < 1 {\n\t\treturn nil, fmt.Errorf(InvalidLineSize, len(ws), 2)\n\t}\n\n\t// Check if our \"prefix\" has \":\"\n\tif ws[0][0] == ':' {\n\t\t// Server sent a prefix\n\t\tev.Prefix = ws[0][1:]\n\t\tev.Command = ws[1]\n\n\t\tparamIndex = 2\n\t} else {\n\t\t// Server did not send a prefix\n\t\tev.Prefix = \"\"\n\t\tev.Command = ws[0]\n\n\t\tparamIndex = 1\n\t}\n\n\tev.Parameters = readParams(ws, paramIndex)\n\tev.Timestamp = time.Now()\n\n\treturn ev, nil\n}", "func parseHost(host string) string {\n\trealHost, _, _ := net.SplitHostPort(host)\n\tif realHost != \"\" {\n\t\treturn realHost\n\t}\n\treturn host\n}", "func ParsePrefix(cursor *bufio.Reader) string {\n\tvar buffer string\n\tfor {\n\t\tnextc, _, err := cursor.ReadRune()\n\t\tif strings.ContainsRune(\" \", nextc) {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlog.Fatal(\"Error parsing prefix \", err)\n\t\t}\n\t\tbuffer += string(nextc)\n\t}\n\t//fmt.Printf(\"Prefix:\\t\\t%s \\n\", buffer)\n\treturn buffer\n}", "func parsehostpath(hostpath string) (host string, path string) {\n\tsplits := strings.Split(hostpath, \"/\")\n\thost = splits[0]\n\tpath = strings.Join(splits[1:len(splits)], \"/\")\n\tpath = \"/\" + path\n\treturn\n}", "func (dns *EdgeDNS) parseNameServer() ([]net.IP, error) {\n\tfile, err := os.Open(\"/etc/resolv.conf\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error opening /etc/resolv.conf: %v\", err)\n\t}\n\tdefer file.Close()\n\n\tscan := bufio.NewScanner(file)\n\tscan.Split(bufio.ScanLines)\n\n\tip := make([]net.IP, 0)\n\n\tfor scan.Scan() {\n\t\tserverString := scan.Text()\n\t\tif strings.Contains(serverString, \"nameserver\") {\n\t\t\ttmpString := strings.Replace(serverString, \"nameserver\", \"\", 1)\n\t\t\tnameserver := strings.TrimSpace(tmpString)\n\t\t\tsip := net.ParseIP(nameserver)\n\t\t\tif sip != nil && !sip.Equal(dns.ListenIP) {\n\t\t\t\tip = append(ip, sip)\n\t\t\t}\n\t\t}\n\t}\n\tif len(ip) == 0 {\n\t\treturn nil, fmt.Errorf(\"there is no nameserver in /etc/resolv.conf\")\n\t}\n\treturn ip, nil\n}", "func parseArgs(args []string) {\n\n\tvar i int\n\tvar arg string\n\nFORLOOP:\n\tfor i, arg = range args {\n\n\t\tswitch {\n\t\tcase arg == \"-h\":\n\t\t\tusage()\n\t\tcase arg == \"-v\":\n\t\t\tfmt.Println(Progname + \" version \" + Version)\n\t\t\tos.Exit(1)\n\t\tcase arg == \"+tcp\":\n\t\t\tOptions.tcp = true\n\t\tcase arg == \"+ignore\":\n\t\t\tOptions.ignore = true\n\t\tcase arg == \"+dnssec\":\n\t\t\tOptions.dnssec = true\n\t\t\tOptions.edns = true\n\t\tcase arg == \"+nocreate\":\n\t\t\tOptions.nocreate = true\n\t\tcase strings.HasPrefix(arg, \"@\"):\n\t\t\tOptions.servers = []string{arg[1:]}\n\t\tcase strings.HasPrefix(arg, \"-p\"):\n\t\t\tn, err := strconv.Atoi(arg[2:])\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid port (-p): %s\\n\", arg[2:])\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tOptions.port = n\n\t\tcase strings.HasPrefix(arg, \"+bufsize=\"):\n\t\t\tn, err := strconv.Atoi(strings.TrimPrefix(arg, \"+bufsize=\"))\n\t\t\tif err != nil 
{\n\t\t\t\tfmt.Printf(\"Invalid bufsize: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tOptions.bufsize = uint16(n)\n\t\t\tOptions.edns = true\n\t\tcase strings.HasPrefix(arg, \"+ednsflags=\"):\n\t\t\tn, err := strconv.Atoi(strings.TrimPrefix(arg, \"+ednsflags=\"))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid ednsflags: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tOptions.edns = true\n\t\t\tOptions.edns_flags = uint16(n)\n\t\tcase strings.HasPrefix(arg, \"+ednsopt=\"):\n\t\t\ts := strings.SplitN(strings.TrimPrefix(arg, \"+ednsopt=\"), \":\", 2)\n\t\t\tn, err := strconv.Atoi(s[0])\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid ednsopt: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\to := new(EdnsoptStruct)\n\t\t\to.code = uint16(n)\n\t\t\tif len(s) == 2 {\n\t\t\t\to.data = s[1]\n\t\t\t}\n\t\t\tOptions.edns = true\n\t\t\tOptions.edns_opt = append(Options.edns_opt, o)\n\t\tcase strings.HasPrefix(arg, \"+retry=\"):\n\t\t\tn, err := strconv.Atoi(strings.TrimPrefix(arg, \"+retry=\"))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid retry parameter: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tOptions.retries = n\n\t\tcase strings.HasPrefix(arg, \"+time=\"):\n\t\t\tn, err := strconv.Atoi(strings.TrimPrefix(arg, \"+time=\"))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid timeout parameter: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tOptions.itimeout = time.Duration(n) * time.Second\n\t\t\tOptions.tcptimeout = time.Duration(n) * time.Second\n\t\tcase strings.HasPrefix(arg, \"+parallel=\"):\n\t\t\tn, err := strconv.Atoi(strings.TrimPrefix(arg, \"+parallel=\"))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Invalid #parallel queries: %s\\n\", arg)\n\t\t\t\tusage()\n\t\t\t}\n\t\t\tnumParallel = uint16(n)\n\t\tcase strings.HasPrefix(arg, \"-\"):\n\t\t\tfmt.Printf(\"Invalid option: %s\\n\", arg)\n\t\t\tusage()\n\t\tcase strings.HasPrefix(arg, \"+\"):\n\t\t\tfmt.Printf(\"Invalid option: %s\\n\", arg)\n\t\t\tusage()\n\t\tdefault:\n\t\t\tbreak FORLOOP\n\t\t}\n\n\t}\n\n\tif len(args)-i != 2 {\n\t\tfmt.Printf(\"ERROR: Exactly 2 arguments required: batchfile dbfile.\\n\")\n\t\tusage()\n\t}\n\n\tOptions.batchfile = args[i]\n\tOptions.dbfile = args[i+1]\n\treturn\n}", "func parsePodPrefixes(clauses []string) (map[string][]string, error) {\n\tpodPrefixes := map[string][]string{}\n\tfor _, p := range clauses {\n\t\tif strings.Contains(p, \":\") {\n\t\t\tss := strings.Split(p, \":\")\n\t\t\tdesc := ss[0]\n\t\t\tps := strings.Split(ss[1], \"|\")\n\t\t\tpodPrefixes[desc] = append(podPrefixes[desc], ps...)\n\t\t} else if strings.Contains(p, \"|\") {\n\t\t\treturn nil, errors.New(\"required-pods must be either <namespace>/<pod-name> or <desc>:<namespace>/<pod-name>|<namespace>/<pod-name>|...\")\n\t\t} else {\n\t\t\tpodPrefixes[p] = []string{p}\n\t\t}\n\t}\n\treturn podPrefixes, nil\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func configureServer(s *http.Server, scheme, addr string) {\n}", "func SplitHostPort(hostport string) (host, port string, err error) {\n\tj, k := 0, 0\n\n\t// The port starts after the last colon.\n\ti := last(hostport, ':')\n\tif i < 0 {\n\t\tgoto missingPort\n\t}\n\n\tif hostport[0] == '[' {\n\t\t// Expect the first ']' just before the last ':'.\n\t\tend := byteIndex(hostport, 
']')\n\t\tif end < 0 {\n\t\t\terr = &AddrError{\"missing ']' in address\", hostport}\n\t\t\treturn\n\t\t}\n\t\tswitch end + 1 {\n\t\tcase len(hostport):\n\t\t\t// There can't be a ':' behind the ']' now.\n\t\t\tgoto missingPort\n\t\tcase i:\n\t\t\t// The expected result.\n\t\tdefault:\n\t\t\t// Either ']' isn't followed by a colon, or it is\n\t\t\t// followed by a colon that is not the last one.\n\t\t\tif hostport[end+1] == ':' {\n\t\t\t\tgoto tooManyColons\n\t\t\t}\n\t\t\tgoto missingPort\n\t\t}\n\t\thost = hostport[1:end]\n\t\tj, k = 1, end+1 // there can't be a '[' resp. ']' before these positions\n\t} else {\n\t\thost = hostport[:i]\n\t\tif byteIndex(host, ':') >= 0 {\n\t\t\tgoto tooManyColons\n\t\t}\n\t\tif byteIndex(host, '%') >= 0 {\n\t\t\tgoto missingBrackets\n\t\t}\n\t}\n\tif byteIndex(hostport[j:], '[') >= 0 {\n\t\terr = &AddrError{\"unexpected '[' in address\", hostport}\n\t\treturn\n\t}\n\tif byteIndex(hostport[k:], ']') >= 0 {\n\t\terr = &AddrError{\"unexpected ']' in address\", hostport}\n\t\treturn\n\t}\n\n\tport = hostport[i+1:]\n\treturn\n\nmissingPort:\n\terr = &AddrError{\"missing port in address\", hostport}\n\treturn\n\ntooManyColons:\n\terr = &AddrError{\"too many colons in address\", hostport}\n\treturn\n\nmissingBrackets:\n\terr = &AddrError{\"missing brackets in address\", hostport}\n\treturn\n}", "func parse(v []byte) {\n\ti := -1\n\tline := []byte(nil)\n\tfor {\n\t\tif line, v = getNextLine(v); line == nil {\n\t\t\tbreak\n\t\t} else {\n\t\t\ti++\n\t\t\tline = trimSpace(line)\n\t\t\tif i == 0 {\n\t\t\t\tprocessPortaStartLine(line)\n\t\t\t} else if i == 1 {\n\t\t\t\t// For the first line parse the request\n\t\t\t\tprocessSipRequestLine(line)\n\t\t\t} else {\n\t\t\t\t// For subsequent lines split in sep (: for sip, = for sdp)\n\t\t\t\tspos, stype := indexSep(line)\n\t\t\t\tif spos > 1 && stype == ':' {\n\t\t\t\t\t// SIP: Break up into header and value\n\t\t\t\t\tlhdr := line[0:spos]\n\t\t\t\t\tswitch {\n\t\t\t\t\tcase bytes.Equal(lhdr, viaCapBytes) || bytes.Equal(lhdr, viaBytes) || bytes.Equal(lhdr, []byte(\"v\")):\n\t\t\t\t\t\tProcessSipVia(line)\n\t\t\t\t\tcase bytes.Equal(lhdr, fromCapBytes) || bytes.Equal(lhdr, toCapBytes) || bytes.Equal(lhdr, contactCapBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, routeCapBytes) || bytes.Equal(lhdr, recordRouteCapBytes) || bytes.Equal(lhdr, rpidCapBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, paiCapBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, fromBytes) || bytes.Equal(lhdr, toBytes) || bytes.Equal(lhdr, contactBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, routeBytes) || bytes.Equal(lhdr, recordRouteBytes) || bytes.Equal(lhdr, rpidBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, paiBytes) ||\n\t\t\t\t\t\tbytes.Equal(lhdr, []byte(\"f\")) || bytes.Equal(lhdr, []byte(\"t\")) || bytes.Equal(lhdr, []byte(\"m\")):\n\t\t\t\t\t\tprocessURLBasedHeader(line)\n\t\t\t\t\tcase bytes.Equal(lhdr, callIDCapBytes) || bytes.Equal(lhdr, callIDBytes) || bytes.Equal(lhdr, []byte(\"i\")):\n\t\t\t\t\t\tProcessSipCallID(line)\n\t\t\t\t\t}\n\t\t\t\t} else if spos == 1 && stype == '=' {\n\t\t\t\t\t// SDP: Break up into header and value\n\t\t\t\t\tlhdr := line[0]\n\t\t\t\t\t// Switch on the line header\n\t\t\t\t\tswitch {\n\t\t\t\t\tcase lhdr == 'm':\n\t\t\t\t\t\tprocessSdpMedia(line)\n\t\t\t\t\tcase lhdr == 'c':\n\t\t\t\t\t\tprocessSdpConnection(line)\n\t\t\t\t\tcase lhdr == 'o':\n\t\t\t\t\t\tprocessSdpOriginator(line)\n\t\t\t\t\t} // End of Switch\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func parseNsAndSvc(svc string) (namespace string, service string, ok bool) {\n\tif svc == \"\" 
{\n\t\treturn \"\", \"\", false\n\t}\n\tif strings.Contains(svc, \":\") {\n\t\tpairs := strings.Split(svc, \":\")\n\t\tif pairs[1] == \"\" {\n\t\t\treturn \"\", \"\", false\n\t\t}\n\t\treturn pairs[0], pairs[1], true\n\t}\n\treturn \"default\", svc, true\n}", "func parseBind(flags *pflag.FlagSet, spec *api.ServiceSpec) error {\n\tif flags.Changed(\"bind\") {\n\t\tbinds, err := flags.GetStringSlice(\"bind\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tcontainer := spec.Task.GetContainer()\n\n\t\tfor _, bind := range binds {\n\t\t\tparts := strings.SplitN(bind, \":\", 2)\n\t\t\tif len(parts) != 2 {\n\t\t\t\treturn fmt.Errorf(\"bind format %q not supported\", bind)\n\t\t\t}\n\t\t\tcontainer.Mounts = append(container.Mounts, api.Mount{\n\t\t\t\tType: api.MountTypeBind,\n\t\t\t\tSource: parts[0],\n\t\t\t\tTarget: parts[1],\n\t\t\t})\n\t\t}\n\t}\n\n\treturn nil\n}", "func parseIPAndPort(input string) (string, int, error) {\n\tseparator := strings.LastIndex(input, \":\")\n\tif separator == -1 {\n\t\treturn \"\", 0, errors.New(\"cannot parse IP and port correctly\")\n\t}\n\tIPStr := input[0:separator]\n\tif IPStr[0] == '[' {\n\t\tIPStr = IPStr[1 : len(IPStr)-1]\n\t}\n\tfor _, prefix := range localIPv4 {\n\t\tif strings.HasPrefix(IPStr, prefix) {\n\t\t\treturn \"\", 0, errors.New(\"ignore this IP address\")\n\t\t}\n\t}\n\toutputIP := net.ParseIP(IPStr)\n\tif outputIP == nil {\n\t\treturn \"\", 0, errors.New(\"invalid IP address\")\n\t}\n\n\tport, err := strconv.Atoi(input[separator+1:])\n\tif err != nil {\n\t\treturn \"\", 0, errors.New(\"invalid IP port\")\n\t}\n\treturn IPStr, port, nil\n}", "func init() {\n\thostPtr := flag.String(\"host\", \"localhost\", \"ip of host\")\n\tportPtr := flag.String(\"port\", \"12345\", \"port on which to run server\")\n\tflag.Parse()\n\thost = *hostPtr\n\tport = *portPtr\n}", "func ParseEndpoint(ep string) (string, string, error) {\n\tif strings.HasPrefix(strings.ToLower(ep), \"unix://\") {\n\t\ts := strings.SplitN(ep, \"://\", 2)\n\t\tif s[1] != \"\" {\n\t\t\treturn s[0], s[1], nil\n\t\t}\n\t}\n\treturn \"\", \"\", fmt.Errorf(\"invalid endpoint: %v\", ep)\n}", "func parseContainerID(ID string) string {\n\ti := strings.Index(ID, \"://\")\n\tif i < 0 {\n\t\treturn ID\n\t}\n\treturn ID[i+3:]\n}", "func main() {\n\tbindTo := flag.String(\n\t\t\"l\", \"0.0.0.0:999\", \"interface and port to listen at\")\n\tflag.Parse()\n\trunServer(*bindTo)\n}", "func (c *NetConv) Server(address string, ln net.Listener) []attribute.KeyValue {\n\tif ln == nil {\n\t\treturn c.Host(address)\n\t}\n\n\tlAddr := ln.Addr()\n\tif lAddr == nil {\n\t\treturn c.Host(address)\n\t}\n\n\thostName, hostPort := splitHostPort(address)\n\tsockHostAddr, sockHostPort := splitHostPort(lAddr.String())\n\tnetwork := lAddr.Network()\n\tsockFamily := family(network, sockHostAddr)\n\n\tn := nonZeroStr(hostName, network, sockHostAddr, sockFamily)\n\tn += positiveInt(hostPort, sockHostPort)\n\tattr := make([]attribute.KeyValue, 0, n)\n\tif hostName != \"\" {\n\t\tattr = append(attr, c.HostName(hostName))\n\t\tif hostPort > 0 {\n\t\t\t// Only if net.host.name is set should net.host.port be.\n\t\t\tattr = append(attr, c.HostPort(hostPort))\n\t\t}\n\t}\n\tif network != \"\" {\n\t\tattr = append(attr, c.Transport(network))\n\t}\n\tif sockFamily != \"\" {\n\t\tattr = append(attr, c.NetSockFamilyKey.String(sockFamily))\n\t}\n\tif sockHostAddr != \"\" {\n\t\tattr = append(attr, c.NetSockHostAddrKey.String(sockHostAddr))\n\t\tif sockHostPort > 0 {\n\t\t\t// Only if net.sock.host.addr is set should 
net.sock.host.port be.\n\t\t\tattr = append(attr, c.NetSockHostPortKey.Int(sockHostPort))\n\t\t}\n\t}\n\treturn attr\n}", "func ParsePortMapping(s *string) (port *string, protocol *string, err error) {\n\tif s == nil {\n\t\treturn nil, nil, nil\n\t}\n\tportProtocol := strings.Split(*s, \"/\")\n\tswitch len(portProtocol) {\n\tcase 1:\n\t\treturn aws.String(portProtocol[0]), nil, nil\n\tcase 2:\n\t\treturn aws.String(portProtocol[0]), aws.String(portProtocol[1]), nil\n\tdefault:\n\t\treturn nil, nil, fmt.Errorf(\"cannot parse port mapping from %s\", *s)\n\t}\n}", "func ParseCliAddr(ctx *cli.Context) (string, string) {\n\treturn ctx.GlobalString(\"address\"), ctx.GlobalString(\"port\")\n}", "func parseConf() {\n\tselfConf.Servers = append(selfConf.Servers, pubConf.Servers...)\n}", "func UnmarshalServer(s string) (*Server, error) {\n\tp := strings.Split(s, \":\")\n\tif len(p) != 9 {\n\t\treturn nil, fmt.Errorf(\"unmarshal: malformed fields exp 8, saw %d\", len(p))\n\t}\n\n\tsz, err := strconv.Atoi(p[0])\n\tif err != nil || sz <= 0 {\n\t\treturn nil, fmt.Errorf(\"unmarshal: malformed field size %s\", p[0])\n\t}\n\tpf, ok := pflist[sz]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid prime-field size: %d\", sz)\n\t}\n\n\th, err := strconv.Atoi(p[1])\n\tif err != nil || h <= 0 {\n\t\treturn nil, fmt.Errorf(\"unmarshal: malformed field size %s\", p[1])\n\t}\n\n\thf := crypto.Hash(h)\n\tif !hf.Available() {\n\t\treturn nil, fmt.Errorf(\"unmarshal: hash algorithm %d unavailable\", h)\n\t}\n\n\ti, err := hex.DecodeString(p[2])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid identity: %s\", p[2])\n\t}\n\n\tsalt, err := hex.DecodeString(p[3])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid salt: %s\", p[3])\n\t}\n\n\tv := atobi(p[4], 10)\n\tif r := recover(); r != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid verifier: %s\", p[4])\n\t}\n\n\tB := atobi(p[5], 10)\n\tif r := recover(); r != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid ephemeral key B: %s\", p[5])\n\t}\n\n\tK, err := hex.DecodeString(p[6])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid key: %s\", p[6])\n\t}\n\n\tM, err := hex.DecodeString(p[7])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid M: %s\", p[7])\n\t}\n\n\tver, err := strconv.Atoi(p[8])\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unmarshal: invalid version: %s\", p[8])\n\t}\n\n\treturn &Server{\n\t\ts: &SRP{\n\t\t\th: hf,\n\t\t\tpf: pf,\n\t\t\tver: int32(ver),\n\t\t},\n\t\ti: i,\n\t\tsalt: salt,\n\t\tv: v,\n\t\txB: B,\n\t\txK: K,\n\t\txM: M,\n\t}, nil\n}", "func ParseZkServersFlag(zkServers string) (zkHosts []string, solrZkPath string, err error) {\n\t// TODO(scottb): move -zkServers flag processing to a shared location\n\tpos := strings.Index(zkServers, \"/\")\n\tif pos < 0 {\n\t\treturn nil, \"\", Errorf(\"-zkServers must be of the form host1:2181,host2:2181/solr; missing '/'\")\n\t}\n\thosts := zkServers[:pos]\n\tzkHosts = strings.Split(hosts, \",\")\n\tsolrZkPath = strings.TrimSuffix(zkServers[pos:], \"/\")\n\treturn zkHosts, solrZkPath, nil\n}", "func parseAddress(address string) (scheme, host, port string, err error) {\n\tif address == \"\" {\n\t\treturn\n\t}\n\tif strings.Contains(address, \"://\") {\n\t\turl, err := url.Parse(address)\n\t\tif err != nil {\n\t\t\treturn \"\", \"\", \"\", err\n\t\t}\n\t\tscheme, address = url.Scheme, url.Host\n\t}\n\tif strings.Contains(address, \":\") {\n\t\thost, port, err = net.SplitHostPort(address)\n\t\tif err != nil 
{\n\t\t\thost = address\n\t\t\terr = nil\n\t\t}\n\t} else {\n\t\thost = address\n\t}\n\tif port == \"\" {\n\t\tswitch scheme {\n\t\tcase \"http\", \"ws\":\n\t\t\tport = \"80\"\n\t\tcase \"https\", \"wss\":\n\t\t\tport = \"443\"\n\t\t}\n\t}\n\treturn\n}", "func parseName(ns []string) string {\n\treturn strings.Join(ns[len(namespacePrefix):], \"/\")\n}", "func parseRequest(str string, req *Request) error {\n\tchunks := strings.Split(str, \" \")\n\tif len(chunks) != 3 {\n\t\treturn fmt.Errorf(\"invalid request format\")\n\t}\n\n\treq.Method = chunks[0]\n\treq.Proto = chunks[2]\n\n\tif uri, err := url.Parse(chunks[1]); err == nil {\n\t\treq.Host = uri.Host\n\t\treq.Path = uri.Path\n\t}\n\n\treturn nil\n}", "func parseArgs(q *dany.Query, args []string, typeMap map[string]bool, testMode bool) ([]string, error) {\n\t// Regexps\n\treAtPrefix := regexp.MustCompile(\"^@\")\n\treDot := regexp.MustCompile(\"\\\\.\")\n\treComma := regexp.MustCompile(\",\")\n\n\t// Args: 1 domain (required); 1 @-prefixed server ip (optional); 1 comma-separated list of types (optional)\n\tvar newargs []string\n\tfor _, arg := range args {\n\t\targIsRRType := false\n\t\t// Check whether non-dotted args are bare RRtypes\n\t\tif !reDot.MatchString(arg) {\n\t\t\tif _, ok := typeMap[arg]; ok {\n\t\t\t\targIsRRType = true\n\t\t\t}\n\t\t}\n\t\t// Check for @<ip> server argument\n\t\t// Deprecated: use -s <ip> option instead\n\t\tif reAtPrefix.MatchString(arg) {\n\t\t\tif q.Server != \"\" {\n\t\t\t\terr := fmt.Errorf(\"Error: argument %q looks like `@<ip>`, but we already have %q\",\n\t\t\t\t\targ, q.Server)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif !testMode {\n\t\t\t\t// Deprecation warning\n\t\t\t\tfmt.Fprintln(os.Stderr, \"Warning: the @<ip> server argument is deprecated and will be removed in a future release\")\n\t\t\t\tfmt.Fprintln(os.Stderr, \"Please use the '-s/--server <ip>' option instead\")\n\t\t\t}\n\t\t\tserverIP := net.ParseIP(arg[1:])\n\t\t\tif serverIP == nil {\n\t\t\t\terr := fmt.Errorf(\"Error: argument %q looks like `@<ip>`, but unable to parse ip address\",\n\t\t\t\t\targ)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tq.Server = net.JoinHostPort(serverIP.String(), dnsPort)\n\t\t\tcontinue\n\t\t}\n\t\t// Check for <RR>[,<RR>...] 
types argument\n\t\t// Deprecated: use -t <types> option instead\n\t\tif argIsRRType || reComma.MatchString(arg) {\n\t\t\tif len(q.Types) != 0 {\n\t\t\t\terr := fmt.Errorf(\"Error: argument %q looks like types list, but we already have %q\",\n\t\t\t\t\targ, q.Types)\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif !testMode {\n\t\t\t\t// Deprecation warning\n\t\t\t\tfmt.Fprintln(os.Stderr, \"Warning: the [Types] argument is deprecated and will be removed in a future release\")\n\t\t\t\tfmt.Fprintln(os.Stderr, \"Please use the '-t/--types <types>' option instead\")\n\t\t\t}\n\t\t\t// Check all types are valid\n\t\t\ttypes := strings.Split(arg, \",\")\n\t\t\terr := checkValidTypes(types, typeMap)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tq.Types = types\n\t\t\tcontinue\n\t\t}\n\t\t// Otherwise assume hostname\n\t\tnewargs = append(newargs, arg)\n\t}\n\n\treturn newargs, nil\n}", "func parseListeningAddress(ctx *context.T, laddress string) (network string, address string, p flow.Protocol, err error) {\n\tparts := strings.SplitN(laddress, \"/\", 2)\n\tif len(parts) != 2 {\n\t\treturn \"\", \"\", nil, ErrorfInvalidAddress(ctx, \"invalid vine address %v, address must be of the form 'network/address/tag'\", laddress)\n\t}\n\tp, _ = flow.RegisteredProtocol(parts[0])\n\tif p == nil {\n\t\treturn \"\", \"\", nil, ErrorfNoRegisteredProtocol(ctx, \"no registered protocol: %v\", parts[0])\n\t}\n\treturn parts[0], parts[1], p, nil\n}", "func parseServerURL(rawURL string) (*url.URL, error) {\n\tuu, err := url.Parse(rawURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif uu.Scheme == \"\" {\n\t\treturn nil, fmt.Errorf(\"url %s missing scheme\", rawURL)\n\t}\n\tif uu.Path != \"\" && uu.Path != \"/\" {\n\t\treturn nil, fmt.Errorf(\"url %s should not contain path\", rawURL)\n\t}\n\treturn uu, nil\n}", "func ParseNameServers(fileContent []byte) []string {\n\tfileLines := bytes.Split(fileContent, []byte(\"\\n\"))\n\tvar nameservers []string\n\tfor _, currentLine := range fileLines {\n\t\tvar contentToParse = currentLine\n\t\tvar commentIndicatorIndex = bytes.Index(currentLine, []byte(\"#\"))\n\t\tif commentIndicatorIndex != -1 {\n\t\t\t// Only check the content before the comment section\n\t\t\tcontentToParse = currentLine[:commentIndicatorIndex]\n\t\t}\n\n\t\tserver := nameServerRegex.FindSubmatch(contentToParse)\n\t\tif len(server) == 2 {\n\t\t\taddress := string(server[1])\n\t\t\tif net.ParseIP(address) != nil {\n\t\t\t\tnameservers = append(nameservers, address)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nameservers\n}", "func startClientServer(){\n var convError error\n clientIndex,convError = strconv.Atoi(os.Args[1])\n if(convError!=nil){\n fmt.Println(\"Conversion Error\")\n }\n myAddress = clientConfigs[\"clientConfig\"][clientIndex].ClientHost + \":\" + clientConfigs[\"clientConfig\"][clientIndex].ClientPort\n myQueryAddress = clientConfigs[\"clientConfig\"][clientIndex].ClientHost + \":\" + clientConfigs[\"clientConfig\"][clientIndex].ClientSendPort\n cliAddr, err4 := net.ResolveUDPAddr(\"udp\", myAddress)\n if(err4!=nil){\n fmt.Println(\"cliAddr\",err4)\n }\n var tailConnError error\n connTail,tailConnError = net.ListenUDP(\"udp\",cliAddr)\n logMessage(\"Status\",\"Started the client server\")\n if(tailConnError != nil){\n fmt.Println(\"Error in Connecting the client\\n\")\n }\n}", "func parseTargetPath(target, serverUrl string) string {\n\tif strings.Contains(target, serverUrl) {\n\t\treturn target[len(serverUrl):]\n\t}\n\treturn target\n}", "func parseTargetPath(target, serverUrl string) 
string {\n\tif strings.Contains(target, serverUrl) {\n\t\treturn target[len(serverUrl):]\n\t}\n\treturn target\n}", "func(this *UdpServer) parse(buf []byte , to int) (*model.Message, error){\n\tfmt.Print(\"Server: Message arrived,\")\n\tfmt.Println(\" content: \"+string(buf[0:to]))\n\tmsg := model.Message{}\n\terr := json.Unmarshal(buf[0:to],&msg) // DECODE THE MESSAGE\n\tif(err != nil){ // IN CASE OF ERROR, SEND BACK AN ERROR MESSAGE\n\t\treturn model.NewMessage(0,SERVER_ADDR, \"\", util.ERROR, util.RESPONSE,err), err // TODO reduce to 1 line\n\t}\n\tswitch msg.Header { // VERIFY THE MESSAGE HEADER\n\t\tcase util.REQUEST: // IN CASE OF A REQUEST FROM TYPE\n\t\t\tswitch msg.Type {\n\t\t\t\tcase util.GROUP:// GROUP\n\t\t\t\t\tgroup := this.groups[strconv.Itoa(this.next_group)] // GET THE REQUESTED GROUP`S POINTER\n\t\t\t\t\tif len(group.Peers) < 4{ // (DEFINES A LIMIT TO THE GROUP SIZE)\n\t\t\t\t\t\tmListener := model.NewMulticastListener(len(group.Peers)+1, group.Address)\n\t\t\t\t\t\tpeer := model.NewPeer(msg.SenderAddr, mListener) // PARSE THE PEER RECEIVED FROM THE MESSAGE, RESETTING NETWORK SETTINGS\n\t\t\t\t\t\tif group.Leader.HostAddr == \"\" && len(group.Peers) == 0 { // IF THE CURRENT GROUP HAS NO ACTIVE LEADER\n\t\t\t\t\t\t\tgroup.Leader = *peer // MAKE THE PEER THE GROUP LEADER\n\t\t\t\t\t\t}\n\t\t\t\t\t\tgroup.Peers[peer.HostAddr] = peer // THEN REGISTER THE PEER IN THE GROUP\n\t\t\t\t\t\t//this.next_group += 1 // todo review (next user will be conected to the next available group with this line)\n\t\t\t\t\t\tfmt.Println(\"Server: Client group request received, retrieving...\")\n\t\t\t\t\t\treturn model.NewMessage(0, SERVER_ADDR, peer.HostAddr, util.RESPONSE, util.GROUP, *group), nil // RETURNS THE GROUP TO THE USER\n\t\t\t\t\t}\n\t\t\t\t\treturn model.NewMessage(0, SERVER_ADDR, msg.SenderAddr, util.RESPONSE, util.ERROR, nil), nil // RETURNS AN ERROR (FULL GROUP)\n\t\t\t}\n\t}\n\treturn model.NewMessage(0,SERVER_ADDR, msg.SenderAddr , util.ERROR, util.RESPONSE,err), err\n}", "func parseNodeInfo(line string) map[sectionType]map[string]string {\n\tredisInfo := make(map[sectionType]map[string]string)\n\tstrList := strings.Split(line, \"\\n\")\n\tselection := \"\"\n\tfor i, _ := range strList {\n\t\tline := strings.TrimSpace(strList[i])\n\t\tif line == \"\" || len(line) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tif strings.HasPrefix(line, \"#\") {\n\t\t\tselection = strings.TrimSpace(line[1:])\n\t\t\tredisInfo[sectionType(selection)] = make(map[string]string)\n\t\t\tcontinue\n\t\t}\n\t\tcontentList := strings.Split(line, \":\")\n\t\tredisInfo[sectionType(selection)][contentList[0]] = contentList[1]\n\t}\n\treturn redisInfo\n}", "func parseNodesString(nodes string, proto string) (parsed map[string]string) {\n\tvar nodeContactLen int\n\tif proto == \"udp4\" {\n\t\tnodeContactLen = v4nodeContactLen\n\t} else if proto == \"udp6\" {\n\t\tnodeContactLen = v6nodeContactLen\n\t} else {\n\t\treturn\n\t}\n\tparsed = make(map[string]string)\n\tif len(nodes)%nodeContactLen > 0 {\n\t\tlogger.Infof(\"DHT: len(NodeString) = %d, INVALID LENGTH, should be a multiple of %d\", len(nodes), nodeContactLen)\n\t\tlogger.Infof(\"%T %#v\\n\", nodes, nodes)\n\t\treturn\n\t} else {\n\t\tlogger.Infof(\"DHT: len(NodeString) = %d, had %d nodes, nodeContactLen=%d\\n\", len(nodes), len(nodes)/nodeContactLen, nodeContactLen)\n\t}\n\tfor i := 0; i < len(nodes); i += nodeContactLen {\n\t\tid := nodes[i : i+nodeIdLen]\n\t\taddress := nettools.BinaryToDottedPort(nodes[i+nodeIdLen : i+nodeContactLen])\n\t\tparsed[id] = 
address\n\t}\n\treturn\n\n}", "func (c *Configuration) Prefix() string { return \"mtlsproxy\" }", "func StartServer(port string) {\n\tr := gin.New()\n\tr.GET(\"/:p1\", middleWare)\n\tr.GET(\"/:p1/:p2\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3/:p4\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3/:p4/:p5\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3/:p4/:p5/:p6\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3/:p4/:p5/:p6/:p7\", middleWare)\n\tr.GET(\"/:p1/:p2/:p3/:p4/:p5/:p6/:p7/:p8\", middleWare)\n\tr.Run(\":\" + port)\n}", "func (s *Server) parsePacket(packet []byte, from net.Addr) (*Request, error) {\n\tvar request Request\n\trequest.from = from\n\tif err := request.query.Unpack(packet); err != nil {\n\t\tif err != dns.ErrTruncated {\n\t\t\tlog.Printf(\"[ERR] bonjour: Failed to unpack packet: %v\", err)\n\t\t}\n\t\treturn nil, err\n\t} else {\n\t\treturn &request, nil\n\t}\n}", "func parseUri(uri string) (string, string, error) {\n\tparts := strings.SplitN(uri, \":\", 2)\n\tif len(parts) > 1 {\n\t\tif parts[0] == \"http\" {\n\t\t\treturn \"http\", uri, nil\n\t\t} else if _, has := protocolHandlers[parts[0]]; has {\n\t\t\treturn parts[0], parts[1], nil\n\t\t}\n\t} else if info, err := os.Stat(uri); err == nil && !info.IsDir() {\n\t\treturn \"file\", uri, nil\n\t}\n\treturn \"\", \"\", errors.New(fmt.Sprintf(\"Not an antipaste URI: %s\", uri))\n}", "func (n *nodeHeader) setPrefix(p []byte) {\n\tpLen, pBytes := n.prefixFields()\n\n\t// Write to the byte array and set the length field to the num bytes copied\n\t*pLen = uint16(copy(pBytes, p))\n}", "func parseSlaveInfo(s map[string]string, pwd string) ([]*NodeInfo, error) {\n\t/*\n\t\tmap[string]string{\"slave0\":\"10.1.1.228:7004\"}\n\t*/\n\tres := make([]*NodeInfo, 0)\n\tfor _, v := range s {\n\t\tallInfoMap, err := probeNode(v, pwd)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tselectionServerMap, exist := allInfoMap[Server]\n\t\tif !exist {\n\t\t\treturn nil, errors.New(\"probe info error\")\n\t\t}\n\t\tversion, exist := selectionServerMap[\"redis_version\"]\n\t\tif !exist {\n\t\t\treturn nil, errors.New(\"selection Server.redis_version not exist\")\n\t\t}\n\n\t\trunid, exist := selectionServerMap[\"run_id\"]\n\t\tif !exist {\n\t\t\treturn nil, errors.New(\"selection Server.run_id not exist\")\n\t\t}\n\n\t\tres = append(res, &NodeInfo{\n\t\t\tVer: version, Id: runid, Addr: v,\n\t\t})\n\t}\n\treturn res, nil\n}", "func hasPort(s string) bool { return strings.LastIndex(s, \":\") > strings.LastIndex(s, \"]\") }", "func hasPort(s string) bool { return strings.LastIndex(s, \":\") > strings.LastIndex(s, \"]\") }", "func hasPort(s string) bool { return strings.LastIndex(s, \":\") > strings.LastIndex(s, \"]\") }", "func ParseDevAddrPrefix(prefixString string) (prefix DevAddrPrefix, err error) {\n\tpattern := regexp.MustCompile(\"([[:xdigit:]]{8})/([[:digit:]]+)\")\n\tmatches := pattern.FindStringSubmatch(prefixString)\n\tif len(matches) != 3 {\n\t\terr = errors.New(\"Invalid Prefix\")\n\t\treturn\n\t}\n\taddr, _ := ParseDevAddr(matches[1]) // errors handled in regexp\n\tprefix.Length, _ = strconv.Atoi(matches[2]) // errors handled in regexp\n\tprefix.DevAddr = addr.Mask(prefix.Length)\n\treturn\n}", "func getASPrefixes(as int, ipv4Slice *[]string, ipv6Slice *[]string) (int, int, error) {\n ann4 := 0; ann6 := 0\n url := fmt.Sprintf(\"https://stat.ripe.net//data/announced-prefixes/data.json?resource=AS%d\", as);\n res, err := http.Get(url);\n if err == nil {\n bytes, err := ioutil.ReadAll(res.Body)\n res.Body.Close()\n if err == 
nil {\n var data map[string]interface{}\n if err := json.Unmarshal(bytes, &data); err != nil {\n err := errors.New(\"JSON parsing failed\")\n return 0, 0, err\n }\n if data[\"status\"] == \"ok\" {\n prefixes := data[\"data\"].(map[string]interface{})[\"prefixes\"].([]interface{})\n for j := 0; j < len(prefixes); j++ {\n prefix := prefixes[j].(map[string]interface{})[\"prefix\"].(string)\n if strings.ContainsRune(prefix, ':') {\n //fmt.Printf(\"# IPv6: %s\\n\", prefix)\n *ipv6Slice=append(*ipv6Slice, prefix);\n ann6++\n } else {\n //fmt.Printf(\"# IPv4: %s\\n\", prefix)\n *ipv4Slice=append(*ipv4Slice, prefix);\n ann4++\n }\n }\n }\n } else {\n return 0, 0, errors.New(\"Reading document body failed\")\n }\n } else {\n return 0, 0, errors.New(\"HTTP request failed\")\n }\n return ann4, ann6, nil\n}", "func parseBindAddr(s string) (address net.Addr, err error) {\n\tconst maxUnixLen = 106\n\n\t// '@' prefix specifies a Linux abstract domain socket.\n\tif runtime.GOOS == \"linux\" && strings.HasPrefix(s, \"@\") {\n\t\tif len(s) > maxUnixLen {\n\t\t\treturn nil, fmt.Errorf(\"sock file length must be less than %d characters\", maxUnixLen)\n\t\t}\n\t\treturn &net.UnixAddr{Name: s, Net: \"unix\"}, nil\n\t}\n\n\tif strings.Contains(s, \"/\") {\n\t\tif !filepath.IsAbs(s) {\n\t\t\treturn nil, errors.New(\"sock file must be an absolute path\")\n\t\t} else if len(s) > maxUnixLen {\n\t\t\treturn nil, fmt.Errorf(\"sock file length must be less than %d characters\", maxUnixLen)\n\t\t}\n\t\treturn &net.UnixAddr{Name: s, Net: \"unix\"}, nil\n\t}\n\n\t// For TCP, the supplied address string, s, is one of a port, a :port, or a host:port.\n\tip, port := net.IPv4(127, 0, 0, 1), 0\n\n\tif strings.Contains(s, \":\") {\n\t\thost, portString, err := net.SplitHostPort(s)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"invalid addr %q - must be provided as host:port\", s)\n\t\t}\n\t\tif host != \"\" {\n\t\t\tip = net.ParseIP(host)\n\t\t}\n\n\t\tport, err = strconv.Atoi(portString)\n\t} else {\n\t\tport, err = strconv.Atoi(s)\n\t}\n\n\tif err != nil || port < 1 || port > 65534 {\n\t\treturn nil, fmt.Errorf(\"invalid port %d - must be between 1 and 65534\", port)\n\t}\n\treturn &net.TCPAddr{IP: ip, Port: port}, nil\n}", "func ParseURL(serverFlag string) (*UrlRes, int, string) {\n\n\tpURL := &UrlRes{serverFlag, \"\", \"\"}\n\n\t// the URL golang Parse method has the limitation that when we pass in a host that is a string\n\t// and not an ip, without the protocol scheme, it mis-interprets the url string. For such cases we\n\t// need to explicitely make sure that we are missing a protocol scheme.\n\n\t// If no protocol exists, then append http:// as default protocol.\n\n\tif !strings.HasPrefix(strings.ToLower(serverFlag), \"https://\") &&\n\t\t!strings.HasPrefix(strings.ToLower(serverFlag), \"http://\") &&\n\t\t!strings.HasPrefix(strings.ToLower(serverFlag), \"couchbase://\") &&\n\t\t!strings.HasPrefix(strings.ToLower(serverFlag), \"couchbases://\") {\n\t\t//There is something else wrong and we need to throw an error.\n\t\tserverFlag = \"http://\" + serverFlag\n\t}\n\n\t//Parse the url\n\tparsedURL, err := url.Parse(serverFlag)\n\tif err != nil {\n\t\treturn pURL, errors.INVALID_URL, err.Error()\n\t}\n\n\tif parsedURL.Host == \"\" {\n\t\treturn pURL, errors.INVALID_URL, INVALIDHOST\n\t}\n\n\t// Check if the input url is a DNS SRV\n\t_, addr, err := net.LookupSRV(parsedURL.Scheme, \"tcp\", parsedURL.Hostname())\n\tif err == nil {\n\t\t// It is a DNS SRV .. 
Has couchbase or couchbases as a scheme\n\t\tparsedURL.Host = addr[0].Target\n\t}\n\n\t// We now have a valid URL. Check if we have a port\n\t_, portNo, err := net.SplitHostPort(parsedURL.Host)\n\n\t// couchbase:// and couchbases:// will represent http:// ... :8091 and\n\t// https:// ... 18091 respectively. If the port is specified along with\n\t// the scheme for this case, we throw an error.\n\n\tif parsedURL.Hostname() != \"\" {\n\t\tparsedURL.Host = parsedURL.Hostname()\n\t}\n\n\tif portNo == \"\" {\n\t\tif strings.ToLower(parsedURL.Scheme) == \"couchbase\" || strings.ToLower(parsedURL.Scheme) == \"couchbases\" {\n\n\t\t\tif strings.ToLower(parsedURL.Scheme) == \"couchbase\" {\n\t\t\t\tparsedURL.Host = net.JoinHostPort(parsedURL.Host, \"8091\")\n\t\t\t\tparsedURL.Scheme = \"http\"\n\n\t\t\t} else {\n\t\t\t\tparsedURL.Scheme = \"https\"\n\t\t\t\tparsedURL.Host = net.JoinHostPort(parsedURL.Host, \"18091\")\n\t\t\t}\n\n\t\t} else {\n\t\t\tif strings.ToLower(parsedURL.Scheme) == \"http\" {\n\t\t\t\tparsedURL.Host = net.JoinHostPort(parsedURL.Host, \"8091\")\n\n\t\t\t} else if strings.ToLower(parsedURL.Scheme) == \"https\" {\n\t\t\t\tparsedURL.Host = net.JoinHostPort(parsedURL.Host, \"18091\")\n\t\t\t}\n\t\t}\n\t} else {\n\t\tparsedURL.Host = net.JoinHostPort(parsedURL.Host, portNo)\n\t\t// Cannot give port with couchbase:// couchbases://\n\t\tif strings.ToLower(parsedURL.Scheme) == \"couchbase\" || strings.ToLower(parsedURL.Scheme) == \"couchbases\" {\n\t\t\treturn pURL, errors.INVALID_URL, INVALIDPORT\n\t\t} else {\n\t\t\tif err != nil {\n\t\t\t\treturn pURL, errors.INVALID_URL, err.Error()\n\t\t\t}\n\t\t}\n\t}\n\n\tpURL.Password, _ = parsedURL.User.Password()\n\tpURL.Username = parsedURL.User.Username()\n\tpURL.ServerFlag = parsedURL.String()\n\n\treturn pURL, 0, \"\"\n}", "func ServerFromURL(s string) (Server, error) {\n\tu, err := url.Parse(s)\n\tif err != nil {\n\t\tlog.Printf(\"can not parse server url %s err: %s\", s, err)\n\t\treturn nil, err\n\t}\n\n\tc, ok := serverMap[strings.ToLower(u.Scheme)]\n\tif ok {\n\t\treturn c(u)\n\t}\n\n\treturn nil, errors.New(\"unknown server scheme '\" + u.Scheme + \"'\")\n}", "func prefixmatch(prefix string) *v2.RouteMatch {\n\treturn &v2.RouteMatch{\n\t\tPathSpecifier: &v2.RouteMatch_Prefix{\n\t\t\tPrefix: prefix,\n\t\t},\n\t}\n}", "func getHostNameAndPort(hostInfo string) (string, int, error) {\n\thost := strings.SplitN(hostInfo, \":\", -1)\n\tif len(host) != 2 {\n\t\treturn \"\", 0, fmt.Errorf(\"expected hostname:port, got %s\", host)\n\t}\n\n\tport, err := strconv.Atoi(host[1])\n\tif err != nil {\n\t\treturn \"\", 0, fmt.Errorf(\"invalid port number, got %s\", host[1])\n\t}\n\n\treturn host[0], port, nil\n}", "func RawParser(raw string) Results {\n results := Results{}\n results.Command = toS(find(raw, \"^(httperf .*)\"))\n results.MaxConnectBurstLength = toI(find(raw, \"Maximum connect burst length: ([0-9]*?\\\\.?[0-9]+)$\"))\n results.TotalConnections = toI(find(raw, \"^Total: connections ([0-9]*?\\\\.?[0-9]+) \"))\n results.TotalRequests = toI(find(raw, \"^Total: connections .+ requests ([0-9]*?\\\\.?[0-9]+) \"))\n results.TotalReplies = toI(find(raw, \"^Total: connections .+ replies ([0-9]*?\\\\.?[0-9]+) \"))\n results.TotalTestDuration = toF(find(raw, \"^Total: connections .+ test-duration ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionRatePerSec = toF(find(raw, \"^Connection rate: ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionRateMsConn = toF(find(raw, \"^Connection rate: .+ \\\\(([0-9]*?\\\\.?[0-9]+) ms\"))\n results.ConnectionTimeMin = 
toF(find(raw, \"^Connection time \\\\[ms\\\\]: min ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionTimeAvg = toF(find(raw, \"^Connection time \\\\[ms\\\\]: min .+ avg ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionTimeMax = toF(find(raw, \"^Connection time \\\\[ms\\\\]: min .+ max ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionTimeMedian = toF(find(raw, \"^Connection time \\\\[ms\\\\]: min .+ median ([0-9]*?\\\\.?[0-9]+) \"))\n results.ConnectionTimeStddev = toF(find(raw, \"^Connection time \\\\[ms\\\\]: min .+ stddev ([0-9]*?\\\\.?[0-9]+)$\"))\n results.ConnectionTimeConnect = toF(find(raw, \"^Connection time \\\\[ms\\\\]: connect ([0-9]*?\\\\.?[0-9]+)$\"))\n results.ConnectionLength = toF(find(raw, \"^Connection length \\\\[replies\\\\/conn\\\\]: ([0-9]*?\\\\.?[0-9]+)$\"))\n results.RequestRatePerSec = toF(find(raw, \"^Request rate: ([0-9]*?\\\\.?[0-9]+) req\"))\n results.RequestRateMsRequest = toF(find(raw, \"^Request rate: .+ \\\\(([0-9]*?\\\\.?[0-9]+) ms\"))\n results.RequestSize = toF(find(raw, \"^Request size \\\\[B\\\\]: ([0-9]*?\\\\.?[0-9]+)$\"))\n results.ReplyRateMin = toF(find(raw, \"^Reply rate \\\\[replies\\\\/s\\\\]: min ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyRateAvg = toF(find(raw, \"^Reply rate \\\\[replies\\\\/s\\\\]: min .+ avg ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyRateMax = toF(find(raw, \"^Reply rate \\\\[replies\\\\/s\\\\]: min .+ max ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyRateStddev = toF(find(raw, \"^Reply rate \\\\[replies\\\\/s\\\\]: min .+ stddev ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyRateSamples = toI(find(raw, \"^Reply rate \\\\[replies\\\\/s\\\\]: min .+ \\\\(([0-9]*?\\\\.?[0-9]+) samples\"))\n results.ReplyTimeResponse = toF(find(raw, \"^Reply time \\\\[ms\\\\]: response ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyTimeTransfer = toF(find(raw, \"^Reply time \\\\[ms\\\\]: response .+ transfer ([0-9]*?\\\\.?[0-9]+)$\"))\n results.ReplySizeHeader = toF(find(raw, \"^Reply size \\\\[B\\\\]: header ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplySizeContent = toF(find(raw, \"^Reply size \\\\[B\\\\]: header .+ content ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplySizeFooter = toF(find(raw, \"^Reply size \\\\[B\\\\]: header .+ footer ([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplySizeTotal = toF(find(raw, \"^Reply size \\\\[B\\\\]: header .+ \\\\(total ([0-9]*?\\\\.?[0-9]+)\\\\)\"))\n results.ReplyStatus1xx = toI(find(raw, \"^Reply status: 1xx=([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyStatus2xx = toI(find(raw, \"^Reply status: .+ 2xx=([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyStatus3xx = toI(find(raw, \"^Reply status: .+ 3xx=([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyStatus4xx = toI(find(raw, \"^Reply status: .+ 4xx=([0-9]*?\\\\.?[0-9]+) \"))\n results.ReplyStatus5xx = toI(find(raw, \"^Reply status: .+ 5xx=([0-9]*?\\\\.?[0-9]+)\"))\n results.CPUTimeUserSec = toF(find(raw, \"^CPU time \\\\[s\\\\]: user ([0-9]*?\\\\.?[0-9]+) \"))\n results.CPUTimeUserPct = toF(find(raw, \"^CPU time \\\\[s\\\\]: .+ \\\\(user ([0-9]*?\\\\.?[0-9]+)\\\\% \"))\n results.CPUTimeSystemSec = toF(find(raw, \"^CPU time \\\\[s\\\\]: .+ system ([0-9]*?\\\\.?[0-9]+) \"))\n results.CPUTimeSystemPct = toF(find(raw, \"^CPU time \\\\[s\\\\]: user .+ system .+ system ([0-9]*?\\\\.?[0-9]+)\\\\% \"))\n results.CPUTimeTotalPct = toF(find(raw, \"^CPU time \\\\[s\\\\]: user .+ total ([0-9]*?\\\\.?[0-9]+)\\\\%\"))\n results.NetIoKbSec = toF(find(raw, \"^Net I\\\\/O: ([0-9]*?\\\\.?[0-9]+) KB\"))\n results.NetIoBps = toS(find(raw, \"^Net I\\\\/O: .+ \\\\((.+) bps\\\\)\"))\n results.ErrorsTotal = toI(find(raw, 
\"^Errors: total ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsClientTimeout = toI(find(raw, \"^Errors: total .+ client-timo ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsSocketTimeout = toI(find(raw, \"^Errors: total .+ socket-timo ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsConnRefused = toI(find(raw, \"^Errors: total .+ connrefused ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsConnReset = toI(find(raw, \"^Errors: total .+ connreset ([0-9]*?\\\\.?[0-9]+)\"))\n results.ErrorsFdUnavail = toI(find(raw, \"^Errors: fd-unavail ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsAddrUnavail = toI(find(raw, \"^Errors: fd-unavail .+ addrunavail ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsFtabFull = toI(find(raw, \"^Errors: fd-unavail .+ ftab-full ([0-9]*?\\\\.?[0-9]+) \"))\n results.ErrorsOther = toI(find(raw, \"^Errors: fd-unavail .+ other ([0-9]*?\\\\.?[0-9]+)\"))\n results.ConnectionTimes = findConnectionTimes(raw)\n results.calculatePercentiles()\n\n return results\n}", "func parseClientConn(s []string, connAge int64, connIdle int64) []string {\n\n\tpattern := `addr=(?P<addr>.*) fd=.* age=(?P<age>.*) idle=(?P<idle>.*) flags=.*`\n\tpatternMetadata := regexp.MustCompile(pattern)\n\tvar connectionList []string\n\tfor _, v := range s {\n\t\tvar ageCheck = false\n\t\tvar idleCheck = false\n\t\tvar remoteAddr string\n\t\tmatch := patternMetadata.FindStringSubmatch(v)\n\t\tresult := make(map[string]string)\n\t\tfor i, name := range patternMetadata.SubexpNames() {\n\t\t\tif i != 0 && name != \"\" {\n\t\t\t\tresult[name] = match[i]\n\t\t\t}\n\t\t}\n\t\tfor k, v := range result {\n\t\t\tif k == \"age\" {\n\t\t\t\tx, _ := strconv.ParseInt(v, 10, 64)\n\t\t\t\tif secondsToDays(x) > connAge {\n\t\t\t\t\tageCheck = true\n\t\t\t\t}\n\t\t\t} else if k == \"idle\" {\n\t\t\t\tx, _ := strconv.ParseInt(v, 10, 64)\n\t\t\t\tif secondsToDays(x) > connIdle {\n\t\t\t\t\tidleCheck = true\n\t\t\t\t}\n\t\t\t} else if k == \"addr\" {\n\t\t\t\tremoteAddr = v\n\t\t\t}\n\t\t}\n\t\tif ageCheck == true || idleCheck == true {\n\t\t\tconnectionList = append(connectionList, remoteAddr)\n\t\t}\n\t}\n\treturn connectionList\n}", "func ConnIDHasPrefix(v string) predicate.OfflineSession {\n\treturn predicate.OfflineSession(sql.FieldHasPrefix(FieldConnID, v))\n}" ]
[ "0.62915856", "0.60835487", "0.6077736", "0.60755247", "0.58891195", "0.5873394", "0.5862242", "0.5781484", "0.5727526", "0.5637101", "0.55761975", "0.5488654", "0.54740405", "0.54153466", "0.540331", "0.5396181", "0.5376468", "0.5342372", "0.5334321", "0.5330774", "0.5323101", "0.53098565", "0.5306836", "0.5285978", "0.5280115", "0.5268061", "0.52279574", "0.52171373", "0.51592875", "0.5158582", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5158162", "0.5136652", "0.5124188", "0.51232606", "0.51081336", "0.5106559", "0.5094305", "0.5087416", "0.5079952", "0.50688916", "0.50688916", "0.50688916", "0.50688916", "0.50688916", "0.50688916", "0.5068011", "0.505766", "0.50551945", "0.50510746", "0.50501174", "0.50439095", "0.5038056", "0.50316095", "0.50309265", "0.50162834", "0.5015905", "0.50129324", "0.5003417", "0.49982187", "0.4994418", "0.49882126", "0.49750364", "0.49684986", "0.49529868", "0.49519372", "0.49436408", "0.49417737", "0.49374944", "0.49372587", "0.49372587", "0.4932707", "0.4932552", "0.49157858", "0.49150163", "0.49105448", "0.4909298", "0.49069437", "0.49052274", "0.48989993", "0.48935965", "0.48935965", "0.48935965", "0.48888695", "0.48864433", "0.48717868", "0.48655242", "0.48514274", "0.4849631", "0.4845739", "0.48424646", "0.4841492", "0.48386475" ]
0.0
-1
wordcloudDataHandler writes the json body needed to draw in the word cloud (index.html)
The returned object needs to be an array of:
name: Name of the country
id: country ID
percent: percent of the total requests in that country
amount: number requests in that country
func wordcloudDataHandler(w http.ResponseWriter, req *http.Request) {
	w.Header().Set("Content-Type", "application/json")

	points := []mapPoint{}
	for country, amount := range requestsByCountry {
		point := mapPoint{
			Name:    country,
			ID:      country,
			Amount:  strconv.Itoa(amount),
			Percent: fmt.Sprintf("%f", (float64(amount)/float64(requestsTotal))*100),
		}
		points = append(points, point)
	}

	str, _ := json.Marshal(points)
	fmt.Fprintf(w, string(str))
}
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
[ "func handleWord(w http.ResponseWriter, r *http.Request ) {\n\tfmt.Fprintf( w, \"<h1>%s</h1>\\n\", \"Endpoint for gopher Word translation\" )\n\n\tbody, readErr := ioutil.ReadAll(r.Body)\n\tif readErr != nil {\n\t\tfmt.Println(readErr)\n\t\treturn\n\t}\n\n\tword := Word{}\n\terr := json.Unmarshal(body, &word)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tgoph, err := translateWord(word.Word)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\ttextBytes, err := json.Marshal(map[string]interface{}{\"gopher-word\": goph})\n\tif err != nil {\n\t\treturn\n\t}\n\n\tgopherTranslated := string(textBytes)\n\tfmt.Println(gopherTranslated)\n\tfmt.Fprintf( w, \"<h3>%s</h3>\\n\",gopherTranslated )\n}", "func Water2( w http.ResponseWriter, r *http.Request ) ([]type4.Water2 ) {\n\n// IN w   : response-writer\n// IN r   : request-parameter\n\n// OUT : slice of Water2 (struct)\n\n// fmt.Fprintf( w, \"trans2.water2 start \\n\" )\n\n project_name := os.Getenv(\"GOOGLE_CLOUD_PROJECT\")\n\n if project_name == \"\" {\n// fmt.Fprintf( w, \"storage_bucket_list : projectID unset \\n\" )\n\n project_name = \"sample-7777\"\n\n\t}\n ctx := context.Background()\n\n\tquery := datastore.NewQuery(\"Water2\").Order(\"Name\")\n\n client, err := datastore.NewClient(ctx, project_name)\n if err != nil {\n http.Error(w, err.Error(), http.StatusInternalServerError)\n return nil\n }\n\n count, err := client.Count(ctx, query)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\tnil\n\t}\n\n\twater2 := make([]type4.Water2, 0, count)\n\n\twater2_view := make([]type4.Water2, 0)\n\n\tkeys, err := client.GetAll(ctx, query , &water2)\n if err != nil {\n http.Error(w, err.Error(), http.StatusInternalServerError)\n//\t\tfmt.Fprintf( w, \"water2 err \\n\" ,err)\n\t\treturn\tnil\n\t}\n\n\tkeys_wk := make([]int64, count)\n\n\tfor ii, keysw := range keys {\n\n keys_wk[ii] = keysw.ID\n\n }\n\n\tfor pos, water2w := range water2 {\n\n water2_view = append(water2_view, type4.Water2 { keys_wk[pos] ,\n water2w.Name ,\n water2w.High ,\n water2w.Roughness_Factor })\n\n\t}\n\n return\twater2_view\n}", "func StatsHandler(w http.ResponseWriter, _ *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\te := json.NewEncoder(w)\n\terr := e.Encode(httpstats.Data())\n\tif err != nil {\n\t\tlog.Println(\"Error encoding data:\", err)\n\t}\n}", "func (c *Client) Words() goa.Endpoint {\n\tvar (\n\t\tdecodeResponse = DecodeWordsResponse(c.decoder, c.RestoreResponseBody)\n\t)\n\treturn func(ctx context.Context, v interface{}) (interface{}, error) {\n\t\treq, err := c.BuildWordsRequest(ctx, v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresp, err := c.WordsDoer.Do(req)\n\t\tif err != nil {\n\t\t\treturn nil, goahttp.ErrRequestError(\"shiritori\", \"words\", err)\n\t\t}\n\t\treturn decodeResponse(resp)\n\t}\n}", "func Create_Words(w http.ResponseWriter, r *http.Request) {\n\n\t// set the header to content type x-www-form-urlencoded\n\t// Allow all origin to handle cors issue\n\tw.Header().Set(\"Context-Type\", \"application/x-www-form-urlencoded\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\n\t// create an empty user of type models.User\n\tvar word models.Query\n\n\t// decode the json request to words\n\terr := json.NewDecoder(r.Body).Decode(&word)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Unable to decode the 
request body. %v\", err)\n\t}\n\n\t// call insert user function and pass the user\n\tinsertID := Insert_words(word)\n\n\tres := response{\n\t\tID: insertID,\n\t\tMessage: \"Find created successfully\",\n\t}\n\n\t// send the response\n\tjson.NewEncoder(w).Encode(res)\n\n}", "func getSuggestions(w http.ResponseWriter, r *http.Request) {\n prefix := r.URL.Query().Get(\"prefix\")\n max, e := strconv.Atoi(r.URL.Query().Get(\"max\"))\n if e != nil {\n log.Fatal(e)\n }\n if trieLoaded {\n suggestions := t.FindEntries(prefix, max)\n resp := SuggestionResponse{Prefix: prefix, Suggestions: suggestions, File: loadedFile}\n \n j, err := json.Marshal(resp)\n if nil != err {\n log.Println(err)\n w.WriteHeader(500)\n w.Write([]byte(err.Error()))\n } else {\n w.Header().Add(\"Content-Type\", \"application/json\")\n w.Write(j)\n }\n } else {\n resp := LoadingResponse{Message: \"Data is still loading, please wait\"}\n j, err := json.Marshal(resp)\n if nil != err {\n log.Println(err)\n w.WriteHeader(500)\n w.Write([]byte(err.Error()))\n } else {\n w.WriteHeader(409)\n w.Header().Add(\"Content-Type\", \"application/json\")\n w.Write(j)\n }\n }\n}", "func GetWordsHandler(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n\t//clear boggleWords for next board.\n\tif r.Method == \"POST\" {\n\t\tif err := r.ParseForm(); err != nil {\n\t\t\tfmt.Fprintf(w, \"ParseForm() err: %v\", err)\n\t\t\treturn\n\t\t}\n\t\tvar boggleBoard [4][4]string\n\t\tvar entry = 1\n\t\tfor i := 0; i < 4; i++ {\n\t\t\tfor j := 0; j < 4; j++ {\n\t\t\t\tb := r.FormValue(strconv.Itoa(entry))\n\t\t\t\tboggleBoard[i][j] = b\n\t\t\t\tentry++\n\t\t\t}\n\t\t}\n\t\tbogglePnC(boggleBoard)\n\t\tvar words []string\n\t\tfor w := range boggleWords {\n\t\t\twords = append(words, w)\n\t\t}\n\t\tsort.Strings(words)\n\t\tvar results string\n\t\tfor _, word := range words {\n\t\t\tresults = results + \"<li class=\\\"list-group-item\\\">\" + word + \"</li>\"\n\t\t}\n\t\tresponse := p{template.HTML(results)}\n\t\tt, err := template.ParseFiles(staticDir + \"results.html\")\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\t\tfmt.Println(\"clear the set of words\")\n\t\tboggleWords = make(map[string]bool)\n\t\tif err := t.ExecuteTemplate(w, \"results.html\", response); err != nil {\n\t\t\tfmt.Fprintf(w, \"ExecuteTemplate() err: %v\", err)\n\t\t}\n\t}\n}", "func dataHandler(w http.ResponseWriter, r *http.Request) {\n\n\n \tarrByte,err := json.Marshal( csvRecord ) \n \tif err != nil {\n\t\tp2(w,\"Marshal Map to Json - %v\",err) \t\t\n \t} else {\n \tw.Header().Set(\"Content-type:\", \"application/json\")\n \tw.Write(arrByte)\n \t}\n \t\n}", "func handleSentence(w http.ResponseWriter, r *http.Request) {\n\tfmt.Fprintf( w, \"<h1>%s</h1>\\n\", \"Endpoint for gopher Sentence translation\" )\n\n\tbody, readErr := ioutil.ReadAll(r.Body)\n\tif readErr != nil {\n\t\tfmt.Println(readErr)\n\t\treturn\n\t}\n\n\tsentence := Sentence{}\n\terr := json.Unmarshal(body, &sentence)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\n\tsen := strings.Split(sentence.Sentence, \" \")\n\n\tvar gophSen []string\n\tfor i := range sen {\n\t\tif strings.IndexAny(sen[i], \".,!?\") != -1 {\n\t\t\tword, sign := separateSign(sen[i])\n\t\t\tgoph, err := translateWord(word)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tgophSen = append(gophSen, goph + sign)\n\t\t} else {\n\t\t\tgoph, err := translateWord(sen[i])\n\t\t\tif err != nil 
{\n\t\t\t\tfmt.Println(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tgophSen = append(gophSen, goph )\n\t\t}\n\t}\n\n\tgS := strings.Join(gophSen, \" \")\n\n\ttextBytes, err := json.Marshal(map[string]interface{}{\"gopher-sentence\": gS})\n\tif err != nil {\n\t\treturn\n\t}\n\n\tgopherTranslated := string(textBytes)\n\tfmt.Println(gopherTranslated)\n\tfmt.Fprintf( w, \"<h3>%s</h3>\\n\",gopherTranslated )\n}", "func handler(w http.ResponseWriter, r *http.Request) {\n log.Println(\"received request:\", r.Method, r.URL.Path)\n word := strings.Trim(path.Base(r.URL.Path), \"/\")\n\n // Get the definition from groupcache and write it out.\n var data []byte\n err := dict.Get(nil, word, groupcache.AllocatingByteSliceSink(&data))\n if err != nil {\n log.Println(\"retreiving definition for\", word, \"-\", err)\n w.WriteHeader(http.StatusInternalServerError)\n return\n }\n io.Copy(w, bytes.NewReader(data))\n}", "func main() {\n\n\turl := \"http://127.0.0.1:8080/search/bing\"\n\n\t/* ----- Get User Input ----- */\n\tinputtedWord := \"\"\n\tfmt.Println(\"URL:>\", url)\n\tfmt.Println(\"Please enter a keyword to search for: \")\n\tfmt.Scanln(&inputtedWord)\n\t/* --------------------------- */\n\n\t// Use JSON.marshall to convert the struct to JSON format\n\n\t/* ----- Convert To JSON ----- */\n\n\t// Convert our inputtedWord String to a JSON object\n\tkey := Keyword{inputtedWord}\n\t// Convert that struct to JSON\n\tbuf, err := json.Marshal(key)\n\t/* Handle any Errors */\n\tif err != nil {\n\t\tlog.Fatal(err) // Throw fatal error & print to console\n\t}\n\n\tvar jsonStr = []byte(buf) // Convert to format which the server accepts\n\t/*-------------------*/\n\n\tfmt.Printf(\"%s\\n\", buf)\n\n\t/* --------------------------- */\n\n\t/* ----- Request from Server ----- */\n\t// Create the request with the JSON object we made\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewBuffer(jsonStr))\n\treq.Header.Set(\"Content-Type\", \"application/json\") // Set Header to JSON-type\n\n\tclient := &http.Client{} // Create a 'client' from the http library\n\tresp, err := client.Do(req) // Use the .Do function to send the request\n\n\t/* Handle any Errors */\n\tif err != nil {\n\t\tfmt.Printf(\"There was an error connecting to the server\", err)\n\t\tpanic(err)\n\t}\n\t/*-------------------*/\n\tdefer resp.Body.Close()\n\n\tfmt.Println(\"Response Status:\", resp.Status)\n\tfmt.Println(\"Response Headers:\", resp.Header)\n\tbody, _ := ioutil.ReadAll(resp.Body)\n\n\t/* ---------------------------------- */\n\n\t/* File I/O */\n\tif resp.Status == \"200 OK\" {\n\t\tfmt.Printf(\"\\nPrinting output to sampleresponse.txt!\")\n\t\tresponseFile, _ := os.OpenFile(\"sampleresponse.txt\", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) // create file if not existing, and append to it. 666 = permission bits rw-/rw-/rw-\n\t\tresponseFile.WriteString(string(body))\n\t\tdefer responseFile.Close()\n\t\tparseManager(string(body), inputtedWord)\n\t} else {\n\t\tfmt.Printf(\"\\nServer request failed! 
Attempting to use local file instead.\\n\\n\")\n\t\tbyteResponse, err := ioutil.ReadFile(\"sampleresponse.txt\") // just pass the file name\n\n\t\tif err != nil {\n\t\t\tfmt.Print(err)\n\n\t\t}\n\t\tstringResponse := string(byteResponse)\n\t\tparseManager(stringResponse, inputtedWord)\n\t}\n\n\t/* -------- */\n\n\t// Create a parse manager to handle the response\n\n\t// Keep the process alive -\n\tfor {\n\n\t}\n}", "func statsHandler(w http.ResponseWriter, r *http.Request) {\n\tdata := core.GetStats()\n\tb, err := json.Marshal(data)\n\tif err != nil {\n\t\thttp.Error(w, \"Error marshalling JSON\", http.StatusInternalServerError)\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\tfmt.Fprintf(w, \"%s\", b)\n}", "func loadWords() []string {\n\tdictionary, err := os.Open(filepath.Join(cwd, \"static\", \"words.json\"))\n\tprocessError(\"unable to open dictionary file\", err)\n\n\tjsonBytes, err := ioutil.ReadAll(dictionary)\n\tprocessError(\"unable to read dictionary file\", err)\n\n\tvar unmarshaled Dictionary\n\tjson.Unmarshal([]byte(jsonBytes), &unmarshaled)\n\n\treturn unmarshaled.Words\n}", "func PageViews(w http.ResponseWriter, r *http.Request) {\n\n\t// enable outside API requests\n\twdp.EnableCors(&w)\n\n\t// validate input API args\n\tvars, err := wdp.ValidateApiArgs(r)\n\tif err != nil {\n\t\tlog.Printf(\"error %v validating API arguments\\n\", err)\n\t\treturn\n\t}\n\n\t// query the database to get normalCount and dpCount\n\tnormalCount, dpCount, err := wdp.Query(db, vars.Lang, vars.PrivUnit, vars.Epsilon, vars.Delta, vars.Sensitivity)\n\tif err != nil {\n\t\tlog.Printf(\"error %v querying database\\n\", err)\n\t\treturn\n\t}\n\n\t// feed those into a util function to format them correctly\n\tresults := wdp.CreateOutputStruct(normalCount, dpCount, vars)\n\n\t// create outward facing parameters\n\tvar params = outParams{\n\t\tLang: vars.Lang,\n\t\tEps: vars.Epsilon,\n\t\tSensitivity: vars.Sensitivity,\n\t\tQualEps: wdp.QualEps(vars.Epsilon, 0.5),\n\t\tAlpha: vars.Alpha,\n\t\tPropWithin: vars.PropWithin,\n\t\tAggregateThreshold: wdp.AggregationThreshold(vars.Sensitivity, vars.Epsilon, vars.Alpha, vars.PropWithin),\n\t}\n\n\t// put outward facing parameters and results into one struct\n\tvar out = output{\n\t\tParams: params,\n\t\tResults: results,\n\t}\n\n\t// send the struct back as a json file\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(out)\n}", "func (handler WebserviceHandler) GetKeywords(res http.ResponseWriter, req *http.Request) {\n\thandler.Logger.Info(\"Received \" + req.Method + \" request at path: \" + req.URL.Path)\n\n\t// Setting headers for CORS\n\tres.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tres.Header().Set(\"Access-Control-Allow-Headers\", \"Authorization\")\n\tif req.Method == http.MethodOptions {\n\t\treturn\n\t}\n\n\thandler.Logger.Debug(\"Starting to retrieve keywords\")\n\tkeywordsUseCase, err := handler.KeywordsInteractor.Keywords()\n\tif err != nil {\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\thandler.Logger.Debug(\"Keywords retrieved\")\n\n\thandler.Logger.Debug(\"Transforming data for presentation\")\n\tvar keywords []Keyword\n\tfor _, keyword := range keywordsUseCase {\n\t\tkeywords = append(keywords, Keyword{keyword.ID, keyword.DisplayText})\n\t}\n\tkb := Keywords{keywords}\n\thandler.Logger.Debug(\"Data transformed\")\n\n\thandler.Logger.Debug(\"Starting writing body\")\n\tvar body []byte\n\tif body, err = json.Marshal(kb); err != nil 
{\n\t\tres.WriteHeader(http.StatusInternalServerError)\n\t\treturn\n\t}\n\thandler.Logger.Debug(\"Body written\")\n\n\tres.Header().Add(\"Content-Type\", \"application/json\")\n\n\tres.WriteHeader(200)\n\t_, err = res.Write(body)\n\tif err != nil {\n\t\treturn\n\t}\n\thandler.Logger.Info(\"Returning response\")\n\treturn\n}", "func indexHandler1( w http.ResponseWriter, r *http.Request){\n\n\treg, err := regexp.Compile(\"[^a-zA-Z0-9]+\")\n\tfmt.Fprintf(w, \"word\"+\" \"+\"count\")\n\tfmt.Fprintf(w,\"\\n----------\\n\")\n\tfor key, val := range wordcountMap {\n\n\t\tfmt.Println(strings.TrimRight(key, \" \"), val)\n\t\tfmt.Fprintf(w, reg.ReplaceAllString(key, \"\")+\" \"+strconv.Itoa(val))\n\t\tfmt.Fprintf(w,\"\\n\")\n\n\t}\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n}", "func AddWord(c *gin.Context) {\n\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", \"null\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Origin\", \"http://127.0.0.1:5500\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"POST\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Methods\", \"OPTIONS\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\tc.Writer.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, accept, origin, Cache-Control, X-Requested-With\")\n\tvar NewWord models.NewWord\n\tif err := c.ShouldBindJSON(&NewWord); err != nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tAword := models.Word{\n\t\tWord: NewWord.Word,\n\t\tMeaning: NewWord.Meaning,\n\t\tLearnt: 1,\n\t\tLearntAt: \"\",\n\t}\n\t_, err := models.DB.Query(\"INSERT INTO Words (word,meaning,learnt,learntAt) VALUES(?,?,?,now()) \", Aword.Word, Aword.Meaning, Aword.Learnt)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t_, err2 := models.DB.Query(\"update Words set learntAt = Now() where word = ?\", Aword.Word)\n\tif err2 != nil {\n\t\tpanic(err2)\n\t}\n\n\t//fmt.Println(w1.Word, w1.Meaning)\n\t_, err1 := models.DB.Exec(\"UPDATE Words set learnt = 1 where word = ?\", Aword.Word)\n\tif err1 != nil {\n\t\tpanic(err1)\n\t}\n\n}", "func Pipe_line1_show_graf( w http.ResponseWriter ,r *http.Request ,f_name string) {\n\n// IN w : response-writer\n// IN r : request-parameter\n// IN f_name    : file-name\n\n\tvar g type4.Water_Slope // ”Water_Slope\" and type5.Image_Show”is same format\n\n// fmt.Fprintf( w, \"pipe_line1_show_graf start \\n\" )\n\n project_name := os.Getenv(\"GOOGLE_CLOUD_PROJECT\")\n\n if project_name == \"\" {\n\n project_name = \"sample-7777\"\n\n\t}\n\n ctx := context.Background()\n\n client, err := datastore.NewClient(ctx, project_name)\n if err != nil {\n http.Error(w, err.Error(), http.StatusInternalServerError)\n return\n }\n\n g.File_Name = f_name\n\n\tbucket := \"sample-7777\"\n\n\tconst publicURL = \"https://storage.googleapis.com/%s/%s\"\n\tg.Url = fmt.Sprintf(publicURL, bucket, g.File_Name)\n\n//\tfmt.Fprintf( w, \"pipe_line1_show_graf : g.File_Name %v\\n\", g.File_Name )\n//\tfmt.Fprintf( w, \"pipe_line1_show_graf : g.Url %v\\n\", g.Url )\n\n///\n/// put new data in d.s.\n///\n\n new_key := datastore.IncompleteKey(\"Water_Slope\", nil)\n\n if _, err = client.Put(ctx, new_key, &g ); err != nil {\n\n\t\thttp.Error(w,err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n///\n/// set template\n///\n\n monitor := template.Must(template.New(\"html\").Parse(html4.Pipe_line1_show_graf))\n\n///\n/// 
show water-slope inf. on web\n///\n\n\terr = monitor.Execute(w, g)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\n}", "func ingestedHandler(w http.ResponseWriter, r *http.Request) {\n\tif coverageIngester == nil {\n\t\thttp.Error(w, \"Server not ready yet\", http.StatusServiceUnavailable)\n\t\treturn\n\t}\n\ttype list struct {\n\t\tList []coverageingest.IngestedResults `json:\"list\"`\n\t}\n\tsummary := list{List: coverageIngester.GetResults()}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tif err := json.NewEncoder(w).Encode(summary); err != nil {\n\t\tsklog.Errorf(\"Failed to write or encode output: %s\", err)\n\t\treturn\n\t}\n}", "func D_district_temp(w http.ResponseWriter, r *http.Request ) ([]type5.General_Work) {\n\n\n// IN w    : レスポンスライター\n// IN r    : リクエストパラメータ\n// OUT general_work_out : area number /area name\n// fmt.Fprintf( w, \"check/d_district_temp start \\n\" )\n\n var district_no int64\n var district_name string\n\n projectID := os.Getenv(\"GOOGLE_CLOUD_PROJECT\")\n\n if projectID == \"\" {\n\n projectID = \"sample-7777\"\n\n\t}\n\n ctx := context.Background()\n\n client, err := datastore.NewClient(ctx, projectID)\n if err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn nil\n\t}\n\n query := datastore.NewQuery(\"D_District_Temp\").Order(\"District_No\")\n\n count, err := client.Count(ctx, query)\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn nil\n\t}\n\n// fmt.Fprintf( w, \"check/d_district_temp count \\n\" ,count )\n\n\td_district_temp := make([]type2.D_District_Temp, 0, count)\n\n if _, err := client.GetAll(ctx, query , &d_district_temp) ; err != nil {\n\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn nil\n } else {\n\t for _, d_district_tempw := range d_district_temp {\n\n district_no = d_district_tempw.District_No\n district_name = d_district_tempw.District_Name\n\n// fmt.Fprintf( w, \"check/d_district_temp pos2 %v \\n\" , pos2 )\n\n }\n// fmt.Fprintf( w, \"check/d_district_temp district_no \\n\" ,district_no )\n// fmt.Fprintf( w, \"check/d_district_temp district_name \\n\" ,district_name )\n }\n\n general_work_out := make([]type5.General_Work, 1)\n general_work_out[0].Int64_Work = district_no\n general_work_out[0].String_Work = district_name\n\n\treturn general_work_out\n}", "func userLikeHandler(formatter *render.Render) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, req *http.Request) {\n\t\t/**\n\t\t\tMongo server setup\n\t\t**/\n\t\tsession, err := mgo.Dial(mongodb_server)\n if err != nil {\n fmt.Println(\"mongoserver panic\")\n }\n defer session.Close()\n session.SetMode(mgo.Monotonic, true)\n u := session.DB(mongodb_database).C(\"userLike\")\n s := session.DB(mongodb_database).C(\"score\")\n c := session.DB(mongodb_database).C(\"cloth\")\n\t\t/**\n\t\t\tGet Post body\n\t\t**/ \n // body, err := ioutil.ReadAll(req.Body)\n\t\t// if err != nil {\n\t\t// \tlog.Fatalln(err)\n\t\t// }\n\t\t// fmt.Println(body)\n\n\t\t// var userPostResult UserPostId\n\t\t// json.Unmarshal(body, &userPostResult)\n\n\t\t// userId := userPostResult.UserId\n\n\t\tparams := mux.Vars(req)\n\t\tvar userId string = params[\"userId\"]\n\t\tfmt.Println(\"userId\", userId)\n\t\t/**\n\t\t\tGet cloth id by userid\n\t\t**/\n\t\tvar clothIdResult []bson.M\n\t\terr = u.Find(bson.M{\"userId\": userId}).All(&clothIdResult)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Get cloth id panic\")\n\t\t}\n\t\tcount := 
len(clothIdResult)\n\t\t/*\n\t\t\tDeclare return response\n\t\t*/\n\t\tresponse := make([]Predict, count)\n\n\t\tfor i := 0; i < count; i++ {\n\t\t\tclothSingleResult := clothIdResult[i]\n\t\t\tclothId := clothSingleResult[\"clothId\"].(string)\n\t\t\tvar clothInfo bson.M\n\t\t\terr = c.Find(bson.M{\"clothesId\": clothId}).One(&clothInfo)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Get cloth info panic\")\n\t\t\t}\n\t\t\tresponse[i].ClothId = clothId\n\t\t\tresponse[i].Url = clothInfo[\"url\"].(string)\n\t\t\tresponse[i].Name = clothInfo[\"name\"].(string)\n\t\t\tresponse[i].Price = clothInfo[\"price\"].(string)\n\t\t\tvar clothScore bson.M\n\t\t\terr = s.Find(bson.M{\"id\": clothId}).One(&clothScore)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Get cloth score panic\")\n\t\t\t}\n\t\t\tresponse[i].Score = clothScore[\"score\"].(string)\n\t\t}\n \n\t\tformatter.JSON(w, http.StatusOK, response)\n\t}\n}", "func getCities(writer http.ResponseWriter, request *http.Request){\n\n\tletter := request.FormValue(\"letter\")\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"data/\")\n\tbuffer.WriteString(letter)\n\tbuffer.WriteString(\".json\")\n\n\tfmt.Println(buffer.String())\n\n\tdat, err := ioutil.ReadFile(buffer.String())\n\n\tif err != nil{\n\t\tfmt.Fprintln(writer, \"Error\")\n\t\treturn\n\t}\n\n\tfmt.Fprintf(writer, string(dat))\n}", "func ImagesNextDataHandler(writer http.ResponseWriter, request *http.Request) {\n\tapiApplyCorsHeaders(writer, request)\n\timage := NextImage()\n\tif image == nil {\n\t\twriter.WriteHeader(http.StatusBadRequest)\n\t\twriter.Write([]byte(\"No more images to classify\"))\n\t}\n\n\tjson, err := json.Marshal(image)\n\tif err != nil {\n\t\twriter.WriteHeader(http.StatusInternalServerError)\n\t}\n\n\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\twriter.Write(json)\n}", "func (gb *geobinServer) countsHandler(w http.ResponseWriter, r *http.Request) {\n\tdebugLog(\"counts -\", r.URL)\n\n\t// get list of binIds from request body\n\tvar binIds []string\n\tdec := json.NewDecoder(r.Body)\n\tif err := dec.Decode(&binIds); err != nil {\n\t\tlog.Println(\"Error marshalling request:\", err)\n\t\thttp.Error(w, \"Error marshalling request:\", http.StatusBadRequest)\n\t}\n\n\t// look up each binId in db\n\tcounts := make(map[string]interface{})\n\tfor _, binId := range binIds {\n\t\tif c, err := gb.ZCount(binId, \"-inf\", \"+inf\"); err == nil && c > 0 {\n\t\t\tcounts[binId] = c - 1\n\t\t} else {\n\t\t\tcounts[binId] = nil\n\t\t}\n\t}\n\n\t// return counts\n\tif err := json.NewEncoder(w).Encode(counts); err != nil {\n\t\tlog.Println(\"Error encoding response:\", err)\n\t\thttp.Error(w, \"Error encoding response!\", http.StatusInternalServerError)\n\t}\n}", "func generateHandler(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\n\t// Default length for the body to generate.\n\ttokenLen := 50\n\n\tif r.URL.Query().Get(\"limit\") != \"\" {\n\t\ttokenLen, err = strconv.Atoi(r.URL.Query().Get(\"limit\"))\n\t\tif err != nil {\n\t\t\terrHandler(w, 500, err)\n\t\t}\n\t}\n\n\tout, err := index.Babble(\"\", tokenLen) // Starting seed is left blank for random choice.\n\tif err != nil {\n\t\tif err == ngrams.ErrEmptyIndex {\n\t\t\tm, err := json.Marshal(map[string]interface{}{\n\t\t\t\t\"err\": \"index is empty; please learn ngrams before generating.\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\terrHandler(w, 400, err)\n\t\t\t}\n\n\t\t\tw.Write(m)\n\t\t\treturn\n\t\t}\n\n\t\terrHandler(w, 500, err)\n\t}\n\n\tm, err := 
json.Marshal(map[string]interface{}{\n\t\t\"body\": out,\n\t\t\"limit\": tokenLen,\n\t})\n\tif err != nil {\n\t\terrHandler(w, 500, err)\n\t}\n\n\tw.Write(m)\n\n}", "func handlerRoot(w http.ResponseWriter, r *http.Request) {\n\n\t// ========== ========== ========== ========== ==========\n\t// New Context - opaque value used by many functions in the Go App Engine SDK to communicate with the App Engine service\n\t// [START new_context]\n\tctx := appengine.NewContext(r) // c or ctx\n\t// Send to func via: (c context.Context)\n\t// [END new_context]\n\t// ========== ========== ========== ========== ==========\n\n w.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n fmt.Fprint(w, drawPage(r, ctx))\n}", "func Handler(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvars := mux.Vars(r)\n\tdomainName := vars[\"domain_name\"]\n\n\tquery := r.URL.Query()\n\tsortBy := getOrDefault(query.Get(\"sortBy\"), defaultSortBy)\n\n\tpageStr := getOrDefault(query.Get(\"page\"), defaultPage)\n\tpage, err := strconv.Atoi(pageStr)\n\tif err != nil {\n\t\tlog.Fatal(\"Error read page params!\")\n\t}\n\n\tpositions := []Position{}\n\n\tsqlStmt := \"select keyword, position, url, volume, results, updated from positions where domain = '%s' order by %s asc limit %d offset %d\"\n\trows, err := DB.Query(fmt.Sprintf(sqlStmt, domainName, sortBy, limit, page))\n\tfor rows.Next() {\n\t\tposition := Position{}\n\t\terr := rows.Scan(&position.Keyword, &position.Position, &position.URL, &position.Volume, &position.Results, &position.Updated)\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\t\tpositions = append(positions, position)\n\t}\n\n\tdm := DomainWithPosition{Domain: domainName, Positions: positions}\n\tbytes, err := json.Marshal(dm)\n\tif err != nil {\n\t\tfmt.Println(\"Can't serialize\", dm)\n\t}\n\n\tw.Write(bytes)\n}", "func (s *Server) Generate(w http.ResponseWriter, r *http.Request) {\n\tsentence, err := s.Corpus.Generate(100)\n\tif err != nil {\n\t\tlog.Println(err)\n\n\t\t// Must populate corpus before generating\n\t\tw.WriteHeader(400)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(&GenerateResp{\n\t\tSentence: strings.Join(sentence, \" \"),\n\t})\n}", "func saveJSONToCloudStorage(w http.ResponseWriter, data map[string]string) {\n\tbucket := \"my-test-appsero-storage\"\n\tt := time.Now()\n\tobject := \"track/\" + t.Format(\"2006/01/02/15\") + \"/\" + strconv.FormatInt(time.Now().UnixNano(), 10) + \".json\"\n\tctx := context.Background()\n\tclient, err := storage.NewClient(ctx)\n\tif err != nil {\n\t\tfmt.Fprint(w, \"storage.NewClient: %v\", err)\n\t}\n\tdefer client.Close()\n\n\tctx, cancel := context.WithTimeout(ctx, time.Second*50)\n\tdefer cancel()\n\n\tbkt := client.Bucket(bucket)\n\tobj := bkt.Object(object)\n\twc := obj.NewWriter(ctx)\n\n\twc.ContentType = \"text/josn\"\n\tmapData, err := json.Marshal(data)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"Error %v\\n\", err.Error())\n\t}\n\n\tif _, err := wc.Write(mapData); err != nil {\n\t\tfmt.Fprintf(w, \"Failed to write json file %v\\n\", err)\n\t}\n\n\tif err := wc.Close(); err != nil {\n\t\tfmt.Fprintf(w, \"Failed to close file %v\\n\", err)\n\t}\n}", "func CosineSimilarity(jsonData []byte) (map[string]map[string]float64, error) {\n\n\tvar fileMap map[string][]string\n\tjson.Unmarshal(jsonData, &fileMap)\n\n\t// Convert every word to lowercase in the document.\n\tfor fName, doc := range fileMap {\n\t\tfor i, word := range doc 
{\n\t\t\tdoc[i] = strings.ToLower(word)\n\t\t}\n\t\tfileMap[fName] = doc\n\t}\n\n\t// Filter the stop words out of the content.\n\tswSet := createSetFromJSON(\"./fixtures/en_stopwords.json\")\n\tfor fName, doc := range fileMap {\n\t\tfilteredDoc := doc[:0]\n\t\tfor _, word := range doc {\n\t\t\t_, ok := swSet[word]\n\t\t\tif !ok {\n\t\t\t\tfilteredDoc = append(filteredDoc, word)\n\t\t\t}\n\t\t}\n\t\tfileMap[fName] = filteredDoc\n\t}\n\n\t// Filter the punctuation from the content.\n\tpuncSet := createSetFromJSON(\"./fixtures/en_punctuation.json\")\n\tfor fName, doc := range fileMap {\n\t\tfilteredDoc := doc[:0]\n\t\tfor _, word := range doc {\n\t\t\t_, ok := puncSet[word]\n\t\t\tif !ok {\n\t\t\t\tfilteredDoc = append(filteredDoc, word)\n\t\t\t}\n\t\t}\n\t\tfileMap[fName] = filteredDoc\n\t}\n\n\t// Calculate term frequency and word to doc count.\n\tfileToTF := make(map[string]map[string]int)\n\tallWordSet := make(map[string]bool)\n\tfor fileName, doc := range fileMap {\n\t\twordSet := make(map[string]int)\n\t\tfor _, word := range doc {\n\t\t\twordSet[word]++\n\t\t\tallWordSet[word] = true\n\t\t}\n\t\tfileToTF[fileName] = wordSet\n\t}\n\n\t// Calculate number of docs word appears on. (word to doc count)\n\t// This is also used as a total word set as it includes every word used\n\t// across all documents.\n\twordToDC := make(map[string]int)\n\tfor _, docSet := range fileToTF {\n\t\tfor word := range docSet {\n\t\t\twordToDC[word]++\n\t\t}\n\t}\n\n\t// Calculate normalized term frequency.\n\t// Calculate number of documents a word occurs in\n\tfileToNTF := make(map[string]map[string]float64)\n\twordToDF := make(map[string]int)\n\tfor fileName, wordSet := range fileToTF {\n\t\twordToNTF := make(map[string]float64)\n\t\tnumWords := float64(len(wordSet))\n\t\tfor word, tf := range wordSet {\n\t\t\twordToNTF[word] = float64(tf) / numWords\n\t\t\twordToDF[word]++\n\t\t}\n\t\tfileToNTF[fileName] = wordToNTF\n\t}\n\n\t// -------------- calculate tf-idf\n\t// IDF(word) = 1 + loge(Total Number Of Documents / Number Of Documents w/ word in it)\n\t// normalized TF * inverse document frequency\n\tnumDocs := len(fileToNTF)\n\t//wordToIDF := make(map[string]float64)\n\tfileNameTFIDF := make(map[string]map[string]float64)\n\tfor fileName, wordToNTF := range fileToNTF {\n\t\twordToTFIDF := make(map[string]float64)\n\t\tfor word, ntf := range wordToNTF {\n\t\t\tdocCount := wordToDC[word]\n\t\t\tinner := float64(numDocs) / float64(docCount)\n\t\t\tidf := 1 + math.Log(inner)\n\t\t\ttfidf := ntf * idf\n\t\t\twordToTFIDF[word] = tfidf\n\t\t}\n\t\tfileNameTFIDF[fileName] = wordToTFIDF\n\t}\n\n\t// -------------- calculate cosine similarity\n\n\t// create map of word to tf-idf in each document.\n\tfileToTFIDFSet := make(map[string]map[string]float64)\n\tfor fName := range fileMap {\n\t\tfinalWordToTFIDF := make(map[string]float64)\n\t\tfor word := range allWordSet {\n\t\t\tval, ok := fileNameTFIDF[fName][word]\n\t\t\tif !ok {\n\t\t\t\tval = 0.0\n\t\t\t}\n\t\t\tfinalWordToTFIDF[word] = val\n\t\t}\n\t\tfileToTFIDFSet[fName] = finalWordToTFIDF\n\t}\n\n\t// Calculate tfidf vector for each document.\n\tfNameToTFIDFVector := make(map[string]map[string]float64)\n\tfor fName := range fileMap {\n\t\tdocTFIDFVector := make(map[string]float64)\n\t\tfor word := range allWordSet {\n\t\t\tval, ok := fileToTFIDFSet[fName][word]\n\t\t\tif !ok {\n\t\t\t\tval = 0.0\n\t\t\t}\n\t\t\tdocTFIDFVector[word] = val\n\t\t}\n\t\tfNameToTFIDFVector[fName] = docTFIDFVector\n\t}\n\n\t// ------------ Calculate cosine similarity between each 
document.\n\t// ----------- numerator\n\t// sum of the product of each corresponding tfidf value\n\tfNameToCosineNumMap := make(map[string]map[string]float64)\n\tfor fNameA, docTFIDFVectorA := range fNameToTFIDFVector {\n\t\ttempMap := make(map[string]float64)\n\t\tfor fNameB, docTFIDFVectorB := range fNameToTFIDFVector {\n\t\t\tsumProdAB := 0.0\n\t\t\tfor word, valA := range docTFIDFVectorA {\n\t\t\t\tvalB := docTFIDFVectorB[word]\n\t\t\t\tprodAB := valA * valB\n\t\t\t\tsumProdAB += prodAB\n\t\t\t}\n\t\t\ttempMap[fNameB] = sumProdAB\n\t\t}\n\t\tfNameToCosineNumMap[fNameA] = tempMap\n\t}\n\n\t// ---------- denom\n\t// square root of each value\n\tfNameToCosDenPre := make(map[string]float64)\n\tfor fName, docTFIDFVector := range fNameToTFIDFVector {\n\t\tvar numPre float64\n\t\tfor _, val := range docTFIDFVector {\n\t\t\tv := math.Pow(val, 2)\n\t\t\tnumPre += v\n\t\t}\n\t\tnumPre = math.Sqrt(numPre)\n\t\tfNameToCosDenPre[fName] = numPre\n\t}\n\n\t// TODO: this needs to be optimized so that we don't calculate values twice\n\t// a cross functionality\n\tfNameToCosDen := make(map[string]map[string]float64)\n\tfor fNameA, valA := range fNameToCosDenPre {\n\t\ttempMap := make(map[string]float64)\n\t\tfor fNameB, valB := range fNameToCosDenPre {\n\t\t\ttempMap[fNameB] = valA * valB\n\t\t}\n\t\tfNameToCosDen[fNameA] = tempMap\n\t}\n\n\tfNameToCosSim := make(map[string]map[string]float64)\n\tfor fNameA, numMapA := range fNameToCosineNumMap {\n\t\ttempCosMap := make(map[string]float64)\n\t\tfor fNameB, num := range numMapA {\n\t\t\tdenom := fNameToCosDen[fNameA][fNameB]\n\t\t\tval := num / denom\n\t\t\ttempCosMap[fNameB] = val\n\t\t}\n\t\tfNameToCosSim[fNameA] = tempCosMap\n\t}\n\n\treturn fNameToCosSim, nil\n}", "func (s *Service) SearchHandler(w http.ResponseWriter, r *http.Request) {\n\tresp := message.Response{\n\t\tRequestInfo: message.Request{\n\t\t\tDate: time.Now(),\n\t\t},\n\t}\n\tif val, err := utils.GetParamValue(r, \"origin\"); err == nil {\n\t\tresp.RequestInfo.OD.Origin = api.CityCode(val)\n\t} else {\n\t\tresp.Errors = append(resp.Errors, api.Error{Code: int(http.StatusBadRequest), Description: fmt.Sprintf(\"%v\", err)})\n\t}\n\n\tif val, err := utils.GetParamValue(r, \"destination\"); err == nil {\n\t\tresp.RequestInfo.OD.Destination = api.CityCode(val)\n\t} else {\n\t\tresp.Errors = append(resp.Errors, api.Error{Code: int(http.StatusBadRequest), Description: fmt.Sprintf(\"%v\", err)})\n\t}\n\n\tfor _, provider := range s.config.Providers {\n\t\troutes, err := getRoutesForProvider(provider, &resp.RequestInfo)\n\t\tif err != nil {\n\t\t\tresp.Errors = append(resp.Errors, api.Error{Code: int(http.StatusBadRequest), Description: fmt.Sprintf(\"%v\", err)})\n\t\t}\n\t\tif routes != nil {\n\t\t\tfor _, r := range routes {\n\t\t\t\tfor _, flight := range r.Segments {\n\t\t\t\t\ts.priceHistogram.WithLabelValues(flight.Provider, flight.OD.String()).Observe(float64(flight.Price.Price))\n\t\t\t\t\tfmt.Println(\"Price:\", flight.Price.Price)\n\t\t\t\t}\n\t\t\t}\n\t\t\tresp.Solutions = append(resp.Solutions, routes...)\n\n\t\t}\n\t}\n\n\tenc := json.NewEncoder(w)\n\tenc.SetIndent(\"\", \" \")\n\n\tif err := enc.Encode(resp); err != nil {\n\t\tfmt.Println(\"encode error:\", err)\n\t\thttp.Error(w, \"unable ot encode response\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\treturnCode := http.StatusOK\n\tfor _, err := range resp.Errors {\n\t\tif err.Code > returnCode {\n\t\t\treturnCode = err.Code\n\t\t}\n\t}\n\tif returnCode != http.StatusOK {\n\t\tw.WriteHeader(returnCode)\n\t}\n}", "func 
HelloWorld(w http.ResponseWriter, r *http.Request) {\n\tformErr := r.ParseMultipartForm(32)\n\tif formErr != nil {\n\t\tfmt.Fprintf(w, \"ParseForm() Err: %v\", formErr)\n\t}\n\n\tdata := make(map[string]string)\n\tfor key, value := range r.Form {\n\t\tnewKey := strings.ReplaceAll(strings.TrimRight(key, \"]\"), \"[\", \"_\")\n\t\tdata[newKey] = value[0]\n\t}\n\n\t// Store Json file to cloud storage\n\tsaveJSONToCloudStorage(w, data)\n\n}", "func (t *trs80) writeWords() {\n\tfor i := 0; i < int(t.pb.Header.NumWords); i++ {\n\t\tt.writeWord(t.pb.Verbs[i])\n\t\tt.writeWord(t.pb.Nouns[i])\n\t}\n}", "func apiHandler(w http.ResponseWriter, r *http.Request) {\n\n\tvar out []byte\n\tout, err := exec.Command(\"/usr/games/fortune\").Output()\n\tif err != nil {\n\t\tout = []byte(\"This is not the fortune you are looking for.\")\n\t}\n\n\tc := appengine.NewContext(r)\n\tlog.Infof(c, string(out))\n\tfortuneString := simpleTextStrip(string(out))\n\n\tf := Fortune{Fortune: fortuneString, Text: string(out)}\n\n\tjson, err := json.Marshal(f)\n\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tw.Write(json)\n}", "func Index(data []byte){\n\n api.Domain = \"localhost\"\n fmt.Println(\"Entered inside elasticgo file...lets do this\")\n response, _ := core.Index(\"scalegray_sample\", \"first_sampleset\", \"3\", nil, data)\n fmt.Println(response)\n fmt.Println(\"Done pushing the data into elastic search..woohoo!\")\n}", "func (wd Word) WordList(ctx context.Context, w http.ResponseWriter, r *http.Request) error {\n\n\tctx, span := trace.StartSpan(ctx, \"handlers.Word.WordList\")\n\tdefer span.End()\n\n\twordList, err := word.WordList(ctx, wd.DB)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn web.Respond(ctx, w, wordList, http.StatusOK)\n}", "func WriteSummary(index_name string, fields []string) {\n\n // Setting font as needed\n pdf.SetFont(\"Helvetica\",\"\",10)\n\n // a slice of Summary{} that will hold Summary{} structure for each field\n response_struct := []Summary{}\n\n // Looping through each fields requestd\n for index := range fields {\n url := fmt.Sprintf(`https://127.0.0.1:9200/%s/_search?`, index_name)\n queries := fmt.Sprintf(`\n {\n \"size\":\"0\",\n \"aggs\" : {\n \"uniq_gender\" : {\n \"terms\" : { \"field\" : \"%s.keyword\" }\n }\n }\n }`, fields[index])\n\n p, err := es.Query(\"GET\", url, queries)\n if err != nil {\n fmt.Println(\"Report Generation error ERROR: Could not get response from Elasticsearch server \", err, \"Trying to connect again\")\n return\n }\n\n temp := Summary{}\n\n err = json.Unmarshal(p, &temp)\n if (err != nil) {\n fmt.Println(\"Error unmarshalling json\",err);\n }\n\n response_struct = append(response_struct,temp);\n }\n for i :=0; i < len(response_struct); i++ {\n pdf.Write(10,fmt.Sprintf(`%s Count\\n`,fields[i]))\n //DrawLine();\n for _, v := range(response_struct[i].Aggregations.Uniq.Buck){\n pdf.Write(10,fmt.Sprintf(`%s %d\\n`,v.Key,v.Count))\n }\n }\n}", "func gcServiceData(js jsonstore.JsonStore) {\n\t// TODO:\n}", "func GetClouds(resp http.ResponseWriter, req *http.Request, params routing.Params) {\n\n\tdata, err := json.Marshal(clouds.GetClouds())\n\tif err != nil {\n\t\tlog.Printf(\"[ERR ] Error %v\", err)\n\t\thttp.Error(resp, \"internal server error\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tresp.Header().Set(\"Content-Type\", \"application/json\")\n\tresp.Write(data)\n}", "func (s *CountriesService) Countries(headers map[string]string, log logger.Logger) (response models.OperationResponseOfListOfCountriesObject, err error) {\n\terr = 
s.client.apiReq(http.MethodPost, \"/countries\", nil, nil, &response, &headers, log)\n\treturn\n}", "func wordcountcomment(x geddit.Comment, words map[string]SubsWordData) {\n\t//prepare vars\n\tsubstrs := strings.Fields(x.Body)\n\ttmpdata := SubsWordData{}\n\ttmpUser := UserData{}\n\t//regex to remove trailing and leading punctuation\n\treg, err := regexp.Compile(`[^0-9a-zA-Z-]`)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t//log user information\n\ttmpUser, uexists := usermap[x.Author]\n\n\t//if no user exists\n\tif !uexists {\n\n\t\ttmpUser = UserData{\n\t\t\tUser: x.Author,\n\t\t\tWords: make(map[string]SubsWordData),\n\t\t}\n\n\t}\n\n\t//range through individual words\n\tfor _, word := range substrs {\n\t\t//remove anything but alphanumeric\n\t\tword = reg.ReplaceAllString(word, \"\")\n\t\t//get rid of words like \"I\"\n\t\tif len(word) > 1 {\n\t\t\t//determine if word is stopword\n\t\t\tif _, stopped := stopwords[word]; !stopped {\n\n\t\t\t\ttmpdata = SubsWordData{}\n\t\t\t\t_, ok := words[strings.ToLower(word)]\n\n\t\t\t\tif ok == true {\n\t\t\t\t\t//if that worddata exists in the map\n\t\t\t\t\ttmpdata = words[word]\n\n\t\t\t\t\ttmpdata.Avgscore = ((tmpdata.Avgscore * tmpdata.Numoccur) + x.UpVotes) / (tmpdata.Numoccur + 1)\n\t\t\t\t\ttmpdata.Numoccur += 1\n\t\t\t\t\ttmpdata.Heur += x.UpVotes\n\t\t\t\t\t// tmpdata.TimePassed =\n\n\t\t\t\t} else {\n\t\t\t\t\t//if no worddata exists\n\t\t\t\t\ttmpdata = SubsWordData{\n\t\t\t\t\t\tWord: strings.ToLower(word),\n\t\t\t\t\t\tNumoccur: 1,\n\t\t\t\t\t\tAvgscore: x.UpVotes,\n\t\t\t\t\t\tHeur: x.UpVotes,\n\t\t\t\t\t}\n\n\t\t\t\t} //endelse\n\n\t\t\t\t//add word to map\n\t\t\t\twords[word] = tmpdata\n\n\t\t\t\t//empty word data for user\n\t\t\t\ttmpword := SubsWordData{}\n\n\t\t\t\tif userword, wordexists := tmpUser.Words[word]; wordexists {\n\t\t\t\t\t//check if data exists for author, if so update\n\t\t\t\t\ttmpword.Avgscore = ((userword.Avgscore * userword.Numoccur) + x.UpVotes) / (userword.Numoccur + 1)\n\t\t\t\t\ttmpword.Numoccur += 1\n\t\t\t\t\ttmpword.Heur += x.UpVotes\n\n\t\t\t\t} else {\n\t\t\t\t\t//create the data for the word\n\t\t\t\t\ttmpword.Avgscore = x.UpVotes\n\t\t\t\t\ttmpword.Numoccur = 1\n\t\t\t\t\ttmpword.Heur = x.UpVotes\n\t\t\t\t}\n\n\t\t\t\t//update word in user's word map\n\t\t\t\ttmpUser.Words[word] = tmpword\n\t\t\t\t// fmt.Println(tmpword)\n\n\t\t\t}\n\t\t}\n\n\t}\n\t//update user in global usermap\n\ttmpUser.Subs = append(tmpUser.Subs, x.Subreddit)\n\tusermap[x.Author] = tmpUser\n\n}", "func APILetterFilter(w http.ResponseWriter, rctx *chttp.Context) error {\n\td := dictionary.Get(chi.URLParam(rctx.R, \"dictionary\"))\n\tif d == nil {\n\t\treturn APINotFound(w, rctx)\n\t}\n\n\turlQuery := htmlui.Query([]htmlui.QueryParam{\n\t\thtmlui.NewIntegerQueryParam(\"page\", 1),\n\t\thtmlui.NewStringQueryParam(\"prefix\", \"\"),\n\t})\n\turlQuery.From(rctx.R.URL.Query())\n\n\tprefix := []rune(urlQuery.Get(\"prefix\").(*htmlui.StringQueryParam).Value())\n\tprefix = prefix[:min(3, len(prefix))]\n\n\tallaggs := map[string]interface{}{}\n\tfor i := 0; i < min(len(prefix), 2)+1; i++ {\n\t\taggs := map[string]interface{}{\n\t\t\tfmt.Sprintf(\"Letter%d\", i+1): map[string]interface{}{\n\t\t\t\t\"terms\": map[string]interface{}{\n\t\t\t\t\t\"field\": fmt.Sprintf(\"Prefix.Letter%d\", i+1),\n\t\t\t\t\t\"size\": 200,\n\t\t\t\t\t\"order\": map[string]interface{}{\"_key\": \"asc\"},\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\tfor j := 0; j < i; j++ {\n\t\t\taggs = map[string]interface{}{\n\t\t\t\tfmt.Sprintf(\"Letter%d\", i+1): 
map[string]interface{}{\n\t\t\t\t\t\"filter\": map[string]interface{}{\n\t\t\t\t\t\t\"term\": map[string]interface{}{\n\t\t\t\t\t\t\tfmt.Sprintf(\"Prefix.Letter%d\", j+1): string(prefix[j]),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t\"aggs\": aggs,\n\t\t\t\t},\n\t\t\t}\n\t\t}\n\n\t\tfor k, v := range aggs {\n\t\t\tallaggs[k] = v\n\t\t}\n\t}\n\n\taggsreqbody := map[string]interface{}{\n\t\t\"size\": 0,\n\t\t\"aggs\": map[string]interface{}{\n\t\t\t\"Prefix\": map[string]interface{}{\n\t\t\t\t\"nested\": map[string]interface{}{\"path\": \"Prefix\"},\n\t\t\t\t\"aggs\": allaggs,\n\t\t\t},\n\t\t},\n\t}\n\n\ttype aggresult struct {\n\t\tBuckets []htmlui.LetterFilterEntity `json:\"buckets\"`\n\t}\n\taggsrespbody := struct {\n\t\tAggregations struct {\n\t\t\tPrefix struct {\n\t\t\t\tLetter1 *aggresult\n\t\t\t\tLetter2 *struct{ Letter2 aggresult }\n\t\t\t\tLetter3 *struct{ Letter3 struct{ Letter3 aggresult } }\n\t\t\t}\n\t\t} `json:\"aggregations\"`\n\t}{}\n\tif err := storage.Post(\"/dict-\"+d.IndexID()+\"/_search\", aggsreqbody, &aggsrespbody); err != nil {\n\t\treturn fmt.Errorf(\"aggs query: %w\", err)\n\t}\n\n\tletterFilter := htmlui.LetterFilter{\n\t\tPrefix: prefix,\n\t\tLetterLink: func(prefix string) string {\n\t\t\treturn prefix\n\t\t},\n\t}\n\tletterFilter.AddLevel(aggsrespbody.Aggregations.Prefix.Letter1.Buckets)\n\tif aggsrespbody.Aggregations.Prefix.Letter2 != nil {\n\t\tletterFilter.AddLevel(aggsrespbody.Aggregations.Prefix.Letter2.Letter2.Buckets)\n\t}\n\tif aggsrespbody.Aggregations.Prefix.Letter3 != nil {\n\t\tletterFilter.AddLevel(aggsrespbody.Aggregations.Prefix.Letter3.Letter3.Letter3.Buckets)\n\t}\n\n\ttype letterfilterview struct {\n\t\tDictID string\n\t\tPrefix string\n\t\tEntries [][]htmlui.LetterFilterLink\n\t}\n\n\tif err := json.NewEncoder(w).Encode(letterfilterview{\n\t\tDictID: d.ID(),\n\t\tPrefix: string(prefix),\n\t\tEntries: letterFilter.Links(),\n\t}); err != nil {\n\t\treturn fmt.Errorf(\"encode response: %w\", err)\n\t}\n\n\treturn nil\n}", "func GetDataSlowly(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"text/html; charset=utf-8\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tchars := []string{\" \", \"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\", \"j\", \"k\", \"l\", \"m\", \"n\", \"o\", \"p\", \"q\", \"r\", \"s\", \"t\", \"u\", \"v\", \"w\", \"x\", \"y\", \"z\", \"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\"}\n\ts := \"\"\n\tparams := mux.Vars(r)\n\tnum := params[\"num\"]\n\tctr, err := strconv.Atoi(num)\n\tif err != nil {\n\t\tctr = 10\n\t}\n\tif ctr > 1000 {\n\t\tctr = 1000\n\t}\n\n\ts2 := rand.NewSource(time.Now().UnixNano())\n\tr2 := rand.New(s2)\n\n\tfor i := 0; i < 1024*ctr; i++ {\n\t\t//s += \"c\"\n\t\ts += chars[r2.Intn(37)]\n\t}\n\tjson.NewEncoder(w).Encode(s)\n}", "func getAllflavours(w http.ResponseWriter, r *http.Request) {\r\n\tw.Header().Set(\"Content-Type\", \"application/json\")\r\n\tjson.NewEncoder(w).Encode(flavours)\r\n}", "func (w WeatherData) Output() {\n\n\toutput := map[string]interface{}{\n\t\t\"response_type\": \"in_channel\",\n\t}\n\tattachments := make([]map[string]string, 1)\n\tattachments[0] = map[string]string{\n\t\t\"image_url\": \"http://amberandbrice.com/realfeel/realfeeltm.gif\",\n\t}\n\toutput[\"attachments\"] = attachments\n\n\tvar text string = \"\"\n\n\ttext += fmt.Sprintf(\"Weather Data for: %s \\n\", w.Current.ObservationLocation[\"full\"])\n\ttext += fmt.Sprintf(\"Latitude: %10s, Longitude: %10s\\n\", 
w.Current.ObservationLocation[\"latitude\"], w.Current.ObservationLocation[\"longitude\"])\n\ttext += fmt.Sprintf(\"Elevation: %10s\\n\", w.Current.ObservationLocation[\"elevation\"])\n\n\t//\ttext += fmt.Sprint(`\n\t//\n\t// _____ _ _ _ _ _ _\n\t//|_ _| | | ( ) | | | | | | | |\n\t// | | ___ __| | __ _ _ _|/ ___ | | | | ___ __ _| |_| |__ ___ _ __\n\t// | |/ _ \\ / _ |/ _ | | | | / __| | |/\\| |/ _ \\/ _ | __| '_ \\ / _ \\ '__|\n\t// | | (_) | (_| | (_| | |_| | \\__ \\ \\ /\\ / __/ (_| | |_| | | | __/ |\n\t// \\_/\\___/ \\__,_|\\__,_|\\__, | |___/ \\/ \\/ \\___|\\__,_|\\__|_| |_|\\___|_|\n\t// __/ |\n\t// |___/\n\t//`)\n\n\ttext += fmt.Sprintf(\"%-20s:%7.2f° \\n\", \"Temperature\", w.Current.Temp)\n\ttext += fmt.Sprintf(\"%-20s:%10s \\n\", \"Real Feel™\", w.Current.RealFeel)\n\ttext += fmt.Sprintf(\"%-20s:%10s \\n\", \"Humidity\", w.Current.Humidity)\n\ttext += fmt.Sprintf(\"%-20s:%6.2f mph \\n\", \"Wind Speed\", w.Current.WindSpeed)\n\ttext += fmt.Sprintf(\"%-20s:%10s \\n\", \"UV Index\", w.Current.UVindex)\n\toutput[\"text\"] = text\n\n\toutput[\"unfurl_media\"] = true\n\toutput[\"unfurl_links\"] = true\n\tforprinting, _ := json.Marshal(output)\n\tfmt.Println(string(forprinting))\n}", "func goLanguages(w http.ResponseWriter, r *http.Request){\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tw.WriteHeader(http.StatusOK)\n\n\tvar langs Languages = getLanguages();\n\tif err := json.NewEncoder(w).Encode(langs); err != nil {\n\t\tpanic(err)\n\t}\n}", "func goLanguages(w http.ResponseWriter, r *http.Request){\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tw.WriteHeader(http.StatusOK)\n\n\tvar langs Languages = getLanguages();\n\tif err := json.NewEncoder(w).Encode(langs); err != nil {\n\t\tpanic(err)\n\t}\n}", "func learnHandler(w http.ResponseWriter, r *http.Request) {\n\tb, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\terrHandler(w, 500, err)\n\t}\n\n\tif len(b) == 0 {\n\t\terrHandler(w, 400, err)\n\t}\n\n\ttokens, err := index.Parse(string(b))\n\tif err != nil {\n\t\terrHandler(w, 500, err)\n\t}\n\n\tm, err := json.Marshal(map[string]interface{}{\n\t\t\"parsed_tokens\": len(tokens),\n\t})\n\tif err != nil {\n\t\terrHandler(w, 500, err)\n\t}\n\n\tw.Write(m)\n\n}", "func HelloWorld(w http.ResponseWriter, r *http.Request) {\n\n\tprojectID := \"mamun-appsero\"\n\tdatasetID := \"tracking\"\n\ttableID := \"temp_sites\"\n\tctx := context.Background()\n\tclient, err := bigquery.NewClient(ctx, projectID)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"bigquery.NewClient: %v\", err)\n\t}\n\tdefer client.Close()\n\n\tgcsRef := bigquery.NewGCSReference(\"gs://my-test-appsero-storage/track/*\")\n\tgcsRef.SourceFormat = bigquery.JSON\n\tgcsRef.AutoDetect = true\n\tloader := client.Dataset(datasetID).Table(tableID).LoaderFrom(gcsRef)\n\tloader.WriteDisposition = bigquery.WriteTruncate\n\n\tjob, err := loader.Run(ctx)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"Failed to Run %v\\n\", err)\n\t}\n\tstatus, err := job.Wait(ctx)\n\tif err != nil {\n\t\tfmt.Fprintf(w, \"Failed to Wait %v\\n\", err)\n\t}\n\n\tif status.Err() != nil {\n\t\tfmt.Fprintf(w, \"job completed with error: %v\", status.Err())\n\t}\n\n\t// Save as a json file\n\tsaveToCloudStorage(w, projectID, datasetID, tableID)\n\treadingFromBigQuery(w, projectID)\n\n\tfmt.Fprintf(w, \"Done\\n\")\n\n}", "func jsonHandler(w http.ResponseWriter, r *http.Request) {\r\n w.Header().Set(\"Content-Type\", \"application/json\")\r\n json.NewEncoder(w).Encode(&Message{helloWorldString})\r\n}", "func 
DemoChannels1() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\n\t\tdata := []string{\n\t\t\t\"The yellow fish swims slowly in the water\",\n\t\t\t\"The brown dog barks loudly after a drink ...\",\n\t\t\t\"The dark bird bird of prey lands on a small ...\",\n\t\t}\n\n\t\thistogram := make(map[string]int)\n\n\t\twords := words(data)\n\n\t\t//Pulls the data from the channel\n\t\t//Checks the open status of the channel\n\t\t//If closed, break out of the loop\n\t\t//Otherwise record histogram\n\t\tfor {\n\t\t\tword, opened := <-words\n\t\t\tif !opened {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\thistogram[word]++\n\t\t}\n\n\t\tfor k, v := range histogram {\n\t\t\tw.Write([]byte(fmt.Sprintf(\"%s\\t %d\\n\", k, v)))\n\t\t}\n\t}\n}", "func mysqlHandler (vp *viper.Viper, cbBucket *gocb.Bucket) (int, interface{}) {\n\n // get user name from config.toml\n user := vp.GetString(\"mysql.user\")\n if user == \"\" {\n return 0, \"MySQL connection failed: user is missing\"\n }\n\n // get password\n password := vp.GetString(\"mysql.password\")\n if password == \"\" {\n return 0, \"MySQL connection failed: password is missing\"\n }\n\n // get database name\n dbname := vp.GetString(\"mysql.dbname\")\n if dbname == \"\" {\n return 0, \"MySQL connection failed: database name is missing\"\n }\n\n // connect to database\n db, err := sql.Open(\"mysql\", user + \":\" + password + \"@/\" + dbname)\n if err != nil {\n return 0, err.Error()\n }\n defer db.Close()\n\n // number of added documents\n count := 0\n\n // query.sql array from config.toml\n var queries []map[string]interface{}\n vp.UnmarshalKey(\"query\", &queries)\n\n // loop over sql queries\n for i := range queries {\n\n query := queries[i]\n\n // fetch data from database\n rows, err := db.Query(query[\"sql\"].(string))\n if err != nil {\n return 0, err.Error()\n }\n\n // get column names\n columns, err := rows.Columns()\n if err != nil {\n return 0, err.Error()\n }\n\n // make a slice for the values\n values := make([]sql.RawBytes, len(columns))\n scanArgs := make([]interface{}, len(values))\n for i := range values {\n scanArgs[i] = &values[i]\n }\n\n // fetch rows\n for rows.Next() {\n err = rows.Scan(scanArgs...)\n if err != nil {\n return 0, err.Error()\n }\n\n // add field values to document\n var value interface{}\n doc := map[string]interface{}{}\n for i, col := range values {\n if col == nil {\n value = \"NULL\"\n } else {\n value = string(col)\n }\n doc[columns[i]] = value\n }\n\n // add additional field values\n if props, ok := query[\"props\"].(map[string]interface{}); ok {\n for propKey, propValue := range props {\n doc[propKey] = propValue\n }\n }\n\n // convert doc to json and upsert into couchbase\n jsonDoc, _ := json.Marshal(doc)\n _, err := upsert(cbBucket, sue.New2(), jsonDoc)\n if err != nil {\n return 0, err\n }\n\n count++\n }\n if err = rows.Err(); err != nil {\n return 0, err.Error()\n }\n }\n return count, nil\n}", "func GetDataHandler(w http.ResponseWriter, r *http.Request) {\n\n}", "func Handler(w http.ResponseWriter, req *http.Request) {\n\n\tid := req.URL.Query().Get(\":id\")\n\n\t// Call into DD API to get the dataset information\n\tdatasetModel, err := discovery.GetDataset(id)\n\tif err != nil {\n\t\tlog.Error(err, nil)\n\t\trespond(w, http.StatusNotFound, []byte(err.Error()))\n\t\treturn\n\t}\n\tlog.DebugR(req, `Got response from API`, log.Data{\"datasetModel\": datasetModel})\n\n\t// Rewrite the URLs in the datasets to point to our own address\n\tdatasetModel.URL = config.ExternalURL + \"/datasets/\" + 
datasetModel.ID\n\n\tpage := dataset.Page{\n\t\tDataset: datasetModel,\n\t}\n\n\tbody, err := renderer.Render(page, \"dd/dataset\")\n\tif err != nil {\n\t\tlog.ErrorR(req, err, nil)\n\t\trespond(w, http.StatusInternalServerError, []byte(err.Error()))\n\t\treturn\n\t}\n\n\trespond(w, http.StatusOK, body)\n}", "func InformationGatheringForOnePage() {\n\n\tstart:=time.Now()\n\ttestPost:=models.DomainData{\n\t\tHost: \"\",\n\t\tTld: \"\",\n\t\tStatus: \"\",\n\t\tSubdomains: nil,\n\t\tFirstSeen: \"\",\n\t\tLastSeen: \"\",\n\t\tResolvers: nil,\n\t\tWhoISInformation: nil,\n\t\tSource: \"\",\n\t\tTag: nil,\n\t}\n\n\tdelId,err:=DBElastic.SendPost(testPost)\n\tif err!=nil{\n\t\t_ = fmt.Errorf(\"Cannot insert test doc %s\\n\", err)\n\t}\n\n\n\tdomains,_:=GetHostsSlice()\n\treader,readerA:=recon.OpenGeoLite()\n\n\tvar wg sync.WaitGroup\n\n\tdataCh := make(chan models.DomainData)\n\thosts:=make(chan string)\n\n\tfor i:=0;i<=concurrency;i++{\n\t\tgo recon.GetInformationAboutHost(hosts,dataCh,&wg,reader,readerA)\n\t\twg.Add(1)\n\t}\n\n\tnumJobs:=len(domains)\n\tfmt.Printf(\"Start gathering information from %v hostst. Please wait.\\n\",len(domains))\n\n\tgo func(){\n\t\tfor i:=0;i<numJobs;i++{\n\t\t\thosts <- domains[i]\n\t\t}\n\t\tclose(hosts)\n\t}()\n\n\tdataSlice:=make([]models.DomainData,0,numJobs)\n\tfinishProcess:=float64(cap(dataSlice))\n\n\tfor i:=0;i<numJobs;i++{\n\t\tdataSlice = append(dataSlice, <-dataCh)\n\t\tstartProcess:=float64(len(dataSlice))\n\t\tif int(startProcess)%100==0{\n\t\t\tpercentProcess:=(startProcess/finishProcess)*100\n\t\t\tfmt.Printf(\"The array is %.2f perc full.\\n\", percentProcess)\n\t\t}\n\t}\n\twg.Wait()\n\n\tfmt.Println(\"Array is full\")\n\tfmt.Println(\"Starting bulk index\")\n\t_ = DBElastic.BulkSendPost(dataSlice)\n\t_ = DBElastic.DeletePost(delId)\n\tfmt.Println(\"Collection information completed. 
All post are indexed.\")\n\tfmt.Printf(\"Time taken: %s\\n\", time.Since(start))\n}", "func hotelsContinent(response http.ResponseWriter, request *http.Request){\n\thoteles := FindAllHotelsContinent()\n\tresult, err := json.Marshal(hoteles)\n\tif err != nil {\n\t\thttp.Error(response, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tresponse.Header().Set(\"Content-Type\", \"application/json\")\n\tresponse.Write(result)\n}", "func infoHandler(w http.ResponseWriter, r *http.Request) {\n\tinfo := Metainfo{Uptime: timeSince(startTime), Info: \"Service for IGC tracks.\", Version: \"v1\"}\n\tjs, err := json.Marshal(info)\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(js)\n\n}", "func MainHandler(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"Main Handled\")\n\tdata := models.Response{Err: 0, Message: []string{}, Data: nil}\n\n\twriteJSON(w, data)\n}", "func PostClouds(resp http.ResponseWriter, req *http.Request, params routing.Params) {\n\n\tcloud := &clouds.Cloud{}\n\tdecoder := json.NewDecoder(req.Body)\n\n\tif err := decoder.Decode(cloud); err != nil {\n\t\thttp.Error(resp, \"bad Request: \"+err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif cloud.ID == \"\" {\n\t\tcloud.ID = bson.NewObjectId().Hex()\n\t}\n\n\tif _, err := url.Parse(cloud.REST); err != nil {\n\t\thttp.Error(resp, \"bad request: mal formatted REST address\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif cloud.MQTT != \"\" {\n\t\tif _, err := url.Parse(cloud.MQTT); err != nil {\n\t\t\thttp.Error(resp, \"bad request: mal formatted MQTT address\", http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err := clouds.AddCloud(cloud); err != nil {\n\t\thttp.Error(resp, \"bad request: \"+err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tlog.Printf(\"[CLOUD] Created %q.\", cloud.ID)\n\n\twriteCloudFile()\n\tresp.Write([]byte(cloud.ID))\n}", "func (handlersImpl WeatherHandlersImpl) GetData(w http.ResponseWriter, req *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\n\tctx := req.Context()\n\n\tvars := mux.Vars(req)\n\tcityName := vars[\"cityName\"]\n\tresp, err := handlersImpl.svc.GetData(ctx, cityName)\n\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\treturn\n\t}\n\n\tif err := json.NewEncoder(w).Encode(resp); err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t}\n\n\tw.WriteHeader(http.StatusOK)\n}", "func PublishDataAPIHandler(w http.ResponseWriter, r *http.Request) {\n\tLog := Logger.NewSessionLogger()\n\n\tvar plog PodLog\n\tplog.Result = LOG_RESULT_FAILED\n\tplog.Operation = LOG_OPERATION_TYPE_ALICE_PUBLISH\n\n\tdefer func() {\n\t\terr := insertLogToDB(plog)\n\t\tif err != nil {\n\t\t\tLog.Warnf(\"insert log error! %v\", err)\n\t\t\treturn\n\t\t}\n\t\tnodeRecovery(w, Log)\n\t}()\n\n\tmerkleRoot := r.FormValue(\"merkleRoot\")\n\tvalue := r.FormValue(\"value\")\n\tplog.Detail = fmt.Sprintf(\"merkleRoot=%v, deposit value=%v\", merkleRoot, value)\n\n\tLog.Infof(\"start publish data to contract...merkleRoot=%v, value=%v\", merkleRoot, value)\n\tdataFile := BConf.AliceDir + \"/publish/\" + merkleRoot\n\n\tvalueInt, err := strconv.ParseInt(value, 10, 64)\n\tif err != nil {\n\t\tLog.Warnf(\"invalid value. value=%v, err=%v\", value, err)\n\t\tfmt.Fprintf(w, RESPONSE_INCOMPLETE_PARAM)\n\t\treturn\n\t}\n\tif merkleRoot == \"\" {\n\t\tLog.Warnf(\"invalid merkle root. 
merkle root=%v, err=%v\", merkleRoot, err)\n\t\tfmt.Fprintf(w, RESPONSE_INCOMPLETE_PARAM)\n\t\treturn\n\t}\n\n\trs, err := pathExists(dataFile)\n\tif err != nil {\n\t\tLog.Errorf(\"check path exist error. err=%v\", err)\n\t\tfmt.Fprintf(w, RESPONSE_READ_DATABASE_FAILED)\n\t\treturn\n\t}\n\tif !rs {\n\t\tLog.Warnf(\"the data does not exist. filepath=%v\", dataFile)\n\t\tfmt.Fprintf(w, RESPONSE_DATA_NOT_EXIST)\n\t\treturn\n\t}\n\n\tb, err := readBulletinFile(dataFile+\"/bulletin\", Log)\n\tif err != nil {\n\t\tLog.Warnf(\"failed to read bulletin file. err=%v\", err)\n\t\tfmt.Fprintf(w, RESPONSE_DATA_NOT_EXIST)\n\t\treturn\n\t}\n\tLog.Debugf(\"read bulletin file...filepath=%v\", dataFile+\"/bulletin\")\n\n\textra, err := readExtraFile(dataFile + \"/extra.json\")\n\tif err != nil {\n\t\tLog.Errorf(\"failed to read publish extra info. err=%v\", err)\n\t\tfmt.Fprintf(w, RESPONSE_DATA_NOT_EXIST)\n\t\treturn\n\t}\n\n\tLog.Debugf(\"start send transaction to contract for publishing data...merkle root=%v, mode=%v, size=%v, n=%v, s=%v\", b.SigmaMKLRoot, b.Mode, b.Size, b.N, b.S)\n\tt := time.Now()\n\ttxid, err := publishDataToContract(b, valueInt)\n\tif err != nil {\n\t\tLog.Errorf(\"publish data to contract error! err=%v\", err)\n\t\tfmt.Fprintf(w, RESPONSE_PUBLISH_TO_CONTRACT_FAILED)\n\t\treturn\n\t}\n\tLog.Debugf(\"send publish data to contract successfully. txid=%v, merkle root=%v, time cost=%v\", txid, b.SigmaMKLRoot, time.Since(t))\n\n\textra.ContractAddr = BConf.ContractAddr\n\terr = savePublishExtraInfo(extra, dataFile+\"/extra.json\")\n\tif err != nil {\n\t\tLog.Errorf(\"failed to save publish extra info. err=%v\")\n\t}\n\tLog.Infof(\"finish send transaction for publishing data...merkle root=%v\", b.SigmaMKLRoot)\n\n\tplog.Result = LOG_RESULT_SUCCESS\n\tfmt.Fprintf(w, fmt.Sprintf(RESPONSE_SUCCESS, \"send publish transaction to contract...\"))\n\treturn\n}", "func (d *Data) ServeHTTP(uuid dvid.UUID, ctx *datastore.VersionedCtx, w http.ResponseWriter, r *http.Request) {\n\ttimedLog := dvid.NewTimeLog()\n\n\t// Check the action\n\taction := strings.ToLower(r.Method)\n\tif action != \"get\" {\n\t\tserver.BadRequest(w, r, \"labelsz data can only accept GET HTTP requests\")\n\t\treturn\n\t}\n\n\t// Break URL request into arguments\n\turl := r.URL.Path[len(server.WebAPIPath):]\n\tparts := strings.Split(url, \"/\")\n\tif len(parts[len(parts)-1]) == 0 {\n\t\tparts = parts[:len(parts)-1]\n\t}\n\n\tif len(parts) < 4 {\n\t\tserver.BadRequest(w, r, \"Incomplete API request\")\n\t\treturn\n\t}\n\n\t// Process help and info.\n\tswitch parts[3] {\n\tcase \"help\":\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\tfmt.Fprintln(w, dtype.Help())\n\n\tcase \"info\":\n\t\tjsonBytes, err := d.MarshalJSON()\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tfmt.Fprintf(w, string(jsonBytes))\n\n\tcase \"size\":\n\t\t// GET <api URL>/node/<UUID>/<data name>/size/<label>\n\t\tif len(parts) < 5 {\n\t\t\tserver.BadRequest(w, r, \"ERROR: DVID requires label ID to follow 'size' command\")\n\t\t\treturn\n\t\t}\n\t\tlabel, err := strconv.ParseUint(parts[4], 10, 64)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tsize, err := d.GetSize(ctx.VersionID(), label)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tw.Header().Set(\"Content-type\", \"application/json\")\n\t\tfmt.Fprintf(w, \"{%q: %d, %q: %d}\", \"Label\", label, \"Voxels\", 
size)\n\t\ttimedLog.Infof(\"HTTP %s: get label %d size\", r.Method, label)\n\n\tcase \"sizerange\":\n\t\t// GET <api URL>/node/<UUID>/<data name>/sizerange/<min size>/<optional max size>\n\t\tif len(parts) < 5 {\n\t\t\tserver.BadRequest(w, r, \"ERROR: DVID requires at least the minimum size to follow 'sizerange' command\")\n\t\t\treturn\n\t\t}\n\t\tminSize, err := strconv.ParseUint(parts[4], 10, 64)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tvar maxSize uint64\n\t\tif len(parts) >= 6 {\n\t\t\tmaxSize, err = strconv.ParseUint(parts[5], 10, 64)\n\t\t\tif err != nil {\n\t\t\t\tserver.BadRequest(w, r, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tjsonStr, err := d.GetSizeRange(ctx.VersionID(), minSize, maxSize)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tw.Header().Set(\"Content-type\", \"application/json\")\n\t\tfmt.Fprintf(w, jsonStr)\n\t\ttimedLog.Infof(\"HTTP %s: get labels with volume > %d and < %d (%s)\", r.Method, minSize, maxSize, r.URL)\n\n\tdefault:\n\t\tserver.BadRequest(w, r, \"Unrecognized API call '%s' for labelsz data '%s'. See API help.\",\n\t\t\tparts[3], d.DataName())\n\t}\n}", "func CachedWorlds(ctx *fasthttp.RequestCtx) {\n\tqueries := queriesParam(ctx)\n\tworlds := acquireWorlds()\n\tworlds.W = worlds.W[:queries]\n\n\tfor i := 0; i < queries; i++ {\n\t\tworlds.W[i] = worldsCache.W[randomWorldNum()-1]\n\t}\n\n\tdata, _ := json.Marshal(worlds.W)\n\n\tctx.Response.Header.SetContentType(contentTypeJSON)\n\tctx.Response.SetBody(data)\n\n\treleaseWorlds(worlds)\n}", "func (d *DBClient) StatsHandler(w http.ResponseWriter, req *http.Request, next http.HandlerFunc) {\n\tstats := d.Counter.Flush()\n\n\tcount, err := d.Cache.RecordsCount()\n\n\tif err != nil {\n\t\tlog.Error(err)\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t}\n\n\tvar sr statsResponse\n\tsr.Stats = stats\n\tsr.RecordsCount = count\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tb, err := json.Marshal(sr)\n\n\tif err != nil {\n\t\tlog.Error(err)\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t} else {\n\t\tw.Write(b)\n\t\treturn\n\t}\n\n}", "func get_meaning(word string) []byte {\n\tvar meanings []string\n\ttime.Sleep(1000 * time.Millisecond)\n\tdoc, err := goquery.NewDocument(\"http://ejje.weblio.jp/content/\" + word)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tdoc.Find(\".level0\").Each(func(i int, s *goquery.Selection) {\n\t\tmeanings = append(meanings, s.Text())\n\t})\n\n\tmeaning := strings.Join(meanings, \",\")\n\n\treturn []byte(word + \"\\t\" + meaning + \"\\n\")\n}", "func golangParseWords(pResponseText, pStopWords *C.char) string {\n return wordcloud.ParseWords(C.GoString(pResponseText), C.GoString(pStopWords))\n}", "func carsHandler(w http.ResponseWriter, req *http.Request) {\n\ttime.Sleep(time.Duration(rand.Intn(maxLatency)) * time.Millisecond)\n\tw.Write([]byte(`[\n {\n \"id\": \"cfb6f7a5-4591-4f5c-8b17-9a1b10f98ada\",\n \"name\": \"Toyota Yaris\",\n \"price\": \"30\"\n },\n {\n \"id\": \"afad6e6c-ef7f-4dc9-bc0f-ce74d5392175\",\n \"name\": \"Honda Civic\",\n \"price\": \"40\"\n }\n]`))\n}", "func writeJSON(w http.ResponseWriter, status int, data mapStringInterface) error {\n\tjs, err := json.Marshal(data)\n\t//js, err := json.MarshalIndent(data, \"\", \"\\t\")\n\n\tif err != nil {\n\t\treturn err\n\t}\n\tjs = append(js, '\\n')\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tw.Write(js)\n\treturn nil\n}", "func count(w 
http.ResponseWriter, r *http.Request) {\n\tif r.Method != \"GET\" {\n\t\tw.WriteHeader(http.StatusMethodNotAllowed)\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"http://www.malaysia-ikea.com\")\n\n\tif n, err := store.CountEntries(); err != nil {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t} else {\n\t\tw.Write([]byte(fmt.Sprintf(\"%d\", n)))\n\t}\n}", "func dataHandler(w http.ResponseWriter, r *http.Request) {\n\tvar token util.PickupToken\n\terr := json.Unmarshal([]byte(visStr), &token)\n\tif err != nil {\n\t\tfmt.Println(\"error when unmarshalling message\")\n\t\treturn\n\t}\n\trow := make([]Row, 0, 10)\n\t// fmt.Println(\"[UI] \" + visStr)\n\n\tfor i := 0; i < token.Length; i++ {\n\t\tpoint := token.Points[i]\n\t\tif point.X == math.MaxFloat64/10 {\n\t\t\tcontinue\n\t\t}\n\t\trow = append(row, Row{\n\t\t\tC: []ColVal{\n\t\t\t\t{\n\t\t\t\t\tV: point.X,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tV: point.Y,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tV: \"null\",\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\t}\n\n\terr = json.Unmarshal([]byte(visBusyStr), &token)\n\tif err != nil {\n\t\tfmt.Println(\"error when unmarshalling message\")\n\t\treturn\n\t}\n\t// fmt.Println(\"[UI] \" + visStr)\n\n\tfor i := 0; i < token.Length; i++ {\n\t\tpoint := token.Points[i]\n\t\tif point.X == math.MaxFloat64/10 {\n\t\t\tcontinue\n\t\t}\n\t\trow = append(row, Row{\n\t\t\tC: []ColVal{\n\t\t\t\t{\n\t\t\t\t\tV: point.X,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tV: \"null\",\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tV: point.Y,\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\t}\n\n\t// //forced sync, terrible idea\n\t// for reqMap[id] == \"\" {\n\t// \ttime.Sleep(10 * time.Millisecond)\n\t// }\n\t// fmt.Println(\"[UI] response sent:\" + reqMap[id])\n\t// fmt.Fprintf(w, \"%s\\r\\n\", reqMap[id])\n\t// delete(reqMap, id)\n\n\t// for _, point := range idleCarNodePosition {\n\t// \trow = append(row, Row{\n\t// \t\tC: []ColVal{\n\t// \t\t\t{\n\t// \t\t\t\tV: point.X,\n\t// \t\t\t},\n\t// \t\t\t{\n\t// \t\t\t\tV: point.Y,\n\t// \t\t\t},\n\t// \t\t},\n\t// \t})\n\t// }\n\td := DataTable{\n\t\tColsDesc: []ColDesc{\n\t\t\t{Label: \"X\", Type: \"number\"},\n\t\t\t{Label: \"Idle\", Type: \"number\"},\n\t\t\t{Label: \"Busy\", Type: \"number\"},\n\t\t\t//{Label: \"Y\", Type: \"number\"},\n\t\t},\n\t\tRows: row,\n\t}\n\t// d := DataTable{\n\t// \tColsDesc: []ColDesc{\n\t// \t\t{Label: \"X\", Type: \"number\"},\n\t// \t\t{Label: \"Y\", Type: \"number\"},\n\t// \t\t//{Label: \"Y\", Type: \"number\"},\n\t// \t},\n\t// \tRows: []Row{\n\t// \t\t{\n\t// \t\t\tC: []ColVal{\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: 4,\n\t// \t\t\t\t},\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: 3,\n\t// \t\t\t\t},\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: \"null\",\n\t// \t\t\t\t},\n\t// \t\t\t},\n\t// \t\t},\n\t// \t\t{\n\t// \t\t\tC: []ColVal{\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: -1,\n\t// \t\t\t\t},\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: \"null\",\n\t// \t\t\t\t},\n\t// \t\t\t\t{\n\t// \t\t\t\t\tV: -7,\n\t// \t\t\t\t},\n\t// \t\t\t},\n\t// \t\t},\n\t// \t},\n\t// }\n\tb, err := json.MarshalIndent(d, \"\", \"\t\")\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\t// fmt.Printf(\"%s\\n\", b)\n\tfmt.Fprintf(w, \"%s\\n\", b)\n\t// fmt.Fprintf(w, \"<h1>Hello from Team 9 %s!</h1>\", r.URL.Path[1:])\n}", "func userCartHandler(formatter *render.Render) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, req *http.Request) {\n\t\t/**\n\t\t\tMongo server setup\n\t\t**/\n\t\tsession, err := mgo.Dial(mongodb_server)\n if err != nil {\n fmt.Println(\"mongoserver panic\")\n }\n defer session.Close()\n 
session.SetMode(mgo.Monotonic, true)\n u := session.DB(mongodb_database).C(\"cart\")\n s := session.DB(mongodb_database).C(\"score\")\n c := session.DB(mongodb_database).C(\"cloth\")\n\t\t/**\n\t\t\tGet Post body\n\t\t**/ \n // body, err := ioutil.ReadAll(req.Body)\n\t\t// if err != nil {\n\t\t// \tlog.Fatalln(err)\n\t\t// }\n\t\t// fmt.Println(body)\n\n\t\t// var userPostResult UserPostId\n\t\t// json.Unmarshal(body, &userPostResult)\n\n\t\t// userId := userPostResult.UserId\n\n\t\tparams := mux.Vars(req)\n\t\tvar userId string = params[\"userId\"]\n\t\tfmt.Println(\"userId\", userId)\n\t\t/**\n\t\t\tGet cloth id by userid\n\t\t**/\n\t\tvar clothIdResult []bson.M\n\t\terr = u.Find(bson.M{\"userId\": userId}).All(&clothIdResult)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Get cloth id panic\")\n\t\t}\n\t\tcount := len(clothIdResult)\n\t\t/*\n\t\t\tDeclare return response\n\t\t*/\n\t\tresponse := make([]Predict, count)\n\n\t\tfor i := 0; i < count; i++ {\n\t\t\tclothSingleResult := clothIdResult[i]\n\t\t\tclothId := clothSingleResult[\"clothId\"].(string)\n\t\t\tvar clothInfo bson.M\n\t\t\terr = c.Find(bson.M{\"clothesId\": clothId}).One(&clothInfo)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Get cloth info panic\")\n\t\t\t}\n\t\t\tresponse[i].ClothId = clothId\n\t\t\tresponse[i].Url = clothInfo[\"url\"].(string)\n\t\t\tresponse[i].Name = clothInfo[\"name\"].(string)\n\t\t\tresponse[i].Price = clothInfo[\"price\"].(string)\n\t\t\tvar clothScore bson.M\n\t\t\terr = s.Find(bson.M{\"id\": clothId}).One(&clothScore)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Get cloth score panic\")\n\t\t\t}\n\t\t\tresponse[i].Score = clothScore[\"score\"].(string)\n\t\t}\n \n\t\tformatter.JSON(w, http.StatusOK, response)\n\t}\n}", "func dataGetHandler(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\tresourceName := vars[\"resourceName\"]\n\tlog.Debug(\"In dataHandler. 
Request for \" + resourceName)\n\tdat, err := ioutil.ReadFile(dataDir + resourceName + \".json\")\n\tif err != nil {\n\t\thttp.Error(w, \"Error fetching data for resource \"+resourceName, 404)\n\t\tlog.Error(err.Error())\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tfmt.Fprintf(w, string(dat))\n}", "func Light(w http.ResponseWriter, r *http.Request) {\n\n\t// get word list\n\tword := r.URL.Query().Get(wordParam)\n\tlog.Printf(\"Word to display is : %s\", word)\n\n\tif word == \"\" {\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t_, err := w.Write([]byte(\"Error while reading word to display\"))\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error while sending error %v\", err)\n\t\t}\n\t\treturn\n\t}\n\n\t// put string lower case\n\tword = strings.ToLower(word)\n\n\t// filter special characters\n\tt := transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)\n\tword, _, _ = transform.String(t, word)\n\n\t// read configuration from env\n\tprojectID := os.Getenv(\"PROJECT_ID\")\n\tif projectID == \"\" {\n\t\tlog.Print(\"Error while reading project ID\")\n\t\treturn\n\t}\n\n\tprojectRegion := os.Getenv(\"PROJECT_REGION\")\n\tif projectRegion == \"\" {\n\t\tlog.Print(\"Error while reading project region\")\n\t\treturn\n\t}\n\n\tprojectRegistryID := os.Getenv(\"PROJECT_REGISTRY_ID\")\n\tif projectRegistryID == \"\" {\n\t\tlog.Print(\"Error while reading project registry ID\")\n\t\treturn\n\t}\n\n\tprojectDeviceID := os.Getenv(\"PROJECT_DEVICE_ID\")\n\tif projectDeviceID == \"\" {\n\t\tlog.Print(\"Error while reading project device ID\")\n\t\treturn\n\t}\n\n\t// send command to device\n\tres, err := sendCommand(projectID, projectRegion, projectRegistryID, projectDeviceID, word)\n\n\tif err != nil {\n\t\tlog.Printf(\"Error while sending words to guarland %v\", err)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t_, err := w.Write([]byte(\"Error while sending command to device\"))\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error while sending error %v\", err)\n\t\t}\n\t\treturn\n\t}\n\n\t_, err = fmt.Fprintln(w, \"Sent command to device\")\n\tif err != nil {\n\t\tlog.Printf(\"Error while sending back answer to caller %v\", err)\n\t}\n\n\tlog.Printf(\"Light answer %v\", res)\n}", "func (h *Handler) GetWeeklyHandler(c *gin.Context) {\n\n\th.config.TotalWeeks = config.DefaultWeeks\n\n\t// get totalWeeks from uri, if exist\n\tvar tw URITotalWeeks\n\tif err := c.ShouldBindUri(&tw); err != nil {\n\t\tlog.Println(\"err: \", err)\n\t\tc.AbortWithStatus(http.StatusBadRequest)\n\t} else if tw.Weeks != \"\" {\n\t\tif weeks, err := strconv.Atoi(tw.Weeks); err != nil {\n\t\t\tlog.Println(\"err: \", err)\n\t\t\tc.AbortWithStatus(http.StatusBadRequest)\n\t\t} else {\n\t\t\tlog.Println(\"total weeks to display: \", weeks)\n\t\t\th.config.TotalWeeks = weeks\n\t\t}\n\t}\n\n\tcoronaDataSet, err := h.fetcher.GetCoronaData()\n\tif err != nil {\n\t\tlog.Printf(\"h.fetcher.GetCoronaData() returns an err = %v\", err)\n\t\tc.AbortWithStatus(http.StatusInternalServerError)\n\t}\n\tdata := coronaDataSet.Data\n\n\t// set some global options like Title/Legend/ToolTip or anything else\n\tbar := charts.NewBar()\n\tbar.SetGlobalOptions(\n\t\tcharts.WithTitleOpts(opts.Title{\n\t\t\tTitle: \"Covid confirmed person data comparison\",\n\t\t\tSubtitle: fmt.Sprintf(\"%d Weeks comparison of each weekday\", h.config.TotalWeeks),\n\t\t\tLeft: \"5%\",\n\t\t}),\n\t\tcharts.WithLegendOpts(opts.Legend{\n\t\t\tShow: true,\n\t\t\tLeft: \"48%\",\n\t\t\tTop: 
\"5%\",\n\t\t}),\n\t\tcharts.WithInitializationOpts(opts.Initialization{\n\t\t\tPageTitle: \"Weekly comparison of the Corona confirmed persion in South Korea\", // HTML title\n\t\t\tWidth: \"950px\", // Width of canvas\n\t\t\tHeight: \"550px\", // Height of canvas\n\t\t}),\n\t)\n\n\t// Put data into the bar instance\n\n\tfor i := 0; i < h.config.TotalWeeks; i++ {\n\t\tlabel := fmt.Sprintf(\"%d weeks ago\", h.config.TotalWeeks-i)\n\t\tstart := i * 7\n\t\tend := start + 7\n\t\tbar.AddSeries(label, generateWeeklyBarItems(data[start:end]))\n\t}\n\tbar.SetXAxis(h.getWeeklyAxis(data[0])).\n\t\tSetSeriesOptions(charts.WithLabelOpts(opts.Label{\n\t\t\tShow: true,\n\t\t\tPosition: \"top\",\n\t\t}),\n\t\t)\n\n\tif err := bar.Render(c.Writer); err != nil {\n\t\tlog.Println(err)\n\t}\n}", "func (d *Data) ServeHTTP(uuid dvid.UUID, ctx *datastore.VersionedCtx, w http.ResponseWriter, r *http.Request) {\n\ttimedLog := dvid.NewTimeLog()\n\n\t// Check the action\n\taction := strings.ToLower(r.Method)\n\tif action != \"get\" {\n\t\tserver.BadRequest(w, r, \"labelsurf data can only accept GET HTTP requests\")\n\t\treturn\n\t}\n\n\t// Break URL request into arguments\n\turl := r.URL.Path[len(server.WebAPIPath):]\n\tparts := strings.Split(url, \"/\")\n\tif len(parts[len(parts)-1]) == 0 {\n\t\tparts = parts[:len(parts)-1]\n\t}\n\n\tif len(parts) < 4 {\n\t\tserver.BadRequest(w, r, \"Incomplete API request\")\n\t\treturn\n\t}\n\n\t// Process help and info.\n\tswitch parts[3] {\n\tcase \"help\":\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\tfmt.Fprintln(w, dtype.Help())\n\n\tcase \"info\":\n\t\tjsonBytes, err := d.MarshalJSON()\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tfmt.Fprintf(w, string(jsonBytes))\n\n\tcase \"surface\":\n\t\t// GET <api URL>/node/<UUID>/<data name>/surface/<label>\n\t\tif len(parts) < 5 {\n\t\t\tserver.BadRequest(w, r, \"ERROR: DVID requires label ID to follow 'surface' command\")\n\t\t\treturn\n\t\t}\n\t\tlabel, err := strconv.ParseUint(parts[4], 10, 64)\n\t\tfmt.Printf(\"Getting surface for label %d\\n\", label)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tgzipData, found, err := GetSurface(ctx, label)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, \"Error on getting surface for label %d: %v\", label, err)\n\t\t\treturn\n\t\t}\n\t\tif !found {\n\t\t\thttp.Error(w, fmt.Sprintf(\"Surface for label '%d' not found\", label), http.StatusNotFound)\n\t\t\treturn\n\t\t}\n\t\tw.Header().Set(\"Content-type\", \"application/octet-stream\")\n\t\tif err := dvid.WriteGzip(gzipData, w, r); err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\ttimedLog.Infof(\"HTTP %s: surface on label %d (%s)\", r.Method, label, r.URL)\n\n\tcase \"surface-by-point\":\n\t\t// GET <api URL>/node/<UUID>/<data name>/surface-by-point/<coord>\n\t\tif len(parts) < 5 {\n\t\t\tserver.BadRequest(w, r, \"ERROR: DVID requires coord to follow 'surface-by-point' command\")\n\t\t\treturn\n\t\t}\n\t\tcoord, err := dvid.StringToPoint(parts[4], \"_\")\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tlabel, err := d.GetLabelAtPoint(ctx.VersionID(), coord)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\tgzipData, found, err := GetSurface(ctx, label)\n\t\tif err != nil {\n\t\t\tserver.BadRequest(w, r, \"Error on getting surface for label %d: %v\", label, err)\n\t\t\treturn\n\t\t}\n\t\tif !found 
{\n\t\t\thttp.Error(w, fmt.Sprintf(\"Surface for label '%d' not found\", label), http.StatusNotFound)\n\t\t\treturn\n\t\t}\n\t\tfmt.Printf(\"Found surface for label %d: %d bytes (gzip payload)\\n\", label, len(gzipData))\n\t\tw.Header().Set(\"Content-type\", \"application/octet-stream\")\n\t\tif err := dvid.WriteGzip(gzipData, w, r); err != nil {\n\t\t\tserver.BadRequest(w, r, err)\n\t\t\treturn\n\t\t}\n\t\ttimedLog.Infof(\"HTTP %s: surface-by-point at %s (%s)\", r.Method, coord, r.URL)\n\n\tdefault:\n\t\tserver.BadRequest(w, r, \"Unrecognized API call '%s' for labelsurf data '%s'. See API help.\",\n\t\t\tparts[3], d.DataName())\n\t}\n}", "func NameHandler(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\n\tname := nameResponse{\n\t\tName: \"world\",\n\t}\n\n\tjson.NewEncoder(w).Encode(name)\n}", "func (analyzer *Analyzer) Keywords(flavor, payload string, options url.Values) (*KeywordsResponse, error) {\n\tif !entryPoints.hasFlavor(\"keywords\", flavor) {\n\t\treturn nil, errors.New(fmt.Sprintf(\"keywords info for %s not available\", flavor))\n\t}\n\n\toptions.Add(flavor, payload)\n\turl := entryPoints.urlFor(analyzer.baseUrl, \"keywords\", flavor)\n\tdata, err := analyzer.analyze(url, options, nil)\n\n\tif err != nil {\n\t\treturn nil, err\n\t} else {\n\t\tresponse := new(KeywordsResponse)\n\t\terr := json.Unmarshal(data, &response)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tif response.Status != \"OK\" {\n\t\t\t\treturn nil, errors.New(response.StatusInfo)\n\t\t\t} else {\n\t\t\t\treturn response, nil\n\t\t\t}\n\t\t}\n\t}\n}", "func (self *WebServer) respData(c *gin.Context, status, code int,\n\tmessage string, data interface{}) {\n\tc.JSON(status, &CR{\n\t\tMessage: message,\n\t\tCode: code,\n\t\tTimestamp: time.Now().Unix(),\n\t})\n}", "func saveData(w http.ResponseWriter, req *http.Request) {\n\tm := inverseRowMajor(array)\n\tm.printMatrix()\n\tenableCors(&w)\n\tjson.NewEncoder(w).Encode(m)\n}", "func loadData(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\t// Fetch necessary data\n\ttimes := getEventTimes()\n\timages, err := loadImages()\n\tcheckError(err)\n\n\t// Encode to json\n\tdata := struct {\n\t\tEvents []event `json:\"events\"`\n\t\tImages []imageData `json:\"images\"`\n\t}{times, images}\n\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\terr = json.NewEncoder(w).Encode(&data)\n\tcheckError(err)\n}", "func MapHandler(w http.ResponseWriter, r *http.Request) {\n\tjob := HttpRequest{w: w, r: r}\n\tcustomer, err := job.GetCustomer()\n\tif nil != err {\n\t\tdata := HttpMessageResponse{Status: \"error\", Message: err.Error()}\n\t\tjs := job.MarshalJsonFromStruct(data)\n\t\tjob.SendJsonResponse(js)\n\t\treturn\n\t}\n\n\thtmlFile := \"./templates/map.html\"\n\ttmpl, _ := template.ParseFiles(htmlFile)\n\tmessage := fmt.Sprintf(\" %v %v [200]\", r.Method, r.URL.Path)\n\tNetworkLogger.Info(r.RemoteAddr, message)\n\ttmpl.Execute(w, PageViewData{Apikey: customer.Apikey, Version: VERSION})\n}", "func Write(w http.ResponseWriter, data interface{}, statusCode int) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.WriteHeader(statusCode)\n\n\t// For now, we're assuming json.Marshal succeeds...\n\tmarshalledData, _ := json.Marshal(data)\n\tw.Write(marshalledData)\n}", "func UploadHandler(outputPath string, config *task.IngestTaskConfig) func(http.ResponseWriter, 
*http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tdataset := pat.Param(r, \"dataset\")\n\n\t\t// read the file from the request\n\t\tbytes, err := receiveFile(r)\n\t\tif err != nil {\n\t\t\thandleError(w, errors.Wrap(err, \"unable to receive file from request\"))\n\t\t\treturn\n\t\t}\n\n\t\t// create the raw dataset schema doc\n\t\tformattedPath, err := task.CreateDataset(dataset, bytes, outputPath, config)\n\t\tif err != nil {\n\t\t\thandleError(w, errors.Wrap(err, \"unable to create dataset\"))\n\t\t\treturn\n\t\t}\n\t\tlog.Infof(\"uploaded new dataset %s at %s\", dataset, formattedPath)\n\t\t// marshal data and sent the response back\n\t\terr = handleJSON(w, map[string]interface{}{\"result\": \"uploaded\"})\n\t\tif err != nil {\n\t\t\thandleError(w, errors.Wrap(err, \"unable marshal result histogram into JSON\"))\n\t\t\treturn\n\t\t}\n\t}\n}", "func buildJSON(line []string) []byte {\n\tvar dataArray []Data\n\tfor _, imgurl := range line {\n\t\tvar urlMap = ImageURL{}\n\t\turlMap.Url = imgurl\n\t\tvar imageMap = Image{}\n\t\timageMap.Image = urlMap\n\t\tvar dataMap = Data{}\n\t\tdataMap.Data = imageMap\n\t\tdataArray = append(dataArray, dataMap)\n\t}\n\tpMap = make(map[string][]Data)\n\tpMap[\"inputs\"] = dataArray\n\tpagesJson, err := json.Marshal(pMap)\n\tcheck(err)\n\treturn pagesJson\n}", "func metricsHandler(w http.ResponseWriter, r *http.Request, s *Server) {\n\tif r.Method == \"POST\" {\n\t\t//parse the parameters and file sent in the post request\n\t\tr.ParseMultipartForm(32 << 20)\n\t\tfile, _, err := r.FormFile(\"uploadfile\")\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\treturn\n\t\t}\n\t\tdefer file.Close()\n\n\t\tbuff := bytes.NewBuffer(nil)\n\t\tio.Copy(buff, file)\n\t\tsource := string(buff.Bytes())\n\n\t\t//get selected language\n\t\tlanguage := r.Form[\"language\"][0]\n\n\t\t//get selected metrics\n\t\tmetrics := make([]string, 0)\n\t\tselectedMetrics := r.Form[\"metric\"]\n\t\tfor _, metric := range selectedMetrics {\n\t\t\tif _, ok := s.Analyzer.Metrics[metric]; ok {\n\t\t\t\tmetrics = append(metrics, metric)\n\t\t\t}\n\t\t}\n\n\t\t//analyze the code with selected metrics\n\t\tpage := &Page{Config: s.Config, Source: source, Languages: s.Analyzer.Languages,\n\t\t\tAnalysis: s.Analyzer.Analyze(language, source, metrics)}\n\n\t\t//display the results page\n\t\ts.Template.ExecuteTemplate(w, \"metrics.html\", page)\n\t} else if r.Method == \"GET\" {\n\t\thttp.Redirect(w, r, \"http://\"+s.Config.Domain+\":\"+s.Config.Port+\"/\", http.StatusFound)\n\t}\n}", "func DBHandler(db storage.DB) atreugo.View {\n\treturn func(ctx *atreugo.RequestCtx) error {\n\t\tworld := storage.AcquireWorld()\n\t\tdb.GetOneRandomWorld(world)\n\t\terr := ctx.JSONResponse(world)\n\n\t\tstorage.ReleaseWorld(world)\n\n\t\treturn err\n\t}\n}", "func Worlds(lang string) (res []int, err error) {\n\tvar appendix bytes.Buffer\n\tver := \"v2\"\n\ttag := \"worlds\"\n\n\tif lang != \"\" {\n\t\tappendix.WriteString(\"?lang=\")\n\t\tappendix.WriteString(lang)\n\t}\n\n\tdata, err := fetchJSON(ver, tag, appendix.String())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = json.Unmarshal(data, &res)\n\treturn\n}", "func GinHandler(c *gin.Context) {\n\tc.JSON(http.StatusOK, api.GetStats())\n}", "func (s *SimpleConnector) GetPageOfDictionary(index int) jsonResponse.Dictionary {\n\trequestStr := stringUrlCommon + \"userdict/json?groupId=dictionary&filter=learned&page=\" + strconv.Itoa(index)\n\trequestArgs := url.Values{}\n\tresp, err := client.PostForm(requestStr, 
requestArgs)\n\tdefer resp.Body.Close()\n\tvar m lingualeo.LeoDictionaryImpl\n\tdec := json.NewDecoder(resp.Body)\n\terr = dec.Decode(&m)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\treturn &m\n}", "func getPosts(w http.ResponseWriter, r *http.Request) {\n io.WriteString(w, \"Getting posts... \\n\")\n w.Header().Set(\"Content-Type\", \"application/json\")\n json.NewEncoder(w).Encode(posts)\n}", "func (h *StatsHandler) ServeHTTP(res http.ResponseWriter, req *http.Request) {\n\n\tres.Header().Set(\"Content-Type\", \"application/json\")\n\ttotal := postCount.Count()\n\tvar average float64\n\tvar sum int64\n\taverage = 0\n\tif len(postStats) != 0 {\n\t\tfor i := 0; i < len(postStats); i++ {\n\t\t\tsum += postStats[i]\n\t\t}\n\t\t//Response times were well under a millisecond, so this...\n\t\taverage = float64(sum / int64(total)) / 1000.0\n\t}\n\n\tfmt.Fprintf(res, `{\"total\": %d, \"average\": %f}`, total, average)\n}", "func GenerateWord() (string, error) {\n\taddress := fmt.Sprintf(\"%s:%v\", cfg.Conf.Word.Service.Host, cfg.Conf.Word.Service.Port)\n\tconn, err := grpc.Dial(address, grpc.WithInsecure())\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"error while connecting: %v\", err)\n\t}\n\tdefer conn.Close()\n\tc := wordgen.NewWordGeneratorClient(conn)\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\tresp, err := c.GenerateWord(ctx, &wordgen.GenerateWordReq{})\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"error while making request: %v\", err)\n\t}\n\treturn resp.Word, nil\n}", "func GetCountriesHandler(w http.ResponseWriter, r *http.Request) {\n\tcountries := repository.GetAllCountries()\n\tsortCountries(countries, r)\n\tjson.NewEncoder(w).Encode(countries)\n}", "func control_data_type(w http.ResponseWriter, r *http.Request) {\n \n //ADMIN\n checkAdmin(w,r)\n\n //CONTEXT\n c := appengine.NewContext(r)\n\n /*******************************GET LIST/ADD**************************/\n if r.Method == \"GET\"{\n \n //DATA\n data := map[string]string{\n \"get\":\"true\",\n \"title\": \"Data\",\n } \n\n\n //QUERY\n q := datastore.NewQuery(\"DataType\").Limit(100)\n\n\n //DB GET ALL\n var db []*DataType\n keys,err := q.GetAll(c,&db)\n\n //DB ERR\n if err != nil {\n fmt.Fprint(w,\"error getting items\")\n return\n }\n\n //VAR\n var dbData []map[string]string\n\n //FOR DB ITEMS\n for i := range db {\n \n //KEYS ENCODE\n k := keys[i].Encode()\n\n dbData = append(dbData,\n map[string]string {\n \"title\": db[i].Title,\n \"key\": k,\n //\"fieldNames\": db[i].Fields[1].Name,\n //\"fieldUI\":db[i].UI,\n /*Fields: map[string]string{\n \"Name\":\"text\",\n \"Email\":\"text\",\n \"Phone\":\"text\",\n \"Message\":\"textarea\", \n },*/\n\n\n },\n )\n }\n\n \n//fmt.Fprintln(w,data)\n//fmt.Fprintln(w,dbData)\n//fmt.Fprintln(w,r.Header.Get(\"X-Requested-With\")) //\"X-Requested-With\"\n\nif r.Header.Get(\"X-Requested-With\") != \"\" {\n\n //MARSHAL JSON\n j,errJSON := json.Marshal(dbData)\n if errJSON != nil {\n fmt.Fprintln(w,\"error with JSON\")\n }\n\n //SET CONTENT-TYPE\n w.Header().Set(\"Content-Type\", \"application/json\")\n\n //DISPLAY JSON\n fmt.Fprint(w,string(j))\n \n} else {\n renderControl(w, r, \"/control/data.html\", data, dbData)\n}\n \n\n\n/********************************POST ADD*********************************/\n} else {\n\n\n //GET FORM VALS\n formVal := func(val string)string{\n return r.FormValue(val)\n }\n\n //newFields := map[string]string {}\n \n //fmt.Fprintln(w,r)\n\n TheFields := []FieldType {}\n FieldCount,_ := 
strconv.Atoi(formVal(\"field_count\"))\n FieldCount = FieldCount - 1\n \n for i := 0; i <= FieldCount; i++ {\n\n iCount := strconv.Itoa(i)\n\n TheFields = append(TheFields, FieldType {\n Name: formVal(\"fieldName\" + iCount),\n Order: formVal(\"fieldOrder\" + iCount),\n UI: formVal(\"fieldUI\" + iCount),\n //Errors: formVal(\"fieldErrors\" + iCount),\n })\n\n //fmt.Fprintln(w,formVal(\"fieldName\" + iCount))\n //fmt.Fprintln(w,formVal(\"fieldName0\"))\n //fmt.Fprintln(w, FieldCount)\n //fmt.Fprintln(w,iCount)\n }\n\n\n\n \n \n/*\n {\n Name: formVal(\"fieldName1\"),\n Label: formVal(\"fieldLabel1\"),\n UI: formVal(\"fieldUI1\"),\n Errors: formVal(\"fieldErrors1\"),\n },\n {\n Name: formVal(\"fieldName2\"),\n Label: formVal(\"fieldLabel2\"),\n UI: formVal(\"fieldUI2\"),\n Errors: formVal(\"fieldErrors2\"),\n },\n }*/\n\n\n //PRETTIFY URL\n reg, err := regexp.Compile(\"[^A-Za-z0-9]+\")\n \n if err != nil {\n fmt.Fprintln(w,\"error with RegX\")\n }\n \n prettyurl := reg.ReplaceAllString(formVal(\"slug\"), \"-\")\n prettyurl = strings.ToLower(strings.Trim(prettyurl, \"-\"))\n\n\n //fmt.Fprintln(w,prettyurl)\n\n\n\n //MAP FORM VALS\n newType := DataType {\n Title: formVal(\"typeName\"),\n TemplateList: formVal(\"data_type_template_list\"),\n TemplateItem: formVal(\"data_type_template_item\"),\n Description: formVal(\"description\"),\n Keywords: formVal(\"keywords\"),\n URL: prettyurl,\n Fields: TheFields,\n \n }\n\n //fmt.Fprintln(w,newType)\n\n\n //DB PUT\n key, err := datastore.Put(c, datastore.NewIncompleteKey(c, \"DataType\", nil), &newType)\n \n //IF ERRORS\n if err != nil {\n fmt.Fprint(w,\"error adding\")\n return\n\n //NO ERRORS\n } else {\n\n \n cacheFlush(\"types\",r)\n\n //DEBUG\n //fmt.Fprintln(w,\"added successfully\")\n //fmt.Fprintln(w,\"key: \" + key.Encode())\n\n \n //PREP JSON\n m := map[string]string{\n \"message\":\"new type added\",\n \"key\":key.Encode(),\n \"title\":newType.Title,\n \"adminSlug\":AdminSlug,\n } \n\n //MARSHAL JSON\n j,errJSON := json.Marshal(m)\n if errJSON != nil {\n fmt.Fprintln(w,\"error with JSON\")\n }\n\n //DISPLAY JSON\n w.Header().Set(\"Content-Type\", \"application/json\")\n fmt.Fprint(w,string(j))\n return\n \n \n //END ERRORS\n }\n \n\n //END POST\n }\n \n//END FUNC\n}", "func write(resp *Response, w http.ResponseWriter) {\n\tjs, _ := json.Marshal(resp)\n\tfmt.Fprint(w, string(js))\n}", "func main() {\n\tenv, err := plugins.NewEnvironment()\n\tenv.RespondAndExitIfError(err)\n\n\tvar stats *statistics.DocumentStatistics\n\n\tfor _, model := range env.Request.Models {\n\t\tswitch model.TypeUrl {\n\t\tcase \"openapi.v2.Document\":\n\t\t\tdocumentv2 := &openapiv2.Document{}\n\t\t\terr = proto.Unmarshal(model.Value, documentv2)\n\t\t\tif err == nil {\n\t\t\t\t// Analyze the API document.\n\t\t\t\tstats = statistics.NewDocumentStatistics(env.Request.SourceName, documentv2)\n\t\t\t}\n\t\tcase \"openapi.v3.Document\":\n\t\t\tdocumentv3 := &openapiv3.Document{}\n\t\t\terr = proto.Unmarshal(model.Value, documentv3)\n\t\t\tif err == nil {\n\t\t\t\t// Analyze the API document.\n\t\t\t\tstats = statistics.NewDocumentStatisticsV3(env.Request.SourceName, documentv3)\n\t\t\t}\n\t\t}\n\t}\n\n\tif stats != nil {\n\t\t// Return the analysis results with an appropriate filename.\n\t\t// Results are in files named \"summary.json\" in the same relative\n\t\t// locations as the description source files.\n\t\tfile := &plugins.File{}\n\t\tfile.Name = strings.Replace(stats.Name, path.Base(stats.Name), \"summary.json\", -1)\n\t\tfile.Data, err = json.MarshalIndent(stats, 
\"\", \" \")\n\t\tfile.Data = append(file.Data, []byte(\"\\n\")...)\n\t\tenv.RespondAndExitIfError(err)\n\t\tenv.Response.Files = append(env.Response.Files, file)\n\t}\n\n\tenv.RespondAndExit()\n}", "func count(rawdata string, c chan dict) { // mapfn\n\tm := make(dict)\n\twords := strings.Fields(rawdata)\n\n\tfor _, word := range words {\n\t\tm[word]++\n\t}\n\n\tc <- m\n}", "func (h *HTTP) JSONHandler() http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tuptime := time.Since(h.timestamp).String()\n\t\tgoroutines := runtime.NumGoroutine()\n\n\t\th.Lock()\n\n\t\th.json.SetP(fmt.Sprintf(\"%v\", uptime), \"uptime\")\n\t\th.json.SetP(goroutines, \"goroutines\")\n\t\tblob := h.jsonRoot.Bytes()\n\n\t\th.Unlock()\n\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.Write(blob)\n\t}\n}", "func Handler(res http.ResponseWriter, req *http.Request) {\n\t// First, decode the JSON response body\n\tbody := &models.TelegramRequest{}\n\tif err := json.NewDecoder(req.Body).Decode(body); err != nil {\n\t\tfmt.Println(\"could not decode request body\", err)\n\t\treturn\n\t}\n\n\tfmt.Println(\"Received text :- \" + body.Message.Text + \" , from :- \" + body.Message.From.FirstName)\n\n\tif val, err := strconv.Atoi(body.Message.Text); err == nil {\n\t\tdataToSend := \"\"\n\t\tif len(body.Message.Text) == 6 {\n\t\t\tdataToSend = cowin.FetchDataByPinCode(val)\n\t\t} else if len(body.Message.Text) <= 3 {\n\t\t\tdataToSend = cowin.FetchDataByDistrictId(val)\n\t\t} else {\n\t\t\tsendMessage.SendTelegramUsingWebhook(body.Message.Chat.Id,\n\t\t\t\t\"Please enter valid pincode or district id\")\n\t\t\treturn\n\t\t}\n\t\tif dataToSend == \"\" {\n\t\t\tfmt.Println(\"Error empty string, so not sending data\")\n\t\t\tsendMessage.SendTelegramUsingWebhook(body.Message.Chat.Id,\n\t\t\t\t\"Unable to fetch data, please try again after sometime\")\n\t\t} else {\n\t\t\t//fmt.Println(dataToSend)\n\t\t\tsendMessage.SendTelegramUsingWebhook(body.Message.Chat.Id, dataToSend)\n\t\t\tfmt.Println(\"Data sent\")\n\t\t}\n\t} else {\n\t\tsendMessage.SendTelegramUsingWebhook(body.Message.Chat.Id,\n\t\t\t\"Please enter either valid pincode or district id\")\n\t}\n}", "func AddDataHandler(w http.ResponseWriter, r *http.Request) {\n\n\t// prevent CORS error\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type\")\n\tr.ParseForm()\n\n\tpatientID, err := strconv.Atoi(r.FormValue(\"patientid\"))\n\tevent := r.FormValue(\"event\")\n\tmedicine := strings.Split(r.FormValue(\"medicine\"), \",\")\n\ttimeSinceFirstOccurence, err := strconv.Atoi(r.FormValue(\"time_since_first_occurance\"))\n\tsuccess, err := strconv.ParseBool(r.FormValue(\"success\"))\n\n\tvar msg EventBlock\n\tmsg = EventBlock{\n\t\tEventID: len(Chain),\n\t\tPatientID: patientID,\n\t\tEvent: event,\n\t\tMedicine: medicine,\n\t\tTimeSFO: timeSinceFirstOccurence,\n\t\tSuccess: success,\n\t\tHash: \"\",\n\t}\n\ti := len(Chain) - 1\n\tdata := Chain[i].Event + string(Chain[i].PatientID) + string(Chain[i].TimeSFO) + strconv.FormatBool(Chain[i].Success) + strings.Join(Chain[i].Medicine, \",\")\n\tinst := sha256.New()\n\tinst.Write([]byte(data))\n\tmsg.Hash = hex.EncodeToString(inst.Sum(nil))\n\n\tChain = append(Chain, msg)\n\tresult, err := json.Marshal(Chain)\n\terr = ioutil.WriteFile(\"datastore/events.json\", result, 0777)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw.Write([]byte(`true`))\n}" ]
[ "0.60433084", "0.521025", "0.51180834", "0.50989854", "0.509619", "0.4998162", "0.4948303", "0.49430013", "0.48974964", "0.489294", "0.48574218", "0.48545098", "0.48515287", "0.48196232", "0.4805118", "0.4777445", "0.47773147", "0.47377983", "0.47038555", "0.4700269", "0.46917212", "0.46716532", "0.4653707", "0.46402752", "0.46369022", "0.45910075", "0.4578513", "0.45686004", "0.45607325", "0.4553334", "0.45327744", "0.45310697", "0.45233282", "0.4515415", "0.45132312", "0.44995868", "0.4487678", "0.44851655", "0.4477275", "0.44588694", "0.44583136", "0.44423792", "0.44311798", "0.44216612", "0.44198442", "0.440494", "0.440494", "0.44007", "0.43863958", "0.4382774", "0.43757334", "0.43756306", "0.43715945", "0.43714082", "0.43665007", "0.43635693", "0.43622965", "0.43330285", "0.43317634", "0.433047", "0.43285078", "0.4326441", "0.4322083", "0.43127325", "0.43098775", "0.43085232", "0.4308067", "0.4300237", "0.42991197", "0.42989916", "0.4292875", "0.4290898", "0.42905143", "0.4289521", "0.42847574", "0.42803627", "0.4272803", "0.42713708", "0.4270666", "0.42684016", "0.42651993", "0.4264259", "0.4259423", "0.4258542", "0.4258095", "0.4255805", "0.42466903", "0.42461336", "0.4245346", "0.42448142", "0.42387107", "0.4234588", "0.4234355", "0.42342213", "0.42329583", "0.42248043", "0.4223586", "0.4221349", "0.4206149", "0.42052728" ]
0.85232705
0
GetAllData get all data
func (db2 *DB2) GetAllData(ctx context.Context) (*TreeMapNode, error) {
	root := NewTreeMapNode("root")

	for _, v := range db2.cfg.DB2Markets {
		err := db2.AnkaDB.ForEachWithPrefix(ctx, v, candlesDB2KeyPrefix, func(key string, buf []byte) error {
			candles := &tradingpb.Candles{}
			err := proto.Unmarshal(buf, candles)
			if err != nil {
				return err
			}

			market := root.GetChildEx(v)
			symbol := market.GetChildEx(candles.Symbol)
			tag := symbol.GetChildEx(candles.Tag)
			tag.GetChildEx(strconv.Itoa(len(candles.Candles)))

			return nil
		})
		if err != nil {
			return nil, err
		}
	}

	return root, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (a *AGI) GetAllData(name string, timeout int, maxdigits int) (status int, result int, digit string, value string, err error) {\n\treturn a.Command(\"GET DATA\", name, strconv.Itoa(timeout), strconv.Itoa(maxdigits)).ResStrAll()\n}", "func (c *C) GetAll() map[string]interface{} {\n\treturn c.data\n}", "func (call *Call) DataAll() map[string]map[string]interface{} {\n\treturn call.data\n}", "func (o *Avi) FetchAll() (r []Data, err error) {\r\n\td := new(Data)\r\n\terr = o.fetch(d)\r\n\tr = append(r, *d)\r\n\treturn\r\n}", "func (p *Personal) All(ctx context.Context) (*[]PersonalData, error) {\n\tusrs, err := p.DB.All(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not select all personal data\")\n\t}\n\treturn usrs, nil\n}", "func (o *StatsAppliancesListAllOf) GetData() []StatsAppliancesListAllOfData {\n\tif o == nil || o.Data == nil {\n\t\tvar ret []StatsAppliancesListAllOfData\n\t\treturn ret\n\t}\n\treturn *o.Data\n}", "func (s *Store) GetAll() Dict {\n\treturn *s.data\n}", "func GetAll(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tvar data Data\n\tcovidData, errs := scrapper.GetAllData()\n\tif len(errs) > 0 {\n\t\tfor _, err := range errs {\n\t\t\tdata.Errors = append(data.Errors, Error{\n\t\t\t\tCode: 500,\n\t\t\t\tMessage: err.Error(),\n\t\t\t})\n\t\t}\n\n\t\tjson.NewEncoder(w).Encode(data)\n\t\treturn\n\t}\n\n\tdata.COVIDData = covidData\n\tjson.NewEncoder(w).Encode(data)\n}", "func GetAllData() string {\n\tlogger.Info(\"Trying to get all data from wechat...\")\n\theaders := GenerateWechatRequestHeaders()\n\tresp, err := req.Get(GET_ALL_DATA_URL, headers)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t}\n\tdefer resp.Response().Body.Close()\n\n\tb, _ := ioutil.ReadAll(resp.Response().Body)\n\ttext := string(b)\n\t//匹配wx.cgiData=开头,seajs.use结尾的字符串,需要再把开头和结尾去掉\n\tregExp := `(wx.cgiData=)([\\w\\W]*)(seajs.use)`\n\treg := regexp.MustCompile(regExp)\n\tmatchedText := string(reg.Find([]byte(text)))\n\tmatchedText = strings.ReplaceAll(matchedText, \"wx.cgiData=\", \"\")\n\tmatchedText = strings.ReplaceAll(matchedText, \"seajs.use\", \"\")\n\tmatchedText = strings.ReplaceAll(matchedText, \";\", \"\")\n\tlogger.Info(\"Find matched text\\n %s\", matchedText)\n\tfilename := path.Join(path.Dir(BASE_DIR), \"/assets/users_original.txt\")\n\tlogger.Info(\"Save it into file %s\", filename)\n\tioutil.WriteFile(filename, []byte(matchedText), 0644)\n\treturn filename\n}", "func (agtr *Aggregator) GetAllData(userID string) (*AggregateResponse, error) {\n\n\t//Step 1 - Fetch All Folders for the user\n\tfolders, err := agtr.fs.GetAllByUserID(userID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &AggregateResponse{}\n\t//Step2 - For every folder fetch task and create a data struct\n\tfor i := 0; i < len(folders); i++ {\n\t\tfmt.Println(\"Filling Folder with name\", folders[i].Name)\n\t\ttasksForFolder, err := agtr.ts.GetAllByFolderID(folders[i].ID)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif tasksForFolder == nil {\n\t\t\ttasksForFolder = make([]*Task, 0)\n\t\t}\n\t\t// Find out the remaining tasks in the folder\n\t\t// Remaining tasks = Tasks whose completed date is after the current date or nil\n\t\tremainingTasks := 0\n\t\tfor _, task := range tasksForFolder {\n\t\t\tif task.CompletedDate == nil || task.CompletedDate.After(time.Now()) {\n\t\t\t\tremainingTasks++\n\t\t\t}\n\t\t}\n\t\tresponse.Data = append(response.Data, Data{\n\t\t\tFolderID: folders[i].ID,\n\t\t\tName: 
folders[i].Name,\n\t\t\tRemainingTasks: remainingTasks,\n\t\t\tTasks: tasksForFolder})\n\t}\n\treturn response, nil\n}", "func (g *Get) GetAll(db *sql.DB, query string) (interface{}, error) {\n\tstmt, err := db.Prepare(query)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer stmt.Close()\n\n\trows, err := stmt.Query()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\n\tms, err := g.get.GetRows(rows)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = rows.Err(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn ms, nil\n}", "func (o *PolicyPaginationAllOf) GetData() []PolicyExtended {\n\tif o == nil {\n\t\tvar ret []PolicyExtended\n\t\treturn ret\n\t}\n\n\treturn o.Data\n}", "func GetData(db *sql.DB) ([]*Data, error) {\n\trows, err := db.Query(selectSQL)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Get Data Error: %w\", err)\n\t}\n\n\tallData := make([]*Data, 0)\n\tfor rows.Next() {\n\t\tvar dataAddedTime int64\n\n\t\tvar d Data\n\t\tif err := rows.Scan(&d.ID, &d.Location.Lat, &d.Location.Long, &dataAddedTime); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Get Data Error: %w\", err)\n\t\t}\n\t\td.DateAdded = time.Unix(dataAddedTime, 0)\n\t\tallData = append(allData, &d)\n\t}\n\n\treturn allData, nil\n}", "func (m *DBMockedObject) All(ctx context.Context) ([]entity.PersonalData, error) {\n\targs := m.Called(ctx)\n\treturn args.Get(0).([]entity.PersonalData), args.Error(1)\n}", "func (hm *HashMap) GetAllData() map[common.Hash]types.Unit {\n\thm.lock.RLock()\n\tdefer hm.lock.RUnlock()\n\treturn hm.data\n}", "func (db *jsonDB) QueryAll() ([]common.Task, bool) {\n\treturn db.data, len(db.data) > 0\n}", "func (ms *MemStore) GetAll(data map[string]io.ReaderFrom) error {\n\tvar err error\n\tms.mu.RLock()\n\tfor k, d := range data {\n\t\tbuf, ok := ms.data[k]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tif _, err = d.ReadFrom(&buf); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tms.mu.RUnlock()\n\treturn err\n}", "func (b *BookDao) GetAll() ([]models.Book, error) {\n\treturn parseJSONFile(dataSrc)\n}", "func (ds *DataStore) GetAll(baseURL string) ([]todos.Todo, error) {\n\tvar todos []todos.Todo\n\n\t// Get all the docs from the todos database.\n\trows, err := ds.DB.AllDocs(ds.ctx, kivik.Options{\"include_docs\": true})\n\tif err != nil {\n\t\treturn todos, fmt.Errorf(\"error getting all docs: %s\", err)\n\t}\n\n\t// Loop through each row and create a todo from the doc, which is added to\n\t// the list of todos.\n\tfor rows.Next() {\n\t\tvar doc todoDoc\n\t\tif err := rows.ScanDoc(&doc); err != nil {\n\t\t\treturn todos, fmt.Errorf(\"error scanning doc: %s\", err)\n\t\t}\n\t\ttodo := convertDocToTodo(doc)\n\t\ttodo.URL = baseURL + doc.ID\n\t\ttodos = append(todos, todo)\n\t}\n\n\treturn todos, nil\n}", "func (DiseaseUsecase *DiseaseUsecaseImpl) GetAll() (model.Diseases, error) {\n\tgolog.Info(\"Enter Get All Disease Usecase :\")\n\tersons, err := DiseaseUsecase.DiseasetRepository.FindAll()\n\tgolog.Info(\"finish !\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ersons, nil\n}", "func (d *Demo) GetAll(g *gom.Gom) int64 {\n\ttoolkit.Println(\"===== Get All =====\")\n\tres := []models.Hero{}\n\n\tvar cTotal int64\n\tvar err error\n\tif d.useParams {\n\t\tcTotal, err = g.Set(&gom.SetParams{\n\t\t\tTableName: \"hero\",\n\t\t\tResult: &res,\n\t\t\tTimeout: 10,\n\t\t}).Cmd().Get()\n\t} else {\n\t\tcTotal, err = g.Set(nil).Timeout(10).Table(\"hero\").Result(&res).Cmd().Get()\n\t}\n\n\tif err != nil {\n\t\ttoolkit.Println(err.Error())\n\t\treturn 
0\n\t}\n\n\ttoolkit.Println(len(res), \"of\", cTotal)\n\n\tfor _, h := range res {\n\t\ttoolkit.Println(h)\n\t}\n\n\treturn int64(len(res))\n}", "func (c *Command) GetAll(ctx *gin.Context) {\n\ttoken := strings.ToLower(html.EscapeString(ctx.Param(\"token\")))\n\tfilter := map[string]interface{}{\"token\": token}\n\tfromDB, err := c.Conn.GetByFilter(c.Table, filter, 0)\n\tif err != nil {\n\t\tutil.NiceError(ctx, err, http.StatusBadRequest)\n\t\treturn\n\t}\n\tif fromDB == nil {\n\t\tctx.JSON(http.StatusNotFound, make([]struct{}, 0))\n\t\treturn\n\t}\n\n\tvar respDecode ResponseSchema\n\tvar decoded = make([]map[string]interface{}, len(fromDB))\n\tfor pos, record := range fromDB {\n\t\t// If there's an issue decoding it, just log it and move on to the next record\n\t\tif err := mapstruct.Decode(record, &respDecode); err != nil {\n\t\t\tlog.Error(err.Error())\n\t\t\tcontinue\n\t\t}\n\t\tmarshalled := util.MarshalResponse(respDecode)\n\t\tdecoded[pos] = map[string]interface{}{\n\t\t\t\"id\": marshalled[\"data\"].(map[string]interface{})[\"id\"],\n\t\t\t\"attributes\": marshalled[\"data\"].(map[string]interface{})[\"attributes\"],\n\t\t\t\"meta\": marshalled[\"meta\"],\n\t\t}\n\t}\n\tvar response = make(map[string]interface{})\n\n\tresponse[\"data\"] = decoded\n\n\tctx.Header(\"x-total-count\", fmt.Sprint(len(decoded)))\n\tctx.JSON(http.StatusOK, response)\n}", "func (m MariaDB) All(ctx context.Context) ([]entity.PersonalData, error) {\n\tsqlQuery := fmt.Sprintf(\"SELECT * FROM person\")\n\tvar p personalData\n\tvar persons []entity.PersonalData\n\trows, err := m.Person.QueryContext(ctx, sqlQuery)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not make query\")\n\t}\n\tdefer rows.Close()\n\tfor rows.Next() {\n\t\terr = rows.Scan(&p.ID, &p.Name, &p.LastName, &p.Phone, &p.Email, &p.YearOfBirth)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"could not scan rows\")\n\t\t}\n\t\tpersons = append(persons, p.transmit())\n\t}\n\tif err = rows.Err(); err != nil {\n\t\treturn nil, errors.Wrap(err, \"rows error\")\n\t}\n\treturn persons, nil\n}", "func getAll(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-type\", \"application/json\")\n\n\t// sending query over db object and storing respose in var result\n\tresult, err := db.Query(\"SELECT fname, lname, email, pword, id FROM person\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tdefer result.Close()\n\n\t// to fetch one record at a time from result\n\tfor result.Next() {\n\n\t\t// creating a variable person to store the and then show it\n\t\tvar person Person\n\t\terr := result.Scan(&person.Fname, &person.Lname, &person.Email, &person.Pword, &person.Id)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tpeople = append(people, person)\n\t}\n\t// Encode json to be sent to client machine\n\tjson.NewEncoder(w).Encode(people)\n}", "func GetData(c *gin.Context) {\r\n\tvar predict []Models.Predict\r\n\terr := Models.GetAllData(&predict)\r\n\tif err != nil {\r\n\t\tc.AbortWithStatus(http.StatusNotFound)\r\n\t} else {\r\n\t\tc.JSON(http.StatusOK, predict)\r\n\t}\r\n}", "func (ds DataSource) GetAll(scope scope.Scope, key string) ([]string, error) {\n\treturn gitconfiglegacy.GetAll(scope, key)\n}", "func (db *DB) GetAll(ctx context.Context) ([]*databroker.Record, error) {\n\treturn db.getAll(ctx, func(record *databroker.Record) bool { return true })\n}", "func (m *MMSplitStorage) All() []dtos.SplitDTO {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\tsplitList := make([]dtos.SplitDTO, 0)\n\tfor _, split 
:= range m.data {\n\t\tsplitList = append(splitList, split)\n\t}\n\treturn splitList\n}", "func GetAll(ctx context.Context) []ds.Entity {\n\tfromCache := cache.GetProjectsAll(ctx)\n\tif fromCache != nil {\n\t\treturn fromCache\n\t}\n\n\tfromDataStore := ds.GetAll(ctx)\n\tcache.AddProjectsAll(ctx, fromDataStore)\n\n\treturn fromDataStore\n}", "func (dao *OHLCVDao) GetAll() ([]types.Tick, error) {\n\tvar response []types.Tick\n\terr := db.Get(dao.dbName, dao.collectionName, bson.M{}, 0, 0, &response)\n\tif err != nil {\n\t\tlogger.Error(err)\n\t\treturn nil, err\n\t}\n\n\treturn response, nil\n}", "func GetAll(w http.ResponseWriter, r *http.Request) {\n\tdata := Data{\n\t\tParam: &Param{\n\t\t\tID: chi.URLParam(r, \"id\"),\n\t\t\tName: r.URL.Query()[\"name\"][0],\n\t\t},\n\t\tHeader: &Header{\n\t\t\tToken: r.Header.Get(\"token\"),\n\t\t},\n\t\tBody: &Body{\n\t\t\tMessage: r.FormValue(\"message\"),\n\t\t},\n\t}\n\n\tjsonData, err := json.Marshal(data)\n\tif err != nil {\n\t\tlog.Println(\"error while marshaling: \", err)\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\tw.Write(jsonData)\n}", "func (q currentChartDataMinutelyQuery) All(exec boil.Executor) (CurrentChartDataMinutelySlice, error) {\n\tvar o []*CurrentChartDataMinutely\n\n\terr := q.Bind(nil, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to CurrentChartDataMinutely slice\")\n\t}\n\n\tif len(currentChartDataMinutelyAfterSelectHooks) != 0 {\n\t\tfor _, obj := range o {\n\t\t\tif err := obj.doAfterSelectHooks(exec); err != nil {\n\t\t\t\treturn o, err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn o, nil\n}", "func (user UserModel) GetAll() (interface{}, error) {\n\tconn, db := config.DataBase()\n\tdefer db.Close()\n\tvar query = \"SELECT * FROM \" + user.TableName()\n\tvar response []userModelResponse\n\tif err := conn.Raw(query).Scan(&response).Error; err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "func GetDatasets(a *config.AppContext) ([]models.Dataset, error) {\n\tresults := []models.Dataset{}\n\terr := a.Db.Where(\"user_id = ?\", a.Session.User.ID).Find(&results).Error\n\treturn results, err\n}", "func (o *PermissionOptionsPagination) GetData() []string {\n\tif o == nil {\n\t\tvar ret []string\n\t\treturn ret\n\t}\n\n\treturn o.Data\n}", "func (s *UserService) GetAll() ([]model.User , error){\n\t// variable for all user\n\tvar users []model.User\n\t// get all data in user based on filter if err print error\n\tcursor, err := conn.Collection(\"user\").Find(context.Background(), bson.D{})\n\tif err != nil {\n\t\tprintln(err)\n\t}\n\t// iterate all cursor and append it to users slice\n\tfor cursor.Next(context.TODO()) {\n elem := model.User{}\n if err := cursor.Decode(&elem); err != nil {\n log.Fatal(err)\n }\n users = append(users, elem)\n\t}\n\t// return user\n\treturn users, nil\n}", "func (c InfluxDBClient) FetchAll(start int64, end int64) ([]EventModel, error) {\n\tlog.Printf(\"Fetch all events from now - %ds to now - %ds\", start, end)\n\n\tcmd := fmt.Sprintf(`SELECT * FROM %s\n\t\t\t\t\t\tWHERE time >= NOW() - %ds AND time <= NOW() - %ds`, eventsTableName, start, end)\n\n\tlog.Println(\"Query data with command\", cmd)\n\n\tresponse, err := c.queryDB(cmd)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c.parseResponse(response)\n}", "func (dao CompanyDAOPsql) GetAll() ([]models.Company, error) {\n\tquery := `SELECT id, identification_type_id, identification_number, verification_digit, company, 
address, phone, departments_id, cities_id, web, email, activity, autorretenedor, person_type_id, regime_type_id, taxpayer_type_id, logo, created_at, updated_at\n\t\t\t\tFROM companies ORDER BY id`\n\tobjs := make([]models.Company, 0)\n\tdb := get()\n\tdefer db.Close()\n\n\tstmt, err := db.Prepare(query)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer stmt.Close()\n\n\trows, err := stmt.Query()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar o models.Company\n\t\terr = rows.Scan(&o.ID, &o.IdentificationType.ID, &o.IdentificationNumber, &o.VerificationDigit, &o.Company, &o.Address, &o.Phone, &o.Department.ID, &o.City.ID, &o.Web, &o.Email, &o.Activity, &o.AutoRretenedor, &o.PersonType.ID, &o.RegimeType.ID, &o.TaxpayerType.ID, &o.Logo, &o.CreatedAt, &o.UpdatedAt)\n\t\tif err != nil {\n\t\t\treturn objs, err\n\t\t}\n\t\tobjs = append(objs, o)\n\t}\n\treturn objs, nil\n}", "func (m newsManager) All() newsList {\n\tlog.Printf(\"Gettin all news in newsManager\")\n\tfewNews := make(newsList, 0, 5)\n\tfor i := 0; i < 5; i++ {\n\t\tfewNews = append(fewNews, getOneNews(fmt.Sprintf(\"Some news %d\", i+1), \"Some text\"))\n\t}\n\treturn fewNews\n}", "func QueryData() OneData {\n\tvar data OneData\n\tdata.ModValueData = make([]AllModvalue, len(allstr))\n\tdata.ModInfoData = make([]AllModinfo, len(allstr))\n\tbody := MakeRequest(ipserver + strAllMB)\n\tjson.Unmarshal(body, &data.ModbusData)\n\tfor num, str := range allstr {\n\t\tbody := MakeRequest(ipserver + strModval + str)\n\t\tjson.Unmarshal(body, &data.ModValueData[num])\n\t\tif !modInfofirst {\n\t\t\tbody = MakeRequest(ipserver + strModinfo + str)\n\t\t\tjson.Unmarshal(body, &data.ModInfoData[num])\n\t\t}\n\t}\n\treturn data\n}", "func (c *UserRepoImpl) ReadAll() ([]*model.User, error) {\n\tuserList := make([]*model.User, 0)\n\tif err := c.db.Table(\"user\").Find(&userList).Error; err != nil {\n\t\tlogrus.Error(err)\n\t\treturn nil, errors.New(\"get user list data : error \")\n\t}\n\treturn userList, nil\n}", "func (t *MedChain) getAll(stub shim.ChaincodeStubInterface, args []string) peer.Response {\n\t\t// ==== Input sanitation ====\n\t\tfmt.Println(\"- start getAll\")\n\n\t\t// query \n\t\tqueryString := \"SELECT blockNo, key, valueJson FROM <STATE> WHERE 1\"\n\t\t\n\t\tqueryResults, err := getQueryResultForQueryString(stub, queryString)\n\t\t\n\t\tif err != nil {\n\t\t\treturn shim.Error(err.Error())\n\t\t}\n\t\t\n\t\treturn shim.Success(queryResults)\n\t}", "func GetData(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\t//startTime := time.Now()\n\tr.ParseForm()\n\tuser_id= strings.Join(r.Form[\"userid\"], \"\")\n\tfmt.Println(\"userNum is :\", user_id)\n\tctx := context.Background()\n\tdsClient, err := datastore.NewClient(ctx, \"kouzoh-p-codechaitu\")\n\tif err != nil {\n\t\t// Handle error.\n\t\tfmt.Println(\"error in creating client\")\n\t}\n\tuser_id_int64, _ := strconv.ParseInt(user_id, 10, 64)\n\tk := datastore.IDKey(\"Recommendations\", user_id_int64, nil)\n\n\tvar e Entity\n\tif err := dsClient.Get(ctx, k, &e); err != nil {\n\t\t// Handle error.\n\t\tfmt.Println(\"error in getting data\" + fmt.Sprint(err))\n\t}\n\tLen := len(e.Recommend)\n\toutput := \"[\"\n\tfor i := 0; i < Len; i++ {\n\t\tm := Recommends{e.Recommend[i].Name, e.Recommend[i].Price, e.Recommend[i].Index, e.Recommend[i].Num_likes, e.Recommend[i].Item_id}\n\t\tdata, err2 := json.Marshal(m)\n\t\tif err2 != nil {\n\t\t\tfmt.Println(\"wrong in json \" + fmt.Sprint(err2))\n\t\t}\n\t\tif (i < Len-1) 
{\n\n\t\t\toutput = output + string(data) + \",\"\n\t\t} else {\n\n\t\t\toutput = output + string(data)\n\t\t}\n\n\t}\n\toutput = output + \"]\"\n\tfmt.Println(output)\n\tfmt.Fprint(w, output)\n\n}", "func GetAllCurrencyController() (data currency.ArrCurrData, err error) {\n\t// init pkg\n\tpkg := currency.CurrPkg{}\n\n\t// get all datas\n\tdata, err = pkg.GetCurrency()\n\tif err != nil {\n\t\tif err != sql.ErrNoRows {\n\t\t\tlog.Error(err, \"error while retrieving data\")\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}", "func (b Engine) GetData() []Benchmark {\n\treturn b.Data\n}", "func (m *Manager) GetAll(ctx context.Context) ([]*hub.ChartRepository, error) {\n\tvar r []*hub.ChartRepository\n\terr := m.dbQueryUnmarshal(ctx, &r, \"select get_chart_repositories()\")\n\treturn r, err\n}", "func GetAllUserData() (allUserData map[string]interface{}, err error) {\n\n err = checkInit()\n if err != nil {\n return\n }\n\n if len(data) == 0 {\n var defaults = make(map[string]interface{})\n defaults[\"dbVersion\"] = \"1.0\"\n defaults[\"hash\"] = \"sha256\"\n defaults[\"users\"] = make(map[string]interface{})\n saveDatabase(defaults)\n data = defaults\n }\n\n allUserData = data[\"users\"].(map[string]interface{})\n return\n}", "func (h *Handler) getAll(c *gin.Context) {\n\tcontent, err := h.services.MainPage.GetAll()\n\n\tif err != nil {\n\t\tNewErrorResponse(c, http.StatusInternalServerError, \"bad\", err.Error())\n\t\treturn\n\t}\n\n\tif content.News == nil {\n\t\tcontent.News = []models.MiniNews{}\n\t}\n\n\tif content.Courses == nil {\n\t\tcontent.Courses = []models.MiniCourses{}\n\t}\n\n\tc.JSON(http.StatusOK, content)\n}", "func (d *Driver) getAllData() (map[string][]byte, error) {\n\t// check if the db file exists\n\t_, err := os.Stat(d.secretsDataFilePath)\n\tif err != nil {\n\t\tif errors.Is(err, os.ErrNotExist) {\n\t\t\t// the file will be created later on a store()\n\t\t\treturn make(map[string][]byte), nil\n\t\t}\n\t\treturn nil, err\n\t}\n\n\tfile, err := os.Open(d.secretsDataFilePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer file.Close()\n\n\tbyteValue, err := io.ReadAll(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsecretData := new(map[string][]byte)\n\terr = json.Unmarshal([]byte(byteValue), secretData)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn *secretData, nil\n}", "func GetAll() (result []Operation) {\n\tview(func(k, v []byte) {\n\t\top := &Operation{}\n\t\tboltdb.DecodeValue(v, op, cfg.DataEncoding)\n\t\tresult = append(result, *op)\n\t})\n\treturn\n}", "func (b BladeStorage) GetAll(offset string, limit string) (count int, blades []model.Blade, err error) {\n\tif offset != \"\" && limit != \"\" {\n\t\tif err = b.db.Limit(limit).Offset(offset).Order(\"serial asc\").Find(&blades).Error; err != nil {\n\t\t\treturn count, blades, err\n\t\t}\n\t\tb.db.Model(&model.Blade{}).Order(\"serial asc\").Count(&count)\n\t} else {\n\t\tif err = b.db.Order(\"serial\").Find(&blades).Error; err != nil {\n\t\t\treturn count, blades, err\n\t\t}\n\t}\n\treturn count, blades, err\n}", "func (r *ItemsRepository) getAll() (*[]Item, error) {\n\tvar items *[]Item\n\tif query := r.databaseHandler.DB().Find(&items); query.Error != nil {\n\t\treturn nil, query.Error\n\t}\n\treturn items, nil\n}", "func (f *fsDataPlatform) getData(ctx context.Context) ([]byte, error) {\n\t// If the path is to a document, fulfill the request with the document.\n\tif f.isDoc {\n\t\tdoc, err := f.client.Doc(f.itemPath).Get(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdocItem := 
doc.Data()\n\t\treturn json.Marshal(&docItem)\n\t}\n\n\t// Otherwise the request is for a collection.\n\tq := f.client.Collection(f.itemPath)\n\n\t// Get all the documents in a single read. Only a single read is charged.\n\tdocs, err := q.Documents(ctx).GetAll()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Create a slice of maps to hold the firestore result set.\n\tres := []map[string]interface{}{}\n\n\tfor _, doc := range docs {\n\t\t// Adding the doc id to the result for ease of use.\n\t\td := doc.Data()\n\t\td[\"docid\"] = doc.Ref.ID\n\n\t\t// Append the doc to the map so it can be marshaled.\n\t\tres = append(res, d)\n\t}\n\n\treturn json.Marshal(res)\n}", "func (s UserSet) All() []m.UserData {\n\tallSlice := s.RecordCollection.All()\n\tres := make([]m.UserData, len(allSlice))\n\tfor i, v := range allSlice {\n\t\tres[i] = &UserData{v}\n\t}\n\treturn res\n}", "func (q automodRuleDatumQuery) All(ctx context.Context, exec boil.ContextExecutor) (AutomodRuleDatumSlice, error) {\n\tvar o []*AutomodRuleDatum\n\n\terr := q.Bind(ctx, exec, &o)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"models: failed to assign all query results to AutomodRuleDatum slice\")\n\t}\n\n\treturn o, nil\n}", "func GetRPLSDatas(ctx iris.Context) {\n\tvar resp rplsDatasResp\n\tdb := ctx.Values().Get(\"db\").(*sql.DB)\n\tif err := resp.RPLSArray.GetAll(db); err != nil {\n\t\tctx.JSON(jsonError{\"Datas RPLS, requête RPLS : \" + err.Error()})\n\t\tctx.StatusCode(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tif err := resp.Cities.GetAll(db); err != nil {\n\t\tctx.JSON(jsonError{\"Datas RPLS, requête Cities : \" + err.Error()})\n\t\tctx.StatusCode(http.StatusInternalServerError)\n\t\treturn\n\t}\n\tctx.StatusCode(http.StatusOK)\n\tctx.JSON(resp)\n}", "func (req *Request) DataList() []Data {\n\treturn []Data{req}\n}", "func FindAll() ([]entity.Hechizo, error) {\n\n\tlog.Println(\"---- Consulta FindAll ----\")\n\tdb := dbUtils.Connect()\n\tdefer dbUtils.Close(db)\n\n\tvar id int64\n\tvar mana, counter int\n\tvar nombre string\n\n\tquery := fmt.Sprintf(\"SELECT * FROM %v\", tabla)\n\tresultQuery, error := db.Query(query)\n\n\thechizos := make([]entity.Hechizo, 0)\n\tif error != nil {\n\t\treturn hechizos, error\n\t}\n\n\tfor resultQuery.Next() {\n\n\t\terrorSelect := resultQuery.Scan(&id, &nombre, &mana)\n\t\tif errorSelect != nil {\n\t\t\tlog.Println(errorSelect)\n\t\t\tcontinue\n\t\t}\n\t\tcounter++\n\t\thechizosDb := entity.Hechizo{Id: id, Nombre: nombre, Mana: mana}\n\t\thechizos = append(hechizos, hechizosDb)\n\t}\n\treturn hechizos, nil\n}", "func (o *StatsAppliancesListAllOf) GetDataOk() (*[]StatsAppliancesListAllOfData, bool) {\n\tif o == nil || o.Data == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Data, true\n}", "func GetAll() []UnidadMgo {\n\tvar result []UnidadMgo\n\ts, Unidads, err := MoConexion.GetColectionMgo(MoVar.ColeccionUnidad)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\terr = Unidads.Find(nil).All(&result)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\ts.Close()\n\treturn result\n}", "func ListAll() []ModelUser {\n\tcommon.Logger(\"info\", \"Initialize Get Database in PostgreSQL\", \"Modul User : ListAll\")\n\tdb := common.GetPostgreSQLDB()\n\n\ttx := db.Begin()\n\n\tcommon.Logger(\"info\", \"Prepare Query Select Table in Database PostgreSQL\", \"Modul User : ListAll\")\n\tvar models []ModelUser\n\n\tcommon.Logger(\"info\", \"Prepare Read Data from PostgreSQL\", \"Modul User : ListAll\")\n\ttx.Find(&models)\n\n\ttx.Commit()\n\tcommon.Logger(\"info\", \"Finnished Read Data 
from PostgreSQL\", \"Modul User : ListAll\")\n\n\treturn models\n}", "func (db *DB) GetAll(ctx context.Context) (recs []*databroker.Record, err error) {\n\t_, span := trace.StartSpan(ctx, \"databroker.redis.GetAll\")\n\tdefer span.End()\n\tdefer recordOperation(ctx, time.Now(), \"get_all\", err)\n\treturn db.getAll(ctx, func(record *databroker.Record) bool { return true })\n}", "func (s BmProductaggdataStorage) GetAll(r api2go.Request, skip int, take int) []*BmModel.Productaggregation {\n\tin := BmModel.Productaggregation{}\n\tvar out []BmModel.Productaggregation\n\terr := s.db.FindMulti(r, &in, &out, skip, take)\n\tif err == nil {\n\t\tvar tmp []*BmModel.Productaggregation\n\t\tfor i := 0; i < len(out); i++ {\n\t\t\tptr := out[i]\n\t\t\ts.db.ResetIdWithId_(&ptr)\n\t\t\ttmp = append(tmp, &ptr)\n\t\t}\n\t\treturn tmp\n\t} else {\n\t\treturn nil //make(map[string]*BmModel.Productaggregation)\n\t}\n}", "func GetAll(coll string) bson.D {\n\tfmt.Println(\"controllers: get all invoked\")\n\tctx, _ := context.WithTimeout(context.Background(), 50*time.Second)\n\n\tcurs, err := db.Collection(coll).Find(ctx, bson.D{})\n\tutilities.Catch(err)\n\n\tdefer curs.Close(ctx)\n\n\telements := bson.D{}\n\n\tfor curs.Next(ctx) {\n\t\terr := curs.Decode(&elements)\n\t\tutilities.Catch(err)\n\t}\n\n\treturn elements\n}", "func DisplayAll() {\n\n\tif len(dataStorageMap) == 0 {\n\t\tfmt.Println(\"Data Storage Empty!!! No data Found !!!\")\n\t} else {\n\t\tfor key, val := range dataStorageMap {\n\t\t\tfmt.Println(key, \"-->\", val)\n\t\t}\n\t}\n}", "func (tr *Repository) All() []es.Event {\n\tdata := tr.DB.Get()\n\treturn data\n}", "func getAll(w http.ResponseWriter, r *http.Request) {\n\tenableCors(&w)\n\trows, err := mainDB.Query(\"SELECT * FROM testTable\")\n\tcheckErr(err)\n\tvar logs Logs\n\tfor rows.Next() {\n\t\tvar log Log\n\t\terr = rows.Scan(&log.ID, &log.Time, &log.Level, &log.Msg, &log.Category, &log.DebugId, &log.Ip, &log.RequestId, &log.Type, &log.Uri, &log.UserId)\n\t\tcheckErr(err)\n\t\tlogs = append(logs, log)\n\t}\n\tjsonB, errMarshal := json.Marshal(logs)\n\tcheckErr(errMarshal)\n\tfmt.Fprintf(w, \"%s\", string(jsonB))\n}", "func (u *UserCtr) GetUserAll(c *gin.Context) {\n\tusers, err := model.UserAll(u.DB)\n\tif err != nil {\n\t\tresp := errors.New(err.Error())\n\t\tc.JSON(http.StatusInternalServerError, resp)\n\t\treturn\n\t}\n\n\tif len(users) == 0 {\n\t\tc.JSON(http.StatusOK, make([]*model.User, 0))\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"result\": users,\n\t})\n\treturn\n}", "func (s BmCategoryStorage) GetAll(r api2go.Request, skip int, take int) []*BmModel.Category {\n\tin := BmModel.Category{}\n\tvar out []BmModel.Category\n\terr := s.db.FindMulti(r, &in, &out, skip, take)\n\tif err == nil {\n\t\tvar tmp []*BmModel.Category\n\t\t//tmp := make(map[string]*BmModel.Category)\n\t\tfor _, iter := range out {\n\t\t\ts.db.ResetIdWithId_(&iter)\n\t\t\ttmp = append(tmp, &iter)\n\t\t\t//tmp[iter.ID] = &iter\n\t\t}\n\t\treturn tmp\n\t} else {\n\t\treturn nil //make(map[string]*BmModel.Category)\n\t}\n}", "func (a *ApiDB) GetallUserName(w http.ResponseWriter, r *http.Request) {\n\t// Query()[\"key\"] will return an array of items,\n\t// we only want the single item.\n\n\tallusername := BUSINESS.GetAllUserName(a.Db)\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tif allusername == nil {\n\t\t//w.WriteHeader(http.StatusBadRequest)\n\t\tio.WriteString(w, `{\"message\":\"get all username unsuccess\"}`)\n\t\treturn\n\t}\n\ttype result struct {\n\t\tMessage string 
`json:\"message\"`\n\t\tData []string `json:\"data\"`\n\t}\n\tResult, _ := json.Marshal(result{Message: \"get all username success\", Data: allusername})\n\t//w.WriteHeader(200)\n\tio.WriteString(w, string(Result))\n}", "func TestAPIGetAll() error {\n\ttestRead := testCase{\n\t\tinput: \"\",\n\t\texpected: `[{\"FirstName\":\"Alec\", \"LastName\":\"Perro\", \"Age\":5}]`,\n\t}\n\n query, err := dm.Read(1)\n if err != nil {\n log.Fatal(err)\n }\n\n\tjsonify, err := json.Marshal(query)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif testRead.expected != string(jsonify) {\n\t\treturn errors.New(\"testDB failed\")\n\t}\n\n\tfmt.Println(\"Tests passed\")\n\treturn nil\n}", "func FetchAllUsers(c *gin.Context) {\n\ttype userQuery struct {\n\t\tID int64\n\t\tUsername string\n\t\tGender string\n\t}\n\tvar users []userQuery\n\tdb.DB.Table(\"users\").\n\t\tSelect(\"id, username, gender\").\n\t\tScan(&users)\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"msg\": len(users),\n\t\t\"err\": \"\",\n\t\t\"data\": users,\n\t})\n\n}", "func AllEvents() (*[]entity.Event,error){\n\tclient := &http.Client{}\n\tURL := fmt.Sprintf(\"%s%s\",baseEventURL,\"allevents\")\n\treq,_ := http.NewRequest(\"GET\",URL,nil)\n\n\t//DO return an http responce\n\tres,err := client.Do(req)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\teventData := &[]entity.Event{}\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(body,eventData)\n\tif err != nil{\n\t\treturn nil,err\n\t}\n\treturn eventData,nil\n}", "func (s *permisoService) GetAll() ([]model.Permiso, error) {\n\t// return s.service.GetAll()\n\n\t// return repo.GetAll()\n\n\tpermisos, _ := repo.GetAll()\n\n\tfor _, v := range permisos {\n\t\tv.ID = \"55\"\n\t}\n\n\treturn permisos, nil\n\n}", "func All() []Sample {\n\tvar list []Sample\n\tfor k := range datasets {\n\t\tds := k\n\t\tlist = append(list, ds)\n\t}\n\n\treturn list\n}", "func (ds *gcdatastore) GetAll(c context.Context, query *datastore.Query, dest interface{}) (keys []*datastore.Key, err error) {\n\treturn ds.Client.GetAll(c, query, dest)\n}", "func ReadDataKarya() []Karya {\n\tdb := Conn()\n\tdefer db.Close()\n\trows, err := db.Query(\"SELECT Id_karya, Kategory, Judul, Deskripsi FROM tb_karya\")\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tall_karya := []Karya{}\n\tfor rows.Next() {\n\t\ts := Karya{}\n\t\terr = rows.Scan(&s.Id_karya, &s.Kategory, &s.Judul, &s.Deskripsi)\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t\tall_karya = append(all_karya, s)\n\t}\n\treturn all_karya\n}", "func (h *Handler) GetAll(c echo.Context) error {\n\tid := c.Param(\"id\")\n\tdb := h.DB.Clone()\n\tdefer db.Close()\n\n\tvar results []*particleio.Result\n\tif err := db.DB(\"oxylus\").C(\"metrics\").Find(bson.M{\"uuid\": id}).Sort(\"time\").All(&results); err != nil {\n\t\tlog.Println(err)\n\t}\n\treturn c.JSON(http.StatusOK, results)\n}", "func JSONGetAll(table database.Table, w http.ResponseWriter, r *http.Request, sb *sqrl.SelectBuilder) {\n\ttable.Clear()\n\n\tquery, params, _ := sb.ToSql()\n\n\tif params != nil {\n\t\terrs := db.DB.Select(table.GetItems(), query, params[0])\n\t\tfmt.Println(errs)\n\t} else {\n\t\terrs := db.DB.Select(table.GetItems(), query)\n\t\tfmt.Println(errs)\n\t}\n\t//table.GetPrimaryKey()\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(table)\n}", "func (ps *salesService) GetAll() ([]model.Sales, error) {\n\tsales, err := ps.salesRepo.GetAll()\n\tif err != nil {\n\t\treturn sales, 
err\n\t}\n\treturn sales, nil\n}", "func (s Store) ReadAll() ([]ParameterSet, error) {\n\tresults, err := s.backingStore.ReadAll(ItemType, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tparams := make([]ParameterSet, len(results))\n\tfor i, bytes := range results {\n\t\tvar cs ParameterSet\n\t\terr = json.Unmarshal(bytes, &cs)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"error unmarshaling parameter set: %v\", err)\n\t\t}\n\t\tparams[i] = cs\n\t}\n\n\treturn params, nil\n}", "func (C *Commander) GetData(writer http.ResponseWriter, request *http.Request) {\n\tvar error model.Error\n\tdb := database.DbConn()\n\tdefer func() {\n\t\terr := db.Close()\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t}()\n\tif strings.Contains(Role, \"program manager\") == true {\n\t\tvar Offset int\n\t\tPages := request.URL.Query()[\"Pages\"]\n\t\tfmt.Println(Pages)\n\t\tif Pages[0] != \"\" {\n\t\t\tlimit, err := strconv.Atoi(request.URL.Query().Get(\"limit\"))\n\t\t\tif limit != 10 && limit != 20 && limit != 50 {\n\t\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\t\twriter.WriteHeader(http.StatusBadRequest)\n\t\t\t\terror.Message = \"Incorrect Limit Value\"\n\t\t\t\tjson.NewEncoder(writer).Encode(error)\n\t\t\t\treturn\n\t\t\t}\n\t\t\ti1, _ := strconv.Atoi(Pages[0])\n\t\t\tfmt.Println(i1)\n\t\t\tOffset = 10 * i1\n\t\t\tcount, _ := db.Query(\"SELECT COUNT(Id) FROM sub_project_manager WHERE sub_project_id in (SELECT id FROM sub_project WHERE project_id in (SELECT id FROM project WHERE program_manager_id in (SELECT id FROM program_manager where program_manager_email = ?)))\", UserName)\n\t\t\tdefer func() {\n\t\t\t\terr := count.Close()\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err.Error())\n\t\t\t\t}\n\t\t\t}()\n\t\t\tGetManagerDetails, err := db.Query(\"call GetAllManagerDetailsData(?, ?, ?)\", UserName, Offset, limit)\n\t\t\tif err != nil {\n\t\t\t\tWriteLogFile(err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tdefer func() {\n\t\t\t\terr := GetManagerDetails.Close()\n\t\t\t\tif err != nil {\n\t\t\t\t\tpanic(err.Error())\n\t\t\t\t}\n\t\t\t}()\n\t\t\tvar Total int\n\t\t\tvar ManagerDetailData model.Project\n\t\t\tvar ManagerDetailsData []model.Project\n\t\t\tfor GetManagerDetails.Next() {\n\t\t\t\tGetManagerDetails.Scan(&ManagerDetailData.ProjectName, &ManagerDetailData.SubProjectName, &ManagerDetailData.ManagerName, &ManagerDetailData.ManagerEmailID, &ManagerDetailData.Id)\n\t\t\t\tManagerDetailsData = append(ManagerDetailsData, ManagerDetailData)\n\t\t\t}\n\t\t\tif count.Next() != false {\n\t\t\t\tcount.Scan(&Total)\n\t\t\t} else {\n\t\t\t\tTotal = 0\n\t\t\t}\n\t\t\tvar PaginationFormat model.Pagination\n\t\t\tPaginationFormat.TotalData = Total\n\t\t\tPaginationFormat.Limit = limit\n\t\t\tPaginationFormat.Data = ManagerDetailsData\n\t\t\tx1 := Total / limit\n\t\t\tx := Total % limit\n\t\t\tif x == 0 {\n\t\t\t\tPaginationFormat.TotalPages = x1\n\t\t\t} else {\n\t\t\t\tPaginationFormat.TotalPages = x1 + 1\n\t\t\t}\n\t\t\tx, _ = strconv.Atoi(Pages[0])\n\t\t\tif PaginationFormat.TotalPages != 0 {\n\t\t\t\tx1 = x + 1\n\t\t\t}\n\t\t\tPaginationFormat.Page = x1\n\t\t\tsetupResponse(&writer, request)\n\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\twriter.WriteHeader(http.StatusOK)\n\t\t\tjson.NewEncoder(writer).Encode(PaginationFormat)\n\t\t} else {\n\t\t\twriter.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\twriter.WriteHeader(http.StatusBadRequest)\n\t\t\terror.Message = \"Incorrect Page 
Value\"\n\t\t\tjson.NewEncoder(writer).Encode(error)\n\t\t\treturn\n\n\t\t}\n\t} else {\n\t\twriter.WriteHeader(http.StatusNotFound)\n\t}\n}", "func All(w http.ResponseWriter, r *http.Request) {\n\tvar result []Location\n\terr := store.Find(&result, bolthold.Where(\"Serial\").Eq(\"ce011711bd1668d80c\").Index(\"Serial\"))\n\tif err != nil {\n\t\tfmt.Println(\"Err\")\n\t\tfmt.Println(err)\n\t}\n\n\tjson.NewEncoder(w).Encode(result)\n\n}", "func (a *Users) ListAll(w http.ResponseWriter, r *http.Request) {\n\ta.l.Println(\"[DEBUG] get all records\")\n\n\taccs := models.GetUsers()\n\n\t//err := models.ToJSON(accs, w)\n\terr := utils.Respond(w, accs)\n\tif err != nil {\n\t\t// we should never be here but log the error just incase\n\t\ta.l.Println(\"[ERROR] serializing user\", err)\n\t}\n}", "func GetList(tx *sql.Tx) (list []Info, err error) {\n\tmapper := rlt.NewAccountMapper(tx)\n\trows, err := mapper.FindAccountAll()\n\tfor _, row := range rows {\n\t\tinfo := Info{}\n\t\tinfo.ID = row.ID\n\t\tinfo.Domain = row.Domain.String\n\t\tinfo.UserName = row.UserName\n\t\tinfo.DisplayName = row.DisplayName\n\t\tinfo.Email = row.Email\n\t\tlist = append(list, info) //数据写入\n\t}\n\treturn list, err\n}", "func GetData(w http.ResponseWriter, r *http.Request) {\n\tresult := dailyData{}\n\tc := utils.MONGOSESSION.DB(\"healthDB\").C(\"healthData\")\n\tc.Find(bson.M{\"date\": utils.GetDate(time.Now())}).One(&result)\n\tb, _ := json.Marshal(result)\n\tfmt.Fprintf(w, string(b))\n}", "func (m *MetadataMock) GetAll(ctx context.Context) (*clientCPproto.MetadataArray, error) {\n\targs := m.Called()\n\treturn args.Get(0).(*clientCPproto.MetadataArray), args.Error(1)\n}", "func (d Data) GetAllUsers(ctx context.Context) ([]userEntity.User, error) {\n\tvar (\n\t\tuser userEntity.User\n\t\tusers []userEntity.User\n\t\terr error\n\t)\n\n\t// Query ke database\n\trows, err := d.stmt[getAllUsers].QueryxContext(ctx)\n\n\t// Looping seluruh row data\n\tfor rows.Next() {\n\t\t// Insert row data ke struct user\n\t\tif err := rows.StructScan(&user); err != nil {\n\t\t\treturn users, errors.Wrap(err, \"[DATA][GetAllUsers] \")\n\t\t}\n\t\t// Tambahkan struct user ke array user\n\t\tusers = append(users, user)\n\t}\n\t// Return users array\n\treturn users, err\n}", "func (d Data) GetAllUsers(ctx context.Context) ([]userEntity.User, error) {\n\tvar (\n\t\tuser userEntity.User\n\t\tusers []userEntity.User\n\t\terr error\n\t)\n\n\t// Query ke database\n\trows, err := d.stmt[getAllUsers].QueryxContext(ctx)\n\n\t// Looping seluruh row data\n\tfor rows.Next() {\n\t\t// Insert row data ke struct user\n\t\tif err := rows.StructScan(&user); err != nil {\n\t\t\treturn users, errors.Wrap(err, \"[DATA][GetAllUsers] \")\n\t\t}\n\t\t// Tambahkan struct user ke array user\n\t\tusers = append(users, user)\n\t}\n\t// Return users array\n\treturn users, err\n}", "func AllClientes(w http.ResponseWriter, r *http.Request) {\n\n\tq := `\n\t{\n\t\tclientes(func: eq(dgraph.type,[\"Cliente\"])) @filter(has(name)) @cascade {\n\t\t uid\n\t\t name\n\t\t age\n\t\t UIDOLD\n\t\t dgraph.type\n\t\t}\n\t} \n\t `\n\n\tConsultaDataBase(q, func(data []byte) {\n\t\tccc := Clientes{}\n\t\terr3312 := json.Unmarshal(data, &ccc)\n\t\tif err3312 == nil {\n\t\t\tfmt.Println(\"Clientes = \" + parseString(len(ccc.Clientes)))\n\t\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\tw.Write(data)\n\t\t} else {\n\t\t\trespondwithJSON(w, http.StatusOK, `{\"result\":\"error\"}`)\n\t\t}\n\t})\n}", "func (us UserService) ListAll() string {\n\t// Read data from 
file\n\tallUsers := us.Store.Read()\n\t// JSON message to Struct\n\tusers := jsontoStruct(allUsers)\n\t// Formatting output\n\treturn formatting(users)\n}", "func (dao *DAOName) GetAll() ([]ReferenceModel, error) {\n\tm := []ReferenceModel{}\n\tif err := dao.db.Find(&m).Error; err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn m, nil\n}", "func TestTestdataAll(t *testing.T) {\n\tout := new(bytes.Buffer)\n\terr := listPath(out, \"testdata\", true, false, true)\n\tif err != nil {\n\t\tt.Errorf(\"unexpected error:%s\", err)\n\t}\n\tresult := out.String()\n\tif result != resultTestdataAll {\n\t\tt.Errorf(\"bad result\\nexpected:\\n%v\\ngot:\\n%v\\n\", result, resultTestdataAll)\n\t}\n}", "func (db *DB) GetAll(ip string) (*Record, error) { return db.query(ip, ModeDB24) }", "func All() ([]Mineral, error) {\n\tvar d Data\n\n\tjsonFile, err := os.Open(config.Cfg.WorldDataDirectory + \"/data/minerals.json\")\n\tif err != nil {\n\t\terr = fmt.Errorf(\"could not open data file: %w\", err)\n\t\treturn []Mineral{}, err\n\t}\n\n\tdefer jsonFile.Close()\n\n\tbyteValue, _ := ioutil.ReadAll(jsonFile)\n\n\tjson.Unmarshal(byteValue, &d)\n\n\tall := d.Minerals\n\n\tif len(all) == 0 {\n\t\terr = fmt.Errorf(\"no minerals returned from database: minerals.json\")\n\t\treturn []Mineral{}, err\n\t}\n\n\treturn all, nil\n}", "func (device *SilentStepperBrick) GetAllData() (currentVelocity uint16, currentPosition int32, remainingSteps int32, stackVoltage uint16, externalVoltage uint16, currentConsumption uint16, err error) {\n\tvar buf bytes.Buffer\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetAllData), buf.Bytes())\n\tif err != nil {\n\t\treturn currentVelocity, currentPosition, remainingSteps, stackVoltage, externalVoltage, currentConsumption, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 24 {\n\t\t\treturn currentVelocity, currentPosition, remainingSteps, stackVoltage, externalVoltage, currentConsumption, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 24)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn currentVelocity, currentPosition, remainingSteps, stackVoltage, externalVoltage, currentConsumption, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &currentVelocity)\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &currentPosition)\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &remainingSteps)\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &stackVoltage)\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &externalVoltage)\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &currentConsumption)\n\n\t}\n\n\treturn currentVelocity, currentPosition, remainingSteps, stackVoltage, externalVoltage, currentConsumption, nil\n}", "func loadData() jsonObj {\n\t/*\n\t\tAPI: https://opentdb.com/api_config.php\n\t*/\n\turl := \"https://opentdb.com/api.php?amount=10&type=multiple\"\n\tclient := http.Client{\n\t\tTimeout: time.Second * 2,\n\t}\n\treq, err := http.NewRequest(http.MethodGet, url, nil)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tres, getErr := client.Do(req)\n\tif(getErr != nil) {\n\t\tlog.Fatal(getErr)\n\t}\n\tif(res.Body != nil) {\n\t\tdefer res.Body.Close()\n\t}\n\n\tbody, readErr := ioutil.ReadAll(res.Body)\n\tif(readErr != nil) {\n\t\tlog.Fatal(readErr)\n\t}\n\tbyteBody := []byte(body)\n\tvar questions jsonObj\n\terr = json.Unmarshal(byteBody, &questions)\n\n\tif err == 
nil {\n\t\tfmt.Printf(\"Data loaded successfully!\")\n\t} else {\n\t\tfmt.Printf(\"%s\\n\", err)\n\t}\n\n\treturn questions\n}", "func ExtractAllData() (AllDataStruct, int) {\n\tvar wifiData, lengthWiFiData = ExtractWifiData()\n\tvar browserData, lengthBrowserData = ExtractBrowserCredentials()\n\tvar credmanData, lengthCredmanData = ExtractCredmanData()\n\tvar sysadminData = ExtractSysadminData()\n\n\tvar outDataStruct AllDataStruct\n\n\tif lengthWiFiData > 0 {\n\t\toutDataStruct.WifiData = wifiData\n\t}\n\tif lengthBrowserData > 0 {\n\t\toutDataStruct.BrowserData = browserData\n\t}\n\tif lengthCredmanData > 0 {\n\t\toutDataStruct.CredmanData = credmanData\n\t}\n\toutDataStruct.SysadminData = sysadminData\n\n\treturn outDataStruct, lengthCredmanData + lengthBrowserData + lengthWiFiData\n}", "func GetAllDatanode() (int32, []*vp.DataNode) {\n\t_, conn, err := utils.DialVolMgr(VolMgrHosts)\n\tif err != nil {\n\t\tlogger.Error(\"GetAllDatanode failed,Dial to VolMgrHosts fail :%v\", err)\n\t\treturn -1, nil\n\t}\n\tdefer conn.Close()\n\tvc := vp.NewVolMgrClient(conn)\n\tpGetDataNodeReq := &vp.GetDataNodeReq{}\n\tctx, _ := context.WithTimeout(context.Background(), VOLUME_TIMEOUT_SECONDS*time.Second)\n\tpGetDataNodeAck, err := vc.GetDataNode(ctx, pGetDataNodeReq)\n\tif err != nil {\n\t\tlogger.Error(\"GetAllDatanode failed,grpc func err :%v\", err)\n\t\treturn -1, nil\n\t}\n\tif pGetDataNodeAck.Ret != 0 {\n\t\tlogger.Error(\"GetAllDatanode failed,grpc func ret :%v\", pGetDataNodeAck.Ret)\n\t\treturn -1, nil\n\t}\n\treturn 0, pGetDataNodeAck.DataNodes\n}", "func AllUsersGet(c *gin.Context) {\n\tmeta := model.TableMetaFromQuery(c)\n\tginutils.WriteGinJSON(c, http.StatusOK, model.AllUsers(meta))\n}" ]
[ "0.6986866", "0.6965644", "0.694868", "0.6766839", "0.6682119", "0.66248673", "0.65732056", "0.64739794", "0.6445254", "0.6414374", "0.6290187", "0.62791175", "0.6263827", "0.62630916", "0.6240211", "0.6214279", "0.61956394", "0.6095889", "0.60884506", "0.60673577", "0.60611546", "0.60475034", "0.60181797", "0.59920067", "0.5981626", "0.5965519", "0.59647954", "0.5964175", "0.5951097", "0.59410274", "0.5939194", "0.59114707", "0.5896511", "0.58916855", "0.5879033", "0.5868182", "0.5867653", "0.58311534", "0.58140224", "0.578901", "0.5780974", "0.5771228", "0.57589316", "0.5755666", "0.57371265", "0.57357824", "0.5724326", "0.57194453", "0.57113606", "0.57089275", "0.5705538", "0.5693372", "0.5688874", "0.5688115", "0.56822014", "0.56795114", "0.5669916", "0.566099", "0.56584555", "0.5658008", "0.56423765", "0.56381744", "0.56318796", "0.5628968", "0.5627329", "0.5622183", "0.562097", "0.560652", "0.5605996", "0.56031126", "0.55986834", "0.55926013", "0.5587827", "0.5584307", "0.55719274", "0.5568031", "0.55674064", "0.55642104", "0.555609", "0.55446327", "0.55395335", "0.5534933", "0.5526655", "0.55243504", "0.5521129", "0.55145955", "0.5512282", "0.55110824", "0.55110824", "0.55062234", "0.5505847", "0.5503617", "0.5499677", "0.54984325", "0.5489476", "0.548423", "0.5481827", "0.54753774", "0.5473092", "0.54699504" ]
0.57054335
51
GetMarketSymbols get symbols in market
func (db2 *DB2) GetMarketSymbols(ctx context.Context, market string) ([]string, error) {
	if market == "" || tradingdb2utils.IndexOfStringSlice(db2.cfg.DB2Markets, market, 0) < 0 {
		return nil, ErrInvalidMarket
	}

	symbols := []string{}

	err := db2.AnkaDB.ForEachWithPrefix(ctx, market, makeSymbolDB2KeyPrefix(market), func(key string, buf []byte) error {
		si := &tradingpb.SymbolInfo{}
		err := proto.Unmarshal(buf, si)
		if err != nil {
			return err
		}

		symbols = append(symbols, si.Symbol)

		return nil
	})
	if err != nil {
		return nil, err
	}

	return symbols, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m Market) Symbols() ([]aurora.Symbol, error) {\n\tvar baseAssets []string\n\tvar quoteAssets []string\n\tvar wg sync.WaitGroup\n\tvar err1 error\n\tvar err2 error\n\n\twg.Add(2)\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tvar res, err = http.Get(\"https://api.kucoin.com/v1/open/markets\")\n\t\tif err != nil {\n\t\t\terr1 = err\n\t\t\treturn\n\t\t}\n\t\tdefer res.Body.Close()\n\n\t\tvar markets struct {\n\t\t\tAssets []string `json:\"data\"`\n\t\t}\n\t\tif err := json.NewDecoder(res.Body).Decode(&markets); err != nil {\n\t\t\terr1 = err\n\t\t\treturn\n\t\t}\n\t\tbaseAssets = markets.Assets\n\t}()\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tvar res, err = http.Get(\"https://api.kucoin.com/v1/market/open/coins\")\n\t\tif err != nil {\n\t\t\terr2 = err\n\t\t\treturn\n\t\t}\n\t\tdefer res.Body.Close()\n\n\t\tvar coins struct {\n\t\t\tData []struct {\n\t\t\t\tAsset string `json:\"coin\"`\n\t\t\t} `json:\"data\"`\n\t\t}\n\t\tif err := json.NewDecoder(res.Body).Decode(&coins); err != nil {\n\t\t\terr2 = err\n\t\t\treturn\n\t\t}\n\t\tquoteAssets = make([]string, 0)\n\t\tfor _, a := range coins.Data {\n\t\t\tquoteAssets = append(quoteAssets, a.Asset)\n\t\t}\n\t}()\n\n\twg.Wait()\n\n\tif err1 != nil {\n\t\treturn nil, err1\n\t}\n\tif err2 != nil {\n\t\treturn nil, err2\n\t}\n\n\tvar symbols = make([]aurora.Symbol, 0)\n\tfor _, ba := range baseAssets {\n\t\tfor _, qa := range quoteAssets {\n\t\t\tsymbols = append(symbols, aurora.Symbol{BaseAsset: ba, QuoteAsset: qa})\n\t\t}\n\t}\n\n\treturn symbols, nil\n}", "func (m *Market) Symbols() (MarketResponse, error) {\n\tsymbolURL := URL(\"/v1/symbols\")\n\n\tvar result MarketResponse\n\tresp, err := method.Get(symbolURL, nil, nil)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn bodyToMarketResponse(resp.Body, &result)\n}", "func (sp *SymbolsProvider) getSymbols() (symbols []symbol, err error) {\n\tvar b []byte\n\tvar resp symbolsResponse\n\tif b, err = sp.httpClient.Get(apiSymbols, httpclient.Params(), false); err != nil {\n\t\treturn\n\t}\n\tif err = json.Unmarshal(b, &resp); err != nil {\n\t\treturn\n\t}\n\tif !resp.Success {\n\t\terr = fmt.Errorf(\"[KUCOIN] Error getting symbols: %v\", resp.Message)\n\t\treturn\n\t}\n\tfor _, smb := range resp.Data {\n\t\tsymbols = append(symbols, smb)\n\t}\n\n\treturn\n}", "func (sp *SymbolsProvider) Get() (symbols []schemas.Symbol, err error) {\n\tvar b []byte\n\tvar resp []Symbol\n\tif b, err = sp.httpClient.Get(apiSymbols, httpclient.Params(), false); err != nil {\n\t\treturn\n\t}\n\tif err = json.Unmarshal(b, &resp); err != nil {\n\t\treturn\n\t}\n\n\tfor _, smb := range resp {\n\t\tname, baseCoin, quoteCoin := parseSymbol(smb.Pair)\n\t\tminPrice, _ := strconv.ParseFloat(smb.MinOrderSize, 64)\n\t\tmaxPrice, _ := strconv.ParseFloat(smb.MaxOrderSize, 64)\n\t\tminAmount, _ := strconv.ParseFloat(smb.MinMargin, 64)\n\n\t\tsymbols = append(symbols, schemas.Symbol{\n\t\t\tName: name,\n\t\t\tOriginalName: smb.Pair,\n\t\t\tCoin: quoteCoin,\n\t\t\tBaseCoin: baseCoin,\n\t\t\tMinPrice: minPrice,\n\t\t\tMaxPrice: maxPrice,\n\t\t\tMinAmount: minAmount,\n\t\t\tPricePrecision: int(smb.PricePrecision),\n\t\t})\n\t}\n\n\treturn\n}", "func getAllSymbols(quoteAssets []string) []string {\n\tvalidSymbols := make([]string, 0)\n\tvar tradingSymbols []string\n\n\tm := ExchangeInfo{}\n\terr := getJSON(\"https://api.binance.com/api/v1/exchangeInfo\", &m)\n\tif err != nil {\n\t\tlog.Error(\"Binance /exchangeInfo API error: %v\", err)\n\t\ttradingSymbols = []string{\"BTC\", \"ETH\", \"LTC\", 
\"BNB\"}\n\t} else {\n\t\ttradingSymbols = getTradingSymbols(quoteAssets, m)\n\t}\n\n\tclient := binance.NewClient(\"\", \"\")\n\t// Double check each symbol is working as intended\n\tfor _, s := range tradingSymbols {\n\t\t_, err := client.NewKlinesService().Symbol(s + quoteAssets[0]).Interval(\"1m\").Do(context.Background())\n\t\tif err == nil {\n\t\t\tvalidSymbols = append(validSymbols, s)\n\t\t}\n\t}\n\n\treturn validSymbols\n}", "func (h *HitBTC) GetSymbols(ctx context.Context, symbol string) ([]string, error) {\n\tvar resp []Symbol\n\tpath := fmt.Sprintf(\"/%s/%s\", apiV2Symbol, symbol)\n\n\tret := make([]string, 0, len(resp))\n\terr := h.SendHTTPRequest(ctx, exchange.RestSpot, path, &resp)\n\tif err != nil {\n\t\treturn ret, err\n\t}\n\n\tfor _, x := range resp {\n\t\tret = append(ret, x.ID)\n\t}\n\treturn ret, err\n}", "func TestWsGetSymbols(t *testing.T) {\n\tsetupWsAuth(t)\n\t_, err := h.wsGetSymbols(currency.NewPair(currency.ETH, currency.BTC))\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}", "func (sp *SymbolsProvider) Get() (symbols []schemas.Symbol, err error) {\n\tsmbls, err := sp.getSymbols()\n\tif err != nil {\n\t\treturn\n\t}\n\tcoins, err := sp.getCoins()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tfor _, smb := range smbls {\n\t\tvar basePrec, quotePrec int\n\n\t\ts := smb.Map()\n\t\tif p, ok := coins[smb.CoinType]; ok {\n\t\t\tbasePrec = int(p.TradePrecision)\n\t\t}\n\t\tif p, ok := coins[smb.CoinTypePair]; ok {\n\t\t\tquotePrec = int(p.TradePrecision)\n\t\t}\n\n\t\tif basePrec != 0 && quotePrec != 0 {\n\t\t\tif basePrec > quotePrec {\n\t\t\t\ts.PricePrecision = quotePrec\n\t\t\t} else if quotePrec > basePrec {\n\t\t\t\ts.PricePrecision = basePrec\n\t\t\t} else {\n\t\t\t\ts.PricePrecision = basePrec\n\t\t\t}\n\t\t} else {\n\t\t\ts.PricePrecision = defaultPrecision\n\t\t}\n\n\t\tsymbols = append(symbols, s)\n\t}\n\n\treturn\n}", "func (h *HUOBI) GetSymbols(ctx context.Context) ([]Symbol, error) {\n\ttype response struct {\n\t\tResponse\n\t\tSymbols []Symbol `json:\"data\"`\n\t}\n\n\tvar result response\n\n\terr := h.SendHTTPRequest(ctx, exchange.RestSpot, huobiSymbols, &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn nil, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Symbols, err\n}", "func (c *client) GetMarkets(query *MarketsQuery) ([]SymbolPair, error) {\n\terr := query.Check()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tqp, err := common.QueryParamToMap(*query)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresp, err := c.baseClient.Get(\"/markets\", qp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar listOfPairs []SymbolPair\n\tif err := json.Unmarshal(resp, &listOfPairs); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn listOfPairs, nil\n}", "func getAllCompanySymbols() ([]string){\r\n return []string{}\r\n}", "func (m *MarketClient) GetSymbolOrderBookTicker(symbol string) (interface{}, error) {\n\tvar err error\n\tparams := \"\"\n\tif symbol != \"\" {\n\t\tparams = fmt.Sprintf(\"symbol=%s\", symbol)\n\t}\n\treq, err := m.Builder.Build(http.MethodGet, \"/api/v3/ticker/bookTicker\", params)\n\tif err != nil {\n\t\tlogger.Error(\"Failed to build url: %s\", err.Error())\n\t}\n\tres, err := binance.HttpRequest(req)\n\tvar parser map[string]interface{}\n\terr = json.Unmarshal(res, &parser)\n\tif _, ok := parser[\"code\"]; ok {\n\t\tresult := model.APIErrorResponse{}\n\t\terr = json.Unmarshal(res, &result)\n\t\treturn result, err\n\t}\n\tif params == \"\" {\n\t\tvar result []SymbolOrderBookTickerResponse\n\t\terr = json.Unmarshal(res, 
&result)\n\t\treturn result, err\n\t}\n\tresult := SymbolOrderBookTickerResponse{}\n\terr = json.Unmarshal(res, &result)\n\treturn result, err\n}", "func (p *Client) GetSymbols() (*model.Symbols, error) {\n\treq, err := http.NewRequest(\"GET\", symbolUrl, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp, err := p.client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdata, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tvar sbs = new(model.Symbols)\n\n\terr = json.Unmarshal(data, sbs)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn sbs, nil\n}", "func (c *Client) Symbols(ctx context.Context) (*SymbolsResponse, error) {\n\tvar response *SymbolsResponse\n\trequest := \"/v1/symbols\"\n\n\treq, err := http.NewRequestWithContext(ctx,\n\t\thttp.MethodGet,\n\t\tc.BaseURL+request,\n\t\tnil,\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trawResponse, err := c.doPublicRequest(req)\n\tif rawResponse == nil || err != nil {\n\t\treturn nil, err\n\t}\n\tif err := json.Unmarshal(rawResponse, &response); err != nil {\n\t\treturn nil, err\n\t}\n\treturn response, nil\n}", "func (hc *Client) GetSymbolsPair() ([]Symbol, error) {\n\tvar (\n\t\tsymbolReply SymbolsReply\n\t)\n\tendpoint := fmt.Sprintf(\"%s/v1/common/symbols\", huobiEndpoint)\n\tres, err := hc.sendRequest(\n\t\thttp.MethodGet,\n\t\tendpoint,\n\t\tnil,\n\t\tfalse,\n\t)\n\tif err != nil {\n\t\treturn symbolReply.Data, err\n\t}\n\terr = json.Unmarshal(res, &symbolReply)\n\tif err != nil {\n\t\treturn symbolReply.Data, err\n\t}\n\tif symbolReply.Status != StatusOK.String() {\n\t\treturn symbolReply.Data, fmt.Errorf(\"unexpected reply status %s\", symbolReply.Status)\n\t}\n\treturn symbolReply.Data, nil\n}", "func (h *HUOBIHADAX) GetSymbols() ([]Symbol, error) {\n\ttype response struct {\n\t\tResponse\n\t\tSymbols []Symbol `json:\"data\"`\n\t}\n\n\tvar result response\n\turlPath := fmt.Sprintf(\"%s/v%s/%s/%s\", h.APIUrl, huobihadaxAPIVersion, huobihadaxAPIName, huobihadaxSymbols)\n\n\terr := h.SendHTTPRequest(urlPath, &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn nil, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Symbols, err\n}", "func (c *Coinbene) GetTicker(symbol string) (TickerData, error) {\n\tresp := struct {\n\t\tTickerData TickerData `json:\"data\"`\n\t}{}\n\tparams := url.Values{}\n\tparams.Set(\"symbol\", symbol)\n\tpath := common.EncodeURLValues(coinbeneAPIVersion+coinbeneGetTicker, params)\n\treturn resp.TickerData, c.SendHTTPRequest(exchange.RestSpot, path, spotSpecificTicker, &resp)\n}", "func (t *TauAPI) GetMarkets() (markets []Market, error error) {\n\tvar m []Market\n\tjsonData, err := t.doTauRequest(&TauReq{\n\t\tVersion: 2,\n\t\tMethod: \"GET\",\n\t\tPath: \"trading/markets\",\n\t})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"TauGetMarkets ->%v\", err)\n\t}\n\tif err := json.Unmarshal(jsonData, &m); err != nil {\n\t\treturn nil, fmt.Errorf(\"TauGetMarkets json.Unmarshall->%v\", err)\n\t}\n\treturn m, nil\n}", "func (c *Client) GetTradeMarketData(tickerSymbol string) (map[string]interface{}, error) {\n\tlog.info(\"========== GET TRADE MARKET DATA ==========\")\n\turl := buildURL(path[\"nodes\"], \"trade-market-watch\")\n\n\tts := []string{tickerSymbol}\n\n\treturn c.do(\"GET\", url, \"\", ts)\n}", "func (h *HitBTC) GetSymbolsDetailed(ctx context.Context) ([]Symbol, error) {\n\tvar resp []Symbol\n\tpath := fmt.Sprintf(\"/%s\", apiV2Symbol)\n\treturn resp, h.SendHTTPRequest(ctx, exchange.RestSpot, path, 
&resp)\n}", "func (c *Client) Markets(reqParams *MarketsRequest) ([]*Market, *Timestamp, error) {\n\n\t// Prepare the query\n\treq, err := http.NewRequest(\"GET\", c.baseURL+\"/markets\", nil)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// encode optional parameters\n\tparams := req.URL.Query()\n\tif reqParams.ExchangeID != \"\" {\n\t\tparams.Add(\"exchange\", reqParams.ExchangeID)\n\t}\n\tif reqParams.BaseSymbol != \"\" {\n\t\tparams.Add(\"baseSymbol\", reqParams.BaseSymbol)\n\t}\n\tif reqParams.BaseID != \"\" {\n\t\tparams.Add(\"baseId\", reqParams.BaseID)\n\t}\n\tif reqParams.QuoteSymbol != \"\" {\n\t\tparams.Add(\"quoteSymbol\", reqParams.QuoteSymbol)\n\t}\n\tif reqParams.QuoteID != \"\" {\n\t\tparams.Add(\"quoteId\", reqParams.QuoteID)\n\t}\n\tif reqParams.AssetSymbol != \"\" {\n\t\tparams.Add(\"AssetSymbol\", reqParams.AssetSymbol)\n\t}\n\tif reqParams.AssetID != \"\" {\n\t\tparams.Add(\"assetId\", reqParams.AssetID)\n\t}\n\tif reqParams.Limit > 0 {\n\t\tparams.Add(\"limit\", strconv.Itoa(reqParams.Limit))\n\t}\n\tif reqParams.Offset > 0 {\n\t\tparams.Add(\"offset\", strconv.Itoa(reqParams.Offset))\n\t}\n\treq.URL.RawQuery = params.Encode()\n\n\t// make the request\n\tccResp, err := c.fetchAndParse(req)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Unmarshal the deferred json from the data field\n\tvar markets []*Market\n\tjson.Unmarshal(*ccResp.Data, &markets)\n\n\treturn markets, ccResp.Timestamp, nil\n}", "func getMarkets(session *mgo.Session) ([]ApiMarket, error) {\n\tdefer session.Close()\n\tdb := session.DB(\"ixm\")\n\n\tdata := []ApiMarket{}\n\n\tnames, err := db.CollectionNames()\n\tif err != nil {\n\t\treturn []ApiMarket{}, err\n\t}\n\n\tfor _, name := range names {\n\t\t// skip index collection\n\t\tif name != \"system.indexes\" {\n\t\t\t// set col\n\t\t\tc := db.C(name)\n\n\t\t\t// set record count\n\t\t\tcount, err := c.Count()\n\t\t\tif err != nil {\n\t\t\t\treturn []ApiMarket{}, err\n\t\t\t}\n\n\t\t\t// find first/last timestamp\n\t\t\trawFirst := bson.M{}\n\t\t\terr = c.Find(nil).Sort(\"timestamp\").Limit(1).Select(bson.M{\"timestamp\": 1}).One(&rawFirst)\n\t\t\tif err != nil {\n\t\t\t\treturn []ApiMarket{}, err\n\t\t\t}\n\n\t\t\trawLast := bson.M{}\n\t\t\terr = c.Find(nil).Sort(\"-timestamp\").Limit(1).Select(bson.M{\"timestamp\": 1}).One(&rawLast)\n\t\t\tif err != nil {\n\t\t\t\treturn []ApiMarket{}, err\n\t\t\t}\n\n\t\t\tfirst := rawFirst[\"timestamp\"].(int)\n\t\t\tlast := rawLast[\"timestamp\"].(int)\n\n\t\t\t// construct response\n\t\t\tdata = append(data, ApiMarket{\n\t\t\t\tMarket: name,\n\t\t\t\tRecords: count,\n\t\t\t\tFirst: first,\n\t\t\t\tLast: last,\n\t\t\t})\n\t\t}\n\t}\n\n\t// ret no err\n\treturn data, nil\n}", "func GetTicker(baseURL string, symbol string) *t.Ticker {\n\tvar url strings.Builder\n\n\tfmt.Fprintf(&url, \"%s/ticker/price?symbol=%s\", baseURL, symbol)\n\tdata, err := h.Get(url.String())\n\tif err != nil {\n\t\treturn nil\n\t}\n\tr := gjson.ParseBytes(data)\n\treturn &t.Ticker{\n\t\tExchange: t.ExcBinance,\n\t\tSymbol: r.Get(\"symbol\").String(),\n\t\tPrice: r.Get(\"price\").Float(),\n\t\tTime: r.Get(\"time\").Int(),\n\t}\n}", "func (h *HitBTC) GetTicker(ctx context.Context, symbol string) (TickerResponse, error) {\n\tvar resp TickerResponse\n\tpath := fmt.Sprintf(\"/%s/%s\", apiV2Ticker, symbol)\n\treturn resp, h.SendHTTPRequest(ctx, exchange.RestSpot, path, &resp)\n}", "func (_Lelecoin *LelecoinCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Lelecoin.contract.Call(opts, 
&out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_WELV9 *WELV9Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _WELV9.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (u *User) GetMarkets() (result []Market, err error) {\n\tvar response jsonResponse\n\tr, err := u.getURL(\"GET\", \"/api/v1.1/public/getmarkets\", nil, false)\n\terr = parseData(r, &response)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = parseData(response.Result, &result)\n\treturn\n}", "func (e *Huobi) getTicker(stockType string, sizes ...interface{}) (ticker Ticker, err error) {\n\tstockType = strings.ToUpper(stockType)\n\tif _, ok := e.stockTypeMap[stockType]; !ok {\n\t\terr = fmt.Errorf(\"GetTicker() error, unrecognized stockType: %+v\", stockType)\n\t\treturn\n\t}\n\tresult, err := services.GetMarketDepth(e.stockTypeMap[stockType]+\"usdt\", \"step0\")\n\tif err != nil {\n\t\terr = fmt.Errorf(\"GetTicker() error, %+v\", err)\n\t\treturn\n\t}\n\tif result.Status != \"ok\" {\n\t\terr = fmt.Errorf(\"GetTicker() error, %+v\", result.ErrMsg)\n\t\treturn\n\t}\n\tcount := len(result.Tick.Bids)\n\tfor i := 0; i < count; i++ {\n\t\tticker.Bids = append(ticker.Bids, OrderBook{\n\t\t\tPrice: result.Tick.Bids[i][0],\n\t\t\tAmount: result.Tick.Bids[i][1],\n\t\t})\n\t}\n\tcount = len(result.Tick.Asks)\n\tfor i := 0; i < count; i++ {\n\t\tticker.Asks = append(ticker.Asks, OrderBook{\n\t\t\tPrice: result.Tick.Asks[i][0],\n\t\t\tAmount: result.Tick.Asks[i][1],\n\t\t})\n\t}\n\tif len(ticker.Bids) < 1 || len(ticker.Asks) < 1 {\n\t\terr = fmt.Errorf(\"GetTicker() error, can not get enough Bids or Asks\")\n\t\treturn\n\t}\n\tticker.Buy = ticker.Bids[0].Price\n\tticker.Sell = ticker.Asks[0].Price\n\tticker.Mid = (ticker.Buy + ticker.Sell) / 2\n\treturn\n}", "func (b *Binance) GetOrdersBySymbol(symbol string) []Operation {\n\torders, err := b.client.NewListOrdersService().Symbol(symbol).\n\t\tDo(context.Background())\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn []Operation{}\n\t}\n\topr := []Operation{}\n\tfmt.Println(orders[0].Price)\n\treturn opr\n}", "func (_MainnetCryptoCardsContract *MainnetCryptoCardsContractCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _MainnetCryptoCardsContract.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func GetCoinMarkets(coin string) ([]Market, error) {\n\turl := fmt.Sprintf(\"https://coinmarketcap.com/currencies/%s/#markets\", strings.ToLower(coin))\n\tvar markets []Market\n\tresponse, err := soup.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trows := soup.HTMLParse(response).Find(\"table\", \"id\", \"markets-table\").Find(\"tbody\").FindAll(\"tr\")\n\tfor _, row := range rows {\n\t\tvar data []string\n\t\tfor _, column := range row.FindAll(\"td\") {\n\t\t\tattrs := column.Attrs()\n\t\t\tif attrs[\"data-sort\"] != \"\" {\n\t\t\t\tdata = append(data, attrs[\"data-sort\"])\n\t\t\t} else {\n\t\t\t\tdata = append(data, column.Text())\n\t\t\t}\n\t\t}\n\t\tmarkets = append(markets, Market{\n\t\t\tRank: toInt(data[0]),\n\t\t\tExchange: data[1],\n\t\t\tPair: data[2],\n\t\t\tVolumeUSD: toFloat(data[3]),\n\t\t\tPrice: toFloat(data[4]),\n\t\t\tVolumePercent: toFloat(data[5]),\n\t\t\tUpdated: 
data[6],\n\t\t})\n\t}\n\treturn markets, nil\n}", "func (_Bep20 *Bep20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Bep20.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_MainnetCryptoCardsContract *MainnetCryptoCardsContractSession) Symbol() (string, error) {\n\treturn _MainnetCryptoCardsContract.Contract.Symbol(&_MainnetCryptoCardsContract.CallOpts)\n}", "func getSymbols(reels [5]uint8) ([5][3]Symbol, uint8) {\n\tresult := [5][3]Symbol{}\n\n\tvar scatters uint8\n\tvar top, bot uint8\n\tfor reel, mid := range reels {\n\t\ttop = mid - 1\n\t\tbot = mid + 1\n\n\t\tif mid == 0 {\n\t\t\ttop = 31\n\t\t} else if mid == 31 {\n\t\t\tbot = 0\n\t\t}\n\n\t\tresult[reel] = [3]Symbol{\n\t\t\tReals[reel][top],\n\t\t\tReals[reel][mid],\n\t\t\tReals[reel][bot],\n\t\t}\n\n\t\tfor _, s := range result[reel] {\n\t\t\tif s == Scatter {\n\t\t\t\tscatters++\n\t\t\t}\n\t\t}\n\t}\n\treturn result, scatters\n}", "func (h *HUOBI) GetSwapMarkets(ctx context.Context, contract currency.Pair) ([]SwapMarketsData, error) {\n\tvals := url.Values{}\n\tif !contract.IsEmpty() {\n\t\tcodeValue, err := h.FormatSymbol(contract, asset.CoinMarginedFutures)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tvals.Set(\"contract_code\", codeValue)\n\t}\n\ttype response struct {\n\t\tResponse\n\t\tData []SwapMarketsData `json:\"data\"`\n\t}\n\tvar result response\n\terr := h.SendHTTPRequest(ctx, exchange.RestFutures, huobiSwapMarkets+vals.Encode(), &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn nil, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Data, err\n}", "func (e *Bitfinex) GetTickers() (*Tickers, error) {\n\tclient := rest.NewClient()\n\tbookTickers, err := client.Tickers.All()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\ttickers := NewTickers(len(*bookTickers))\n\tfor _, ticker := range *bookTickers {\n\t\tsymbol := ticker.Symbol\n\t\tif len(symbol) == 0 || symbol[0] != 't' || strings.Contains(symbol, \":\") {\n\t\t\tcontinue\n\t\t}\n\t\tsymbol = symbol[1:]\n\t\tif len(symbol) != 6 {\n\t\t\tlog.Printf(\"warn, bitfinex non 6 letter symbol with no semicolon %q\", symbol)\n\t\t\tcontinue\n\t\t}\n\t\tbase, quote := symbol[:3], symbol[3:]\n\t\ttickers.add(base, quote, Ticker{BuyPrice: ticker.Bid, SellPrice: ticker.Ask})\n\t}\n\treturn tickers, nil\n}", "func (m *MarketClient) GetSymbolTickerPrice(symbol string) (interface{}, error) {\n\tvar err error\n\tparams := \"\"\n\tif symbol != \"\" {\n\t\tparams = fmt.Sprintf(\"symbol=%s\", symbol)\n\t}\n\treq, err := m.Builder.Build(http.MethodGet, \"/api/v3/ticker/price\", params)\n\tif err != nil {\n\t\tlogger.Error(\"Failed to build url: %s\", err.Error())\n\t}\n\tres, err := binance.HttpRequest(req)\n\tvar parser map[string]interface{}\n\terr = json.Unmarshal(res, &parser)\n\tif _, ok := parser[\"code\"]; ok {\n\t\tresult := model.APIErrorResponse{}\n\t\terr = json.Unmarshal(res, &result)\n\t\treturn result, err\n\t}\n\tif params == \"\" {\n\t\tvar result []SymbolPriceTickerResponse\n\t\terr = json.Unmarshal(res, &result)\n\t\treturn result, err\n\t}\n\tresult := SymbolPriceTickerResponse{}\n\terr = json.Unmarshal(res, &result)\n\treturn result, err\n}", "func (p *bitsharesAPI) GetTicker(base, quote string) (*objects.MarketTicker, error) {\n\tvar result *objects.MarketTicker\n\terr := p.call(p.databaseAPIID, \"get_ticker\", &result, base, quote, false)\n\treturn result, 
err\n}", "func getCurrentPrices(c int) (Stock, error) {\n\tvar (\n\t\tstock Stock // Market price struct\n\t\tyhStock YHStockPrice // Original Yahoo Response\n\t)\n\n\tcode := fmt.Sprintf(\"%04d\", c)\n\turl := fmt.Sprintf(\"https://query1.finance.yahoo.com/v8/finance/chart/%s.HK?region=US&lang=en-US&includePrePost=false&interval=5m&range=1d&corsDomain=finance.yahoo.com&.tsrc=finance\", code)\n\tpayload := strings.NewReader(\"\")\n\n\tclient := &http.Client{}\n\treq, err := http.NewRequest(\"GET\", url, payload)\n\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n\n\treq.Header.Add(\"Accept\", \"\")\n\treq.Header.Add(\"Referer\", fmt.Sprintf(\"https://finance.yahoo.com/quote/%s.HK/\", code))\n\treq.Header.Add(\"User-Agent\", \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) HeadlessChrome/87.0.4280.88 Safari/537.36\")\n\n\tres, err := client.Do(req)\n\tdefer res.Body.Close()\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn stock, errors.Wrap(err, \"something's wrong with ReadAll\")\n\t}\n\n\terr = json.Unmarshal(body, &yhStock)\n\tif err != nil {\n\t\treturn stock, errors.Wrap(err, \"can not marshell response\")\n\t}\n\n\t// Return stock object if market price is greater than 0\n\tr := yhStock.Chart.Result\n\tif len(r) != 0 {\n\t\tprice := r[0].Meta.RegularMarketPrice\n\t\trt := int64(r[0].Meta.RegularMarketTime) // Market time\n\n\t\t// Compare market time and current time\n\t\ttc := time.Now() // Current time\n\t\ttm := time.Unix(rt, 0) // Market time\n\t\tdiff := tc.Sub(tm)\n\t\tthreshold, _ := time.ParseDuration(\"30m\")\n\n\t\t// Only return records within 30 mins of the call\n\t\tif diff <= threshold {\n\t\t\tif price > 0 {\n\t\t\t\tloc, _ := time.LoadLocation(\"Local\")\n\t\t\t\tmarketTime := tm.In(loc).Format(\"2006-01-02 15:04:05\")\n\t\t\t\tstock = Stock{Code: code, MarketTime: marketTime, MarketPrice: price}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn stock, nil\n}", "func (_CraftingI *CraftingICaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _CraftingI.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func getQuote(sym string) string {\n sym = strings.ToUpper(sym)\n url := fmt.Sprintf(\"http://download.finance.yahoo.com/d/quotes.csv?s=%s&f=nsl1op&e=.csv\", sym)\n resp, err := http.Get(url)\n if err != nil {\n return fmt.Sprintf(\"error: %v\", err)\n }\n rows, err := csv.NewReader(resp.Body).ReadAll()\n if err != nil {\n return fmt.Sprintf(\"error: %v\", err)\n }\n if len(rows) >= 1 && len(rows[0]) == 5 {\n\t //var previousClose int :=rows[0][4]\n\t previousClose, err := strconv.ParseFloat(rows[0][4], 64)\n\t if err != nil {\n\t\t return fmt.Sprintf(\"error: %v\", err)\n\t }\n\t open, err := strconv.ParseFloat(rows[0][3], 64)\n\t if err != nil {\n\t\t return fmt.Sprintf(\"error: %v\", err)\n\t }\n\t nowValue, err := strconv.ParseFloat(rows[0][2], 64)\n\t if err != nil {\n\t\t return fmt.Sprintf(\"error: %v\", err)\n\t }\n\t previousClose = nowValue - previousClose\n\n\t open = nowValue - open\n\n return fmt.Sprintf(\"%s (%s) is trading at $%.2f. 
Since open: %.2f, Since yesterday: %.2f\", rows[0][0], rows[0][1], nowValue, open, previousClose)\n }\n return fmt.Sprintf(\"unknown response format (symbol was \\\"%s\\\")\", sym)\n}", "func NewSymbol(name, ticker, description string) (s Symbol) {\n\ts.Name = name\n\ts.Ticker = ticker\n\ts.Description = description\n\ts.Type = \"Crypto\" // use token/coin\n\ts.Session = \"24x7\"\n\ts.Exchange = \"Balancer\"\n\ts.ListedExchange = \"Balancer\"\n\ts.TimeZone = \"Etc/UTC\"\n\ts.MinMov = 1\n\ts.PriceScale = 1e8\n\ts.SupportedResolutions = Conf.Resolutions\n\ts.HasIntraDay = true // [?]\n\ts.IntraDayMultipliers = []string{\"1\"}\n\t/*\n\t\tfor i := 0; i < len(resolutionMins); i++ {\n\t\t\ts.IntraDayMultipliers = append(s.IntraDayMultipliers, fmt.Sprint(resolutionMins[i]))\n\t\t}\n\n\t\ts.HasDaily = false // [?]\n\t\ts.HasEmptyBars = false\n\t\ts.ForceSessionRebuild = true\n\t\ts.DataStatus = \"pulsed\"\n\t\ts.HasNoVolume = false\n\t*/\n\treturn\n}", "func ToSymbol(cache Repository, currency string) (symbol *hitbtc.Symbol, err error) {\n\tif len(currency) >= 6 {\n\t\tsymbol = cache.GetSymbol(currency, hitbtc.Exchange).(*hitbtc.Symbol)\n\t\tif symbol.ID == \"\" {\n\t\t\treturn nil, hitbtc.ErrSymbolNotFound\n\t\t}\n\n\t\treturn\n\t}\n\n\tif util.Contains(hitbtcCurrencies, currency) {\n\t\tsymbol = &hitbtc.Symbol{\n\t\t\tBase: currency,\n\t\t\tQuote: hitbtc.USD,\n\t\t}\n\n\t\tif symbol.Base == hitbtc.USD {\n\t\t\tsymbol.Base = hitbtc.BTC\n\t\t}\n\n\t\tsymbol.ID = symbol.Base + symbol.Quote\n\n\t\treturn\n\t}\n\n\tfor _, base := range hitbtcCurrencies {\n\t\tsymbol = cache.GetSymbol(currency+base, hitbtc.Exchange).(*hitbtc.Symbol)\n\t\tif symbol.ID != \"\" {\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn nil, hitbtc.ErrCurrencyNotFound\n}", "func (w *ServerInterfaceWrapper) GetTickerBySymbol(ctx echo.Context) error {\n\tvar err error\n\t// ------------- Path parameter \"symbol\" -------------\n\tvar symbol SymbolParam\n\n\terr = runtime.BindStyledParameterWithLocation(\"simple\", false, \"symbol\", runtime.ParamLocationPath, ctx.Param(\"symbol\"), &symbol)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf(\"Invalid format for parameter symbol: %s\", err))\n\t}\n\n\tctx.Set(ApiKeyAuthScopes, []string{\"\"})\n\n\t// Invoke the callback with all the unmarshalled arguments\n\terr = w.Handler.GetTickerBySymbol(ctx, symbol)\n\treturn err\n}", "func (t *TauAPI) GetMarketOrders(market string) (MarketOrders, error) {\n\tvar mo MarketOrders\n\tvar maxBid, minAsk float64\n\tjsonData, err := t.doTauRequest(&TauReq{\n\t\tVersion: 1,\n\t\tMethod: \"GET\",\n\t\tPath: \"trading/orders?market=\" + strings.ToLower(market),\n\t})\n\tif err != nil {\n\t\treturn mo, fmt.Errorf(\"TauGetMarketOrders ->%s\", err.Error())\n\t}\n\tif err := json.Unmarshal(jsonData, &mo); err != nil {\n\t\treturn mo, err\n\t}\n\tmaxBid = 0.0\n\tfor _, b := range mo.Bids {\n\t\tbid, _ := strconv.ParseFloat(b.Price.String(), 64)\n\t\tmaxBid = math.Max(bid, maxBid)\n\t}\n\tif len(mo.Asks) == 0 {\n\t\tminAsk = maxBid + 0.01\n\t} else {\n\t\tminAsk, _ = strconv.ParseFloat(mo.Asks[0].Price.String(), 64)\n\t\tfor _, a := range mo.Asks {\n\t\t\task, _ := strconv.ParseFloat(a.Price.String(), 64)\n\t\t\tminAsk = math.Min(ask, minAsk)\n\t\t}\n\t}\n\tmo.MaxBid = maxBid\n\tmo.MinAsk = minAsk\n\treturn mo, nil\n}", "func getQuote(sym string) string {\n\tsym = strings.ToUpper(sym)\n\turl := fmt.Sprintf(\"http://download.finance.yahoo.com/d/quotes.csv?s=%s&f=nsl1op&e=.csv\", sym)\n\tresp, err := http.Get(url)\n\tif err != nil 
{\n\t\treturn fmt.Sprintf(\"error: %v\", err)\n\t}\n\trows, err := csv.NewReader(resp.Body).ReadAll()\n\tif err != nil {\n\t\treturn fmt.Sprintf(\"error: %v\", err)\n\t}\n\tif len(rows) >= 1 && len(rows[0]) == 5 {\n\t\treturn fmt.Sprintf(\"%s (%s) is trading at $%s\", rows[0][0], rows[0][1], rows[0][2])\n\t}\n\treturn fmt.Sprintf(\"unknown response format (symbol was \\\"%s\\\")\", sym)\n}", "func (model *Trade) Symbol() string {\n\treturn model.Tbk.GetItemInCategory(\"Symbol\")\n}", "func (_ZKOnacci *ZKOnacciCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ZKOnacci.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_MainnetCryptoCardsContract *MainnetCryptoCardsContractCallerSession) Symbol() (string, error) {\n\treturn _MainnetCryptoCardsContract.Contract.Symbol(&_MainnetCryptoCardsContract.CallOpts)\n}", "func (_Bep20 *Bep20Session) Symbol() (string, error) {\n\treturn _Bep20.Contract.Symbol(&_Bep20.CallOpts)\n}", "func (_BtlCoin *BtlCoinCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _BtlCoin.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (p *Poloniex) GetTicker(ctx context.Context) (map[string]Ticker, error) {\n\ttype response struct {\n\t\tData map[string]Ticker\n\t}\n\n\tresp := response{}\n\tpath := \"/public?command=returnTicker\"\n\n\treturn resp.Data, p.SendHTTPRequest(ctx, exchange.RestSpot, path, &resp.Data)\n}", "func (m *Market) Trades(symbol string) (MarketResponse, error) {\n\ttradesURL := URL(\"/v1/trades\")\n\tvar result MarketResponse\n\n\tif symbol == \"\" {\n\t\treturn result, errors.New(\"Symbol cannot be empty\")\n\t}\n\n\tquery := url.Values{\n\t\t\"symbol\": []string{symbol},\n\t}\n\n\tresp, err := method.Get(tradesURL, nil, query)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\tdefer resp.Body.Close()\n\n\t// Crypto.com Exchange does not return 404 when symbol does not show\n\tif resp.StatusCode == 500 {\n\t\treturn result, errors.New(\"Symbol does not exist\")\n\t}\n\n\treturn bodyToMarketResponse(resp.Body, &result)\n}", "func (_BtlCoin *BtlCoinSession) Symbol() (string, error) {\n\treturn _BtlCoin.Contract.Symbol(&_BtlCoin.CallOpts)\n}", "func (_Weth *WethCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Weth.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func getTicker(session *mgo.Session, market string) ([]bson.M, error) {\n\tdefer session.Close()\n\tdb := session.DB(\"ixm\")\n\n\t// let's use a slice of bson.M instead of ApiDatapoint\n\tdata := []bson.M{}\n\n\t// check if the collection exists first\n\tnames, err := db.CollectionNames()\n\tif err != nil {\n\t\treturn []bson.M{}, err\n\t}\n\n\t// if name matches market, create c\n\tfor _, name := range names {\n\t\tif name != \"system.indexes\" {\n\t\t\tif market == name {\n\t\t\t\t// access latest by timestamp, add to data\n\t\t\t\tc := db.C(name)\n\n\t\t\t\terr = c.Find(nil).Sort(\"-timestamp\").Limit(1).Iter().All(&data)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn []bson.M{}, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// ret no err\n\treturn data, nil\n}", "func (m *Market) Ticker(symbol string) (MarketResponse, error) {\n\ttickerURL := URL(\"/v1/ticker\")\n\tvar 
result MarketResponse\n\tvalues := url.Values{}\n\n\tif symbol != \"\" {\n\t\tvalues.Add(\"symbol\", symbol)\n\t}\n\n\tresp, err := method.Get(tickerURL, nil, values)\n\tif err != nil {\n\t\treturn result, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn bodyToMarketResponse(resp.Body, &result)\n}", "func (_Wmatic *WmaticCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Wmatic.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_IERC721Metadata *IERC721MetadataCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _IERC721Metadata.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_IERC721Metadata *IERC721MetadataCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _IERC721Metadata.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_Univ2 *Univ2Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Univ2.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (a *Client) GetReservedSymbols(params *GetReservedSymbolsParams) (*GetReservedSymbolsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetReservedSymbolsParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"getReservedSymbols\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/asset/tokens/reserved/symbols\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &GetReservedSymbolsReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetReservedSymbolsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for getReservedSymbols: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (_Crowdsale *CrowdsaleCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Crowdsale.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (_Lelecoin *LelecoinSession) Symbol() (string, error) {\n\treturn _Lelecoin.Contract.Symbol(&_Lelecoin.CallOpts)\n}", "func (h *HUOBIHADAX) GetMarketDetail(symbol string) (Detail, error) {\n\tvals := url.Values{}\n\tvals.Set(\"symbol\", symbol)\n\n\ttype response struct {\n\t\tResponse\n\t\tTick Detail `json:\"tick\"`\n\t}\n\n\tvar result response\n\turlPath := fmt.Sprintf(\"%s/%s\", h.APIUrl, huobihadaxMarketDetail)\n\n\terr := h.SendHTTPRequest(common.EncodeURLValues(urlPath, vals), &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn result.Tick, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Tick, err\n}", "func (_IERC20 *IERC20Caller) Symbol(opts *bind.CallOpts) (string, error) {\r\n\tvar out []interface{}\r\n\terr := _IERC20.contract.Call(opts, &out, \"symbol\")\r\n\r\n\tif err != nil {\r\n\t\treturn *new(string), err\r\n\t}\r\n\r\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\r\n\r\n\treturn out0, err\r\n\r\n}", "func (sun Suncoin) Symbol() string {\n\treturn \"SUN\"\n}", "func (_Erc20 *Erc20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Erc20.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (_IUniswapV2Pair *IUniswapV2PairCaller) Symbol(opts *bind.CallOpts) (string, error) {\r\n\tvar out []interface{}\r\n\terr := _IUniswapV2Pair.contract.Call(opts, &out, \"symbol\")\r\n\r\n\tif err != nil {\r\n\t\treturn *new(string), err\r\n\t}\r\n\r\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\r\n\r\n\treturn out0, err\r\n\r\n}", "func (h *HUOBIHADAX) GetTrades(symbol string) ([]Trade, error) {\n\tvals := url.Values{}\n\tvals.Set(\"symbol\", symbol)\n\n\ttype response struct {\n\t\tResponse\n\t\tTick struct {\n\t\t\tData []Trade `json:\"data\"`\n\t\t} `json:\"tick\"`\n\t}\n\n\tvar result response\n\turlPath := fmt.Sprintf(\"%s/%s\", h.APIUrl, huobihadaxMarketTrade)\n\n\terr := h.SendHTTPRequest(common.EncodeURLValues(urlPath, vals), &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn nil, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Tick.Data, err\n}", "func (_ERC721Metadata *ERC721MetadataCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ERC721Metadata.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_BREMToken *BREMTokenCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _BREMToken.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func getSymbols() (successSym, errorSym string) {\n\tif runtime.GOOS == \"windows\" {\n\t\tsuccessSym = \"\\u001b[32m\" + \"√\" + \"\\u001b[39m\"\n\t\terrorSym = \"\\u001b[31m\" + \"×\" + \"\\u001b[39m\"\n\t} else {\n\t\tsuccessSym = \"\\u001b[32m\" + \"✔\" + \"\\u001b[39m\"\n\t\terrorSym = \"\\u001b[31m\" + \"✖\" + \"\\u001b[39m\"\n\t}\n\treturn\n}", "func (_ElvTradable *ElvTradableCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ElvTradable.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), 
err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (h *HUOBI) GetTrades(ctx context.Context, symbol currency.Pair) ([]Trade, error) {\n\tvals := url.Values{}\n\tsymbolValue, err := h.FormatSymbol(symbol, asset.Spot)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvals.Set(\"symbol\", symbolValue)\n\n\ttype response struct {\n\t\tResponse\n\t\tTick struct {\n\t\t\tData []Trade `json:\"data\"`\n\t\t} `json:\"tick\"`\n\t}\n\n\tvar result response\n\n\terr = h.SendHTTPRequest(ctx, exchange.RestSpot, common.EncodeURLValues(huobiMarketTrade, vals), &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn nil, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Tick.Data, err\n}", "func (_TTFT20 *TTFT20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _TTFT20.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (_ElvToken *ElvTokenCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ElvToken.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_ERC721Contract *ERC721ContractCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ERC721Contract.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_Contract *ContractCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Contract.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (c *Connecter) GetCurrenciesTicker(ctReq CurrenciesTickerRequest) ([]CurrenciesTickerResponse, error) {\n\treq, err := c.newRequest(currenciesTickerURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Formulate query params.\n\tq := req.URL.Query()\n\tq.Add(\"key\", c.apiKey)\n\tif len(ctReq.Ids) > 0 {\n\t\tq.Add(\"ids\", strings.Join(ctReq.Ids[:], \",\"))\n\t}\n\tif len(ctReq.Interval) > 0 {\n\t\tq.Add(\"interval\", strings.Join(ctReq.Interval[:], \",\"))\n\t}\n\tif ctReq.Convert != \"\" {\n\t\tq.Add(\"convert\", ctReq.Convert)\n\t}\n\tif ctReq.Status != \"\" {\n\t\tq.Add(\"status\", ctReq.Status)\n\t}\n\tif ctReq.Filter != \"\" {\n\t\tq.Add(\"filter\", ctReq.Filter)\n\t}\n\tif ctReq.Sort != \"\" {\n\t\tq.Add(\"sort\", ctReq.Sort)\n\t}\n\tif ctReq.IncludeTransparency {\n\t\tq.Add(\"include-transparency\", strconv.FormatBool(ctReq.IncludeTransparency))\n\t}\n\tif ctReq.PerPage != 0 {\n\t\tq.Add(\"per-page\", strconv.Itoa(ctReq.PerPage))\n\t}\n\tif ctReq.Page != 0 {\n\t\tq.Add(\"page\", strconv.Itoa(ctReq.Page))\n\t}\n\treq.URL.RawQuery = q.Encode()\n\n\t// Do the requset to server.\n\tresp, err := c.do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\t// Creates formatted response from the server's response.\n\tvar ctResp []CurrenciesTickerResponse\n\tif err := json.NewDecoder(resp.Body).Decode(&ctResp); err != nil {\n\t\treturn nil, err\n\t}\n\treturn ctResp, nil\n}", "func (_FixedSupplyToken *FixedSupplyTokenCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _FixedSupplyToken.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (_DetailedTestToken *DetailedTestTokenCaller) Symbol(opts 
*bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _DetailedTestToken.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_Erc777 *Erc777Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _Erc777.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (op CosinusOperator) GetSupportedSymbols() map[string]bool {\n\tsupportedSymbols := make(map[string]bool)\n\tsupportedSymbols[\"cos\"] = true\n\treturn supportedSymbols\n}", "func (_Erc20Mock *Erc20MockCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _Erc20Mock.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (_FCToken *FCTokenCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _FCToken.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_ERC20 *ERC20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ERC20.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (c *Coinbene) GetTrades(symbol string, limit int64) (Trades, error) {\n\tresp := struct {\n\t\tData [][]string `json:\"data\"`\n\t}{}\n\n\tparams := url.Values{}\n\tparams.Set(\"symbol\", symbol)\n\tparams.Set(\"limit\", strconv.FormatInt(limit, 10))\n\tpath := common.EncodeURLValues(coinbeneAPIVersion+coinbeneGetTrades, params)\n\terr := c.SendHTTPRequest(exchange.RestSpot, path, spotMarketTrades, &resp)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar trades Trades\n\tfor x := range resp.Data {\n\t\ttm, err := time.Parse(time.RFC3339, resp.Data[x][4])\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tprice, err := strconv.ParseFloat(resp.Data[x][1], 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tvolume, err := strconv.ParseFloat(resp.Data[x][2], 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\ttrades = append(trades, TradeItem{\n\t\t\tCurrencyPair: resp.Data[x][0],\n\t\t\tPrice: price,\n\t\t\tVolume: volume,\n\t\t\tDirection: resp.Data[x][3],\n\t\t\tTradeTime: tm,\n\t\t})\n\t}\n\treturn trades, nil\n}", "func (_Contract *ContractSession) Symbol() (string, error) {\n\treturn _Contract.Contract.Symbol(&_Contract.CallOpts)\n}", "func (_FixedSupplyToken *FixedSupplyTokenSession) Symbol() (string, error) {\n\treturn _FixedSupplyToken.Contract.Symbol(&_FixedSupplyToken.CallOpts)\n}", "func (api *API) GetMarketHistory(bSec uint32, start, end types.Time) ([]*MarketHistory, error) {\n\tvar resp []*MarketHistory\n\terr := api.call(\"market_history\", \"get_market_history\", []interface{}{bSec, start, end}, &resp)\n\treturn resp, err\n}", "func (_CrToken *CrTokenCaller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _CrToken.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func getStockPrices(w http.ResponseWriter, stockSymbol string, 
numberOfDays int) string {\n\tfullRemoteApiUrl := fmt.Sprintf(\"%s%s\", fullRemoteApiUrlPrefix, stockSymbol)\n\n\t// Don't log as it exposes the APIKEY.\n\t//log.Printf(\"Sending GET request to remote API %s for stock symbol %s\", fullRemoteApiUrl, stockSymbol)\n\tlog.Printf(\"Sending GET request to remote API for stock symbol %s\", stockSymbol)\n\n\tresp, err := http.Get(fullRemoteApiUrl)\n\n\tif err != nil {\n\t\tlog.Printf(\"Error getting stock price from remote API for stock symbol %s due to: %v\", stockSymbol, err)\n\t\thttp.Error(w, \"Error getting stock price from remote API\", http.StatusInternalServerError)\n\t\treturn \"\"\n\t}\n\n\trespData, err := ioutil.ReadAll(resp.Body)\n\n\t//log.Printf(\"GET request to remote API for stock symbol %s returned: %s\", stockSymbol, respData)\n\n\tif err != nil {\n\t\tlog.Printf(\"Error reading response from remote API for stock symbol %s due to: %v\", stockSymbol, err)\n\t\thttp.Error(w, \"Error reading response from remote API\", resp.StatusCode)\n\t\treturn \"\"\n\t}\n\n\tvar jsonStockPrices StockPrices\n\terr = json.Unmarshal(respData, &jsonStockPrices)\n\tif err != nil {\n\t\tlog.Printf(\"Error unmarshalling response for stock symbol %s due to: %v\", stockSymbol, err)\n\t\thttp.Error(w, \"Error unmarshalling response\", http.StatusInternalServerError)\n\t\treturn \"\"\n\t}\n\n\tvar serviceResp string\n\tserviceResp, err = buildServiceResp(stockSymbol, numberOfDays, jsonStockPrices)\n\tif err != nil {\n\t\tlog.Printf(\"Error building service response for stock symbol %s due to: %v\", stockSymbol, err)\n\t\thttp.Error(w, \"Error building service response\", http.StatusInternalServerError)\n\t\treturn \"\"\n\t}\n\n\treturn serviceResp\n}", "func (_ERC721 *ERC721Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar out []interface{}\n\terr := _ERC721.contract.Call(opts, &out, \"symbol\")\n\n\tif err != nil {\n\t\treturn *new(string), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(string)).(*string)\n\n\treturn out0, err\n\n}", "func (_DemoERC20 *DemoERC20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _DemoERC20.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}", "func (m *JSONFileManager) GetAllSymbols() []string {\n\treturn m.symbols\n}", "func NewGetSymbolsRequest(server string) (*http.Request, error) {\n\tvar err error\n\n\tserverURL, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\toperationPath := fmt.Sprintf(\"/symbols\")\n\tif operationPath[0] == '/' {\n\t\toperationPath = operationPath[1:]\n\t}\n\toperationURL := url.URL{\n\t\tPath: operationPath,\n\t}\n\n\tqueryURL := serverURL.ResolveReference(&operationURL)\n\n\treq, err := http.NewRequest(\"GET\", queryURL.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "func GetHistoricalPrices(baseURL string, symbol string, timeframe string, limit int) []t.HistoricalPrice {\n\tvar url strings.Builder\n\n\tfmt.Fprintf(&url, \"%s/klines?symbol=%s&interval=%s&limit=%d\", baseURL, symbol, timeframe, limit)\n\tdata, err := h.Get(url.String())\n\tif err != nil {\n\t\treturn nil\n\t}\n\n\tvar hPrices []t.HistoricalPrice\n\tfor _, data := range gjson.ParseBytes(data).Array() {\n\t\td := data.Array()\n\t\tp := t.HistoricalPrice{\n\t\t\tSymbol: symbol,\n\t\t\tTime: d[0].Int(),\n\t\t\tOpen: d[1].Float(),\n\t\t\tHigh: d[2].Float(),\n\t\t\tLow: d[3].Float(),\n\t\t\tClose: d[4].Float(),\n\t\t}\n\t\thPrices = append(hPrices, p)\n\t}\n\treturn hPrices\n}", 
"func (_Erc20 *Erc20Session) Symbol() (string, error) {\n\treturn _Erc20.Contract.Symbol(&_Erc20.CallOpts)\n}", "func (_DetailedERC20 *DetailedERC20Caller) Symbol(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _DetailedERC20.contract.Call(opts, out, \"symbol\")\n\treturn *ret0, err\n}" ]
[ "0.7717973", "0.7574411", "0.71720934", "0.71366954", "0.6852238", "0.68451613", "0.66307586", "0.64887947", "0.6440578", "0.64132583", "0.63948405", "0.63337904", "0.6318402", "0.626815", "0.6262091", "0.6139871", "0.61348015", "0.6100352", "0.6099758", "0.6048265", "0.60227984", "0.60125035", "0.5984093", "0.59774977", "0.5946976", "0.5919543", "0.5912924", "0.5909036", "0.59059185", "0.589054", "0.5875695", "0.58731866", "0.58694553", "0.5847992", "0.5845089", "0.58320427", "0.5823019", "0.5805881", "0.57958525", "0.57757175", "0.5752578", "0.5745396", "0.5740267", "0.5736184", "0.5729984", "0.57228976", "0.57172626", "0.56984574", "0.5697849", "0.569557", "0.5693157", "0.5687445", "0.5687339", "0.5681472", "0.5670224", "0.5660023", "0.5646974", "0.5643536", "0.56424", "0.56424", "0.5640619", "0.5637725", "0.5637268", "0.5632422", "0.5632271", "0.5619964", "0.56134385", "0.56111157", "0.5610175", "0.56053007", "0.5600022", "0.55999726", "0.5594977", "0.55923116", "0.5590192", "0.55863625", "0.5581816", "0.5576983", "0.55754", "0.5571354", "0.5568205", "0.5564964", "0.55425787", "0.55412734", "0.55395484", "0.55388194", "0.55141395", "0.5510362", "0.55102915", "0.5493139", "0.5492607", "0.5491136", "0.5490837", "0.549014", "0.54809964", "0.54794824", "0.54682153", "0.54658484", "0.5464938", "0.5460962" ]
0.7291433
2
GetAssetTimestamp get tsStart & tsEnd for asset
func (db2 *DB2) GetAssetTimestamp(ctx context.Context, market string, symbol string, tsStart int64, tsEnd int64) (int64, int64, error) {
	if market == "" || tradingdb2utils.IndexOfStringSlice(db2.cfg.DB2Markets, market, 0) < 0 {
		return 0, 0, ErrInvalidMarket
	}

	if symbol == "" {
		return 0, 0, ErrInvalidSymbol
	}

	os, oe, isok := db2.getCacheAssetTimestamp(symbol, tsStart, tsEnd)
	if isok {
		return os, oe, nil
	}

	is := tsStart
	ie := tsEnd

	if tsStart > 0 && tsEnd <= 0 {
		tsEnd = time.Now().Unix()
	}

	var rmints int64
	var rmaxts int64
	var mints int64
	var maxts int64

	rmints = tsEnd
	rmaxts = 0
	mints = tsEnd
	maxts = 0

	err := db2.AnkaDB.ForEachWithPrefix(ctx, market, makeCandlesDB2KeyPrefix(market, symbol), func(key string, buf []byte) error {
		cc := &tradingpb.Candles{}
		err := proto.Unmarshal(buf, cc)
		if err != nil {
			return err
		}

		for _, v := range cc.Candles {
			if rmints > v.Ts {
				rmints = v.Ts
			}

			if rmaxts < v.Ts {
				rmaxts = v.Ts
			}

			if v.Ts >= tsStart && v.Ts <= tsEnd {
				if mints > v.Ts {
					mints = v.Ts
				}

				if maxts < v.Ts {
					maxts = v.Ts
				}
			}
		}

		return nil
	})
	if err != nil {
		return 0, 0, err
	}

	db2.updCacheAssetTimestamp(symbol, is, ie, mints, maxts, rmints, rmaxts)

	return mints, maxts, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetTimestamp(mbs *MrtBufferStack) time.Time {\n\tmrth := mbs.MrthBuf.(protoparse.MRTHeaderer).GetHeader()\n\tts := time.Unix(int64(mrth.Timestamp), 0)\n\treturn ts\n}", "func (em *IGMedia) GetTimestamp() int64 {\n\treturn em.TakenAtTimestamp\n}", "func (ms Span) StartTimestamp() pcommon.Timestamp {\n\treturn pcommon.Timestamp(ms.orig.StartTimeUnixNano)\n}", "func (o LookupRegionCommitmentResultOutput) StartTimestamp() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupRegionCommitmentResult) string { return v.StartTimestamp }).(pulumi.StringOutput)\n}", "func (self *ResTransaction)GetTimestamp()time.Time{\n return self.Timestamp\n}", "func (_AggregatorV2V3Interface *AggregatorV2V3InterfaceCaller) GetTimestamp(opts *bind.CallOpts, roundId *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _AggregatorV2V3Interface.contract.Call(opts, &out, \"getTimestamp\", roundId)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "func (fctx *AvFormatContext) AvGetOutputTimestamp(stream int, dts, wall *int) int {\n\treturn int(C.av_get_output_timestamp((*C.struct_AVFormatContext)(fctx),\n\t\tC.int(stream), (*C.int64_t)(unsafe.Pointer(&dts)), (*C.int64_t)(unsafe.Pointer(&wall))))\n}", "func (pva PeriodicVestingAccount) GetStartTime() int64 {\n\treturn pva.StartTime\n}", "func GetTs() int64 {\n\treturn time.Now().UnixNano() / 1e6\n}", "func (cva ContinuousVestingAccount) GetStartTime() int64 {\n\treturn cva.StartTime\n}", "func (_TellorMesosphere *TellorMesosphereCaller) GetTimestampbyRequestIDandIndex(opts *bind.CallOpts, _requestId *big.Int, _index *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _TellorMesosphere.contract.Call(opts, &out, \"getTimestampbyRequestIDandIndex\", _requestId, _index)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "func (el Elements) Timestamp(i int) int64 {\n\tswitch el.Type {\n\tcase part3.Int32:\n\t\treturn el.I32[i].Ts\n\tcase part3.Float32:\n\t\treturn el.F32[i].Ts\n\tcase part3.Float64:\n\t\treturn el.F64[i].Ts\n\tdefault:\n\t\treturn int64(-1)\n\t}\n}", "func (o *HyperflexVmSnapshotInfoAllOf) GetSourceTimestamp() int64 {\n\tif o == nil || o.SourceTimestamp == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.SourceTimestamp\n}", "func (va ClawbackVestingAccount) GetStartTime() int64 {\n\treturn va.StartTime\n}", "func (m *SharePostRequestBody) GetStartDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {\n return m.startDateTime\n}", "func (_Aggregator *AggregatorCaller) GetTimestamp(opts *bind.CallOpts, roundId *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _Aggregator.contract.Call(opts, &out, \"getTimestamp\", roundId)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "func AvIndexSearchTimestamp(st *Stream, t int64, f int) int {\n\treturn int(C.av_index_search_timestamp((*C.struct_AVStream)(st), C.int64_t(t), C.int(f)))\n}", "func AvIndexSearchTimestamp(st *Stream, t int64, f int) int {\n\treturn int(C.av_index_search_timestamp((*C.struct_AVStream)(st), C.int64_t(t), C.int(f)))\n}", "func (mg *Graph) GetTimestamp() string {\n\treturn mg.ts.Get(mg.graph)\n}", "func (_AggregatorV2V3Interface *AggregatorV2V3InterfaceSession) GetTimestamp(roundId *big.Int) (*big.Int, 
error) {\n\treturn _AggregatorV2V3Interface.Contract.GetTimestamp(&_AggregatorV2V3Interface.CallOpts, roundId)\n}", "func Timestamp(scope *Scope) (ts tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"Timestamp\",\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (c *Context) getTimestamp() uint64 {\n\treturn uint64(c.Config.Timer.Now().UnixNano()) / c.Config.TimestampIncrement * c.Config.TimestampIncrement\n}", "func (o *SyntheticMonitorStepResult) GetStartTimestamp() int64 {\n\tif o == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\n\treturn o.StartTimestamp\n}", "func (_AggregatorV2V3Interface *AggregatorV2V3InterfaceCallerSession) GetTimestamp(roundId *big.Int) (*big.Int, error) {\n\treturn _AggregatorV2V3Interface.Contract.GetTimestamp(&_AggregatorV2V3Interface.CallOpts, roundId)\n}", "func getTimeInfo(activity *driveactivity.DriveActivity) string {\n\tif activity.Timestamp != \"\" {\n\t\treturn activity.Timestamp\n\t}\n\tif activity.TimeRange != nil {\n\t\treturn activity.TimeRange.EndTime\n\t}\n\treturn \"unknown\"\n}", "func (s *Structured) GetTimestamp() time.Time {\n\treturn s.cloudEvent.EventTime\n}", "func (t TsTimestampPoint) Timestamp() time.Time {\n\treturn t.timestamp\n}", "func GetStartTime() time.Time {\n\treturn startAtTime\n}", "func (o *HyperflexSnapshotStatus) GetTimestamp() int64 {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func (o *EventAttributes) GetTimestamp() int64 {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func (ec *EthereumChain) BlockTimestamp(blockNumber *big.Int) (uint64, error) {\n\tctx, cancelCtx := context.WithTimeout(context.Background(), 1*time.Minute)\n\tdefer cancelCtx()\n\n\theader, err := ec.client.HeaderByNumber(ctx, blockNumber)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\treturn header.Time, nil\n}", "func (ms Span) EndTimestamp() pcommon.Timestamp {\n\treturn pcommon.Timestamp(ms.orig.EndTimeUnixNano)\n}", "func (t *TsBulk) GetTimestamp() (time.Time, error) {\n\tvar content C.qdb_timespec_t\n\terr := C.qdb_ts_row_get_timestamp(t.table, C.qdb_size_t(t.index), &content)\n\tt.index++\n\treturn content.toStructG(), makeErrorOrNil(err)\n}", "func (o *DeviceResourceVersionValueWeb) GetTimestamp() string {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func (o *DeviceParameterValue) GetTimestamp() string {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func getBoundaries(r *pb.GetServiceKpiRequest) (int64, int64) {\n\tstartTs := r.GetStartTimestamp()\n\tendTs := r.GetEndTimestamp()\n\tif endTs == 0 {\n\t\tendTs = time.Date(2999, 12, 31, 23, 59, 59, 999, time.UTC).Unix()\n\t}\n\treturn startTs, endTs\n}", "func Timestamp() int64 {\n\treturn atomic.LoadInt64(&_genesisTs)\n}", "func (n *Parser) getTimestamp(evMap map[string]interface{}) time.Time {\n\tvar (\n\t\tsetBothFieldsMsg = \"Timestamp format and field must both be set to be used, one was not. Using current time instead.\"\n\t)\n\n\t// Custom (user-defined) timestamp field/format takes priority over the\n\t// default parsing behavior. 
Try that first.\n\tif n.conf.TimeFieldFormat != \"\" || n.conf.TimeFieldName != \"\" {\n\t\tif n.conf.TimeFieldFormat == \"\" || n.conf.TimeFieldName == \"\" {\n\t\t\tlogrus.Debug(setBothFieldsMsg)\n\t\t\treturn httime.Now()\n\t\t}\n\t\treturn httime.GetTimestamp(evMap, n.conf.TimeFieldName, n.conf.TimeFieldFormat)\n\t}\n\n\tif _, ok := evMap[\"time_local\"]; ok {\n\t\treturn httime.GetTimestamp(evMap, \"time_local\", commonLogFormatTimeLayout)\n\t}\n\n\tif _, ok := evMap[\"time_iso8601\"]; ok {\n\t\treturn httime.GetTimestamp(evMap, \"time_iso8601\", iso8601TimeLayout)\n\t}\n\n\tif _, ok := evMap[\"msec\"]; ok {\n\t\treturn httime.GetTimestamp(evMap, \"msec\", \"\")\n\t}\n\n\treturn httime.GetTimestamp(evMap, \"\", \"\")\n}", "func (ms Int64DataPoint) StartTime() TimestampUnixNano {\n\treturn TimestampUnixNano((*ms.orig).StartTimeUnixNano)\n}", "func (o *ExportData) GetTimestamp() time.Time {\n\treturn time.Now().UTC()\n}", "func (b *BaseEvent) GetTimestamp() time.Time {\n\treturn b.Timestamp\n}", "func (its *OperationID) GetTimestamp() *Timestamp {\n\treturn &Timestamp{\n\t\tEra: its.Era,\n\t\tLamport: its.Lamport,\n\t\tCUID: its.CUID,\n\t\tDelimiter: 0,\n\t}\n}", "func (ms SummaryDataPoint) StartTime() TimestampUnixNano {\n\treturn TimestampUnixNano((*ms.orig).StartTimeUnixNano)\n}", "func (s *Shipper) Timestamps() (minTime, maxSyncTime int64, err error) {\n\tmeta, err := ReadMetaFile(s.dir)\n\tif err != nil {\n\t\treturn 0, 0, errors.Wrap(err, \"read shipper meta file\")\n\t}\n\t// Build a map of blocks we already uploaded.\n\thasUploaded := make(map[ulid.ULID]struct{}, len(meta.Uploaded))\n\tfor _, id := range meta.Uploaded {\n\t\thasUploaded[id] = struct{}{}\n\t}\n\n\tminTime = math.MaxInt64\n\tmaxSyncTime = math.MinInt64\n\n\tmetas, err := s.blockMetasFromOldest()\n\tif err != nil {\n\t\treturn 0, 0, err\n\t}\n\tfor _, m := range metas {\n\t\tif m.MinTime < minTime {\n\t\t\tminTime = m.MinTime\n\t\t}\n\t\tif _, ok := hasUploaded[m.ULID]; ok && m.MaxTime > maxSyncTime {\n\t\t\tmaxSyncTime = m.MaxTime\n\t\t}\n\t}\n\n\tif minTime == math.MaxInt64 {\n\t\t// No block yet found. We cannot assume any min block size so propagate 0 minTime.\n\t\tminTime = 0\n\t}\n\treturn minTime, maxSyncTime, nil\n}", "func (dva DelayedVestingAccount) GetStartTime() int64 {\n\treturn 0\n}", "func (ms DoubleDataPoint) StartTime() TimestampUnixNano {\n\treturn TimestampUnixNano((*ms.orig).StartTimeUnixNano)\n}", "func (_TellorMesosphere *TellorMesosphereSession) GetTimestampbyRequestIDandIndex(_requestId *big.Int, _index *big.Int) (*big.Int, error) {\n\treturn _TellorMesosphere.Contract.GetTimestampbyRequestIDandIndex(&_TellorMesosphere.CallOpts, _requestId, _index)\n}", "func Timestamp(ts *tspb.Timestamp) (time.Time, error) {\n\t// Don't return the zero value on error, because corresponds to a valid\n\t// timestamp. 
Instead return whatever time.Unix gives us.\n\tvar t time.Time\n\tif ts == nil {\n\t\tt = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp\n\t} else {\n\t\tt = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC()\n\t}\n\treturn t, validateTimestamp(ts)\n}", "func (plva PermanentLockedAccount) GetStartTime() int64 {\n\treturn 0\n}", "func (_TellorMesosphere *TellorMesosphereCallerSession) GetTimestampbyRequestIDandIndex(_requestId *big.Int, _index *big.Int) (*big.Int, error) {\n\treturn _TellorMesosphere.Contract.GetTimestampbyRequestIDandIndex(&_TellorMesosphere.CallOpts, _requestId, _index)\n}", "func (ms HistogramDataPoint) StartTime() TimestampUnixNano {\n\treturn TimestampUnixNano((*ms.orig).StartTimeUnixNano)\n}", "func (o *HyperflexClusterHealthCheckExecutionSnapshot) GetTimestamp() time.Time {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret time.Time\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func (m *UserExperienceAnalyticsDeviceStartupHistory) GetStartTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {\n val, err := m.GetBackingStore().Get(\"startTime\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)\n }\n return nil\n}", "func getTimestamp() uint64 {\n\treturn uint64(time.Since(Epoch).Nanoseconds() / 1e6)\n}", "func (x *UpdateResult) GetTimestamp() int64 {\n\tif x != nil {\n\t\treturn x.Timestamp\n\t}\n\treturn 0\n}", "func (a *Account) Timestamp() uint32 { return a.timestamp }", "func (o TimelineOutput) StartTime() pulumi.IntPtrOutput {\n\treturn o.ApplyT(func(v *Timeline) pulumi.IntPtrOutput { return v.StartTime }).(pulumi.IntPtrOutput)\n}", "func (bi *BlockInfo) RequestTimestamp(requestIndex uint16) time.Time {\n\treturn bi.Timestamp.Add(time.Duration(-(bi.TotalRequests - requestIndex - 1)) * time.Nanosecond)\n}", "func (c CommitterProbe) GetStartTS() uint64 {\n\treturn c.startTS\n}", "func tokenTimes(t *Token) (start, end time.Time) {\n\tstart = time.Unix(int64(t.CreatedAt), 0)\n\tend = time.Unix(int64(t.CreatedAt)+int64(t.ExpiresIn), 0)\n\treturn\n}", "func (o *Kanban) GetTimestamp() time.Time {\n\tvar dt interface{} = o.UpdatedAt\n\tswitch v := dt.(type) {\n\tcase int64:\n\t\treturn datetime.DateFromEpoch(v).UTC()\n\tcase string:\n\t\ttv, err := datetime.ISODateToTime(v)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn tv.UTC()\n\tcase time.Time:\n\t\treturn v.UTC()\n\t}\n\tpanic(\"not sure how to handle the date time format for Kanban\")\n}", "func (o *LogContent) GetTimestamp() time.Time {\n\tif o == nil || o.Timestamp == nil {\n\t\tvar ret time.Time\n\t\treturn ret\n\t}\n\treturn *o.Timestamp\n}", "func (o LookupInstanceResultOutput) LastStartTimestamp() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupInstanceResult) string { return v.LastStartTimestamp }).(pulumi.StringOutput)\n}", "func Timestamp(t, i uint32) Val {\n\tv := Val{t: bsontype.Timestamp}\n\tv.bootstrap[0] = byte(i)\n\tv.bootstrap[1] = byte(i >> 8)\n\tv.bootstrap[2] = byte(i >> 16)\n\tv.bootstrap[3] = byte(i >> 24)\n\tv.bootstrap[4] = byte(t)\n\tv.bootstrap[5] = byte(t >> 8)\n\tv.bootstrap[6] = byte(t >> 16)\n\tv.bootstrap[7] = byte(t >> 24)\n\treturn v\n}", "func (bn *BinanceFetcher) getTimeStart() time.Time {\n\ttimeStart := time.Time{}\n\n\tfor _, symbol := range bn.symbols {\n\t\tfor _, baseCurrency := range bn.baseCurrencies {\n\t\t\tsymbolDir := fmt.Sprintf(\"binance_%s-%s\", symbol, baseCurrency)\n\t\t\ttbk := io.NewTimeBucketKey(symbolDir + \"/\" + 
bn.baseTimeframe.String + \"/OHLCV\")\n\t\t\tlastTimestamp := findLastTimestamp(tbk)\n\t\t\tlog.Info(\"lastTimestamp for %s = %v\", symbolDir, lastTimestamp)\n\t\t\tif timeStart.IsZero() || (!lastTimestamp.IsZero() && lastTimestamp.Before(timeStart)) {\n\t\t\t\ttimeStart = lastTimestamp\n\t\t\t}\n\t\t}\n\t}\n\n\t// Set start time if not given.\n\tif !bn.queryStart.IsZero() {\n\t\ttimeStart = bn.queryStart\n\t} else {\n\t\ttimeStart = time.Now().UTC().Add(-bn.baseTimeframe.Duration)\n\t}\n\n\treturn timeStart\n}", "func (m AggregationRecord) GetTimestamp() time.Time {\n\treturn m.Timestamp\n}", "func (et ExfatTimestamp) Second() int {\n\treturn int(et & 31)\n}", "func GetTimestamp(rxInfo []*gw.UplinkRXInfo) time.Time {\n\tfor i := range rxInfo {\n\t\tif rxInfo[i].Time != nil {\n\t\t\tt, err := ptypes.Timestamp(rxInfo[i].Time)\n\t\t\tif err == nil {\n\t\t\t\treturn t\n\t\t\t}\n\t\t}\n\t}\n\n\treturn time.Now()\n}", "func GetTimeFromTS(ts uint64) time.Time {\n\tms := ExtractPhysical(ts)\n\treturn time.Unix(ms/1e3, (ms%1e3)*1e6)\n}", "func (txn TxnProbe) GetStartTime() time.Time {\n\treturn txn.startTime\n}", "func (this *ValueTransaction) GetTimestamp() (result uint) {\n\tthis.timestampMutex.RLock()\n\tif this.timestamp == nil {\n\t\tthis.timestampMutex.RUnlock()\n\t\tthis.timestampMutex.Lock()\n\t\tdefer this.timestampMutex.Unlock()\n\t\tif this.timestamp == nil {\n\t\t\ttimestamp := uint(trinary.TritsToInt(this.trits[TIMESTAMP_OFFSET:TIMESTAMP_END]))\n\n\t\t\tthis.timestamp = &timestamp\n\t\t}\n\t} else {\n\t\tdefer this.timestampMutex.RUnlock()\n\t}\n\n\tresult = *this.timestamp\n\n\treturn\n}", "func (c *ConvertAllDayProcessor) getTimestamps(event ical.Event, loc *time.Location) (time.Time, time.Time, error) {\n\tvar startTimestamp time.Time\n\tvar endTimestamp time.Time\n\n\tif timestamp, err := event.Props.Get(ical.PropDateTimeStart).DateTime(loc); err != nil {\n\t\treturn startTimestamp, endTimestamp, err\n\t} else {\n\t\tstartTimestamp = timestamp\n\t}\n\tif timestamp, err := event.Props.Get(ical.PropDateTimeEnd).DateTime(loc); err != nil {\n\t\treturn startTimestamp, endTimestamp, err\n\t} else {\n\t\tendTimestamp = timestamp\n\t}\n\treturn startTimestamp, endTimestamp, nil\n}", "func (i *Iterator) Timestamp() int64 {\n\treturn i.n.timestamp\n}", "func (a *Client) GetAssetParams(params *GetAssetParamsParams) (*GetAssetParamsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetAssetParamsParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"getAssetParams\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/asset/parameters\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &GetAssetParamsReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetAssetParamsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for getAssetParams: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (rt RouteTable) GetBucketTimestamp(id string, bucket Bucket) (time.Time, error) {\n\treturn rt.dht.GetExpirationTime([]byte(id)), nil\n}", "func (_BaseContentSpace *BaseContentSpaceCaller) VersionTimestamp(opts *bind.CallOpts, arg0 *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _BaseContentSpace.contract.Call(opts, &out, \"versionTimestamp\", arg0)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "func (_TellorMesosphere *TellorMesosphereCaller) Timestamps(opts *bind.CallOpts, arg0 *big.Int, arg1 *big.Int) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _TellorMesosphere.contract.Call(opts, &out, \"timestamps\", arg0, arg1)\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn out0, err\n\n}", "func ts() string {\n\tt := time.Now()\n\treturn strconv.FormatInt(t.UnixNano(), 10)\n}", "func getTimestamp() string {\n\treturn strconv.FormatInt(time.Now().UTC().Unix(), 10)\n}", "func determineStepStartTime(index int, running *corev1.ContainerStateRunning, terminated *corev1.ContainerStateTerminated, stageSteps []v1.CoreActivityStep) metav1.Time {\n\tvar startedAt metav1.Time\n\tif len(stageSteps) > 0 {\n\t\tpreviousStep := stageSteps[index-1]\n\t\tif previousStep.CompletedTimestamp != nil {\n\t\t\tstartedAt = *previousStep.CompletedTimestamp\n\t\t}\n\t} else {\n\t\tif running != nil {\n\t\t\tstartedAt = running.StartedAt\n\t\t} else if terminated != nil {\n\t\t\tstartedAt = terminated.StartedAt\n\t\t}\n\t}\n\treturn startedAt\n}", "func getTimeStamp(A []int) time {\n\ttimeStamp := time{}\n\ttimeStamp.hour = A[0]*10 + A[1]\n\ttimeStamp.minute = A[2]*10 + A[3]\n\treturn timeStamp\n}", "func (p *SASQueryParameters) StartTime() time.Time {\n\treturn p.startTime\n}", "func (e *Entry) Timestamp() uint64 {\n\tif e.stamp == 0 {\n\t\treturn uint64(time.Now().Unix())\n\t}\n\treturn e.stamp\n}", "func (b *Blob) Start() time.Time {\n\treturn b.start\n}", "func tdParseTimestamp( t string, a string ) int64 {\n n, err := strconv.ParseInt( t, 10, 64 )\n if err == nil {\n // NR feed is in Java time (millis) so convert to Unix time (seconds)\n n := n / int64(1000)\n\n if a != \"\" {\n dt := time.Now().Unix() - n\n // Hide anomaly around local midnight where a blip is recorded going from\n // +- 2400 (14400 when merged into 1m samples)\n if dt > -2400 && dt < 2400 {\n statistics.Set( \"td.\" + a, dt )\n }\n }\n\n return n\n }\n return 0\n}", "func (a *assigner) extractTimestamp(line []byte) (time.Time, []byte) {\n\tnanos, offset := influx.ExtractTimestamp(line)\n\tif offset < 0 {\n\t\treturn a.clock.Now(), line\n\t}\n\treturn time.Unix(0, nanos), line[:offset-1]\n}", "func GenerateTimestampArray(startTS, endTS int64) (tsArray []int64) {\n\tdaysNbr := (endTS - startTS) / (24 * 3600)\n\tvar i int64\n\tfor i = 0; i <= daysNbr; i++ {\n\t\ttsArray = append(tsArray, startTS+(i*24*3600))\n\t}\n\treturn tsArray\n}", "func (h *HUOBIHADAX) GetTimestamp() (int64, error) {\n\ttype response struct {\n\t\tResponse\n\t\tTimestamp int64 `json:\"data\"`\n\t}\n\n\tvar result response\n\turlPath := fmt.Sprintf(\"%s/v%s/%s\", h.APIUrl, huobihadaxAPIVersion, huobihadaxTimestamp)\n\n\terr := h.SendHTTPRequest(urlPath, &result)\n\tif result.ErrorMessage != \"\" {\n\t\treturn 0, errors.New(result.ErrorMessage)\n\t}\n\treturn result.Timestamp, err\n}", 
"func (o *UcsdBackupInfoAllOf) GetStartTime() time.Time {\n\tif o == nil || o.StartTime == nil {\n\t\tvar ret time.Time\n\t\treturn ret\n\t}\n\treturn *o.StartTime\n}", "func (utils MockJobLogUtils) GetJobRunStartTimestamp(job string, run int) (uint64, error) {\n\tvalue, ok := utils.MockStartTimestamps[run]\n\tif !ok {\n\t\treturn 0, fmt.Errorf(\"Run number %v not a key in the mock start timestamps map\", run)\n\t}\n\treturn value, nil\n}", "func (t TsTimestampAggregation) Range() TsRange {\n\treturn t.rng\n}", "func (n *ssvNode) getSlotStartTime(slot uint64) time.Time {\n\ttimeSinceGenesisStart := slot * uint64(n.ethNetwork.SlotDurationSec().Seconds())\n\tstart := time.Unix(int64(n.ethNetwork.MinGenesisTime()+timeSinceGenesisStart), 0)\n\treturn start\n}", "func (o GetRulesRuleOutput) ModifiedTimestamp() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetRulesRule) string { return v.ModifiedTimestamp }).(pulumi.StringOutput)\n}", "func (o *Echo) GetTimestamp() time.Time {\n\tvar dt interface{} = o.UpdatedAt\n\tswitch v := dt.(type) {\n\tcase int64:\n\t\treturn datetime.DateFromEpoch(v).UTC()\n\tcase string:\n\t\ttv, err := datetime.ISODateToTime(v)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn tv.UTC()\n\tcase time.Time:\n\t\treturn v.UTC()\n\t}\n\tpanic(\"not sure how to handle the date time format for Echo\")\n}", "func (o *SyntheticMonitorStepResult) GetStartTimestampOk() (*int64, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.StartTimestamp, true\n}", "func (o LookupRegionCommitmentResultOutput) EndTimestamp() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupRegionCommitmentResult) string { return v.EndTimestamp }).(pulumi.StringOutput)\n}", "func (ms HistogramBucketExemplar) Timestamp() TimestampUnixNano {\n\treturn TimestampUnixNano((*ms.orig).TimeUnixNano)\n}", "func (bse *BaseEvent) Timestamp(timestamp int64) {\n\tbse.Timestmp = timestamp\n}", "func Expires_AtTimestamp(t *float64) Expires {\n\t_init_.Initialize()\n\n\tvar returns Expires\n\n\t_jsii_.StaticInvoke(\n\t\t\"monocdk.aws_s3_deployment.Expires\",\n\t\t\"atTimestamp\",\n\t\t[]interface{}{t},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (client *BaseClient) GetTimestamp() string {\n\tgmt := time.FixedZone(\"GMT\", 0)\n\treturn time.Now().In(gmt).Format(\"2006-01-02T15:04:05Z\")\n}" ]
[ "0.5527146", "0.55097777", "0.54911846", "0.54698044", "0.53512263", "0.5291507", "0.5231119", "0.5174024", "0.5166227", "0.5155524", "0.5129262", "0.5126631", "0.51258165", "0.50907457", "0.50399584", "0.5039933", "0.5006237", "0.5006237", "0.49593735", "0.495261", "0.49291524", "0.49168673", "0.49152112", "0.48994058", "0.48931322", "0.4892752", "0.48926213", "0.4884847", "0.48743662", "0.48697314", "0.48388943", "0.48133117", "0.48109952", "0.4798643", "0.4792229", "0.4790318", "0.47838092", "0.4777977", "0.47774264", "0.4772353", "0.47682694", "0.47615036", "0.4758724", "0.47551897", "0.47415945", "0.4739093", "0.47295693", "0.47177303", "0.4713868", "0.47114483", "0.4705516", "0.47032565", "0.47030282", "0.46946427", "0.46868485", "0.46718347", "0.4668139", "0.46642315", "0.46456245", "0.4640951", "0.46335566", "0.46334478", "0.46262604", "0.46223307", "0.4616937", "0.46127304", "0.4607109", "0.4605384", "0.46051437", "0.46023294", "0.46004575", "0.4584873", "0.45777106", "0.45739657", "0.45736885", "0.45715818", "0.45676288", "0.45610023", "0.4560333", "0.4555246", "0.4554949", "0.45501003", "0.4548713", "0.4546667", "0.45465618", "0.45459482", "0.45454255", "0.4545065", "0.45422012", "0.45348164", "0.45263112", "0.45242015", "0.45145744", "0.4506946", "0.4505489", "0.4505297", "0.45017445", "0.44991654", "0.44981158", "0.44973716" ]
0.6679342
0
InitializeTelegramBot gets the authorized account of telegram bot key
func InitializeTelegramBot(ctx context.Context) {
	ctx, cancel := context.WithTimeout(ctx, time.Minute)
	defer cancel()

	c := make(chan struct{})
	go func() {
		bot, err := tgbotapi.NewBotAPI(Config.telegram_bot_token)
		if err != nil {
			log.Fatalf("failed to initialize bot API: %v", err)
		}
		log.Infof("authorized as account %s", bot.Self.UserName)
		c <- struct{}{}
	}()

	select {
	case <-ctx.Done():
		log.Fatalf("failed to initialize telegram bot: %v", ctx.Err())
	case <-c:
	}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func InitTelegramBot(service *memdb.Service) error {\n\tekadashiURL := os.Getenv(ekadashiURL)\n\tif ekadashiURL == \"\" {\n\t\treturn fmt.Errorf(\"server URL cannot be empty\")\n\t}\n\ts := &EkadashiBot{\n\t\tdb: service,\n\t\tserverURL: ekadashiURL,\n\t\tregisterURL: \"/register\",\n\t\tloginURL: \"/login\",\n\t\tshowEkadashiURL: \"/ekadashi/next\",\n\t}\n\ttoken := os.Getenv(ekadashiToken)\n\tif token == \"\" {\n\t\treturn fmt.Errorf(\"ekadashi token cannot be empty\")\n\t}\n\tbot, err := tgbotapi.NewBotAPI(token)\n\tif err != nil {\n\t\treturn err\n\t}\n\tbot.Debug = true\n\tu := tgbotapi.NewUpdate(0)\n\tu.Timeout = 60\n\ts.ResponseEkadashiBot(bot, u)\n\treturn nil\n}", "func initTelegramSender() {\n\tif config.AppConf.Senders.Telegram.BotToken != \"\" {\n\t\tinitTelegramClient()\n\t\tproviders[\"TelegramChannel\"] = sendToTelegramChat\n\t\tlog.Println(\"Telegram sender initialized\")\n\t} else {\n\t\tproviders[\"TelegramChannel\"] = nil\n\t}\n}", "func (bot TipBot) initBotWallet() error {\n\tbotWalletInitialisation.Do(func() {\n\t\t_, err := bot.initWallet(bot.Telegram.Me)\n\t\tif err != nil {\n\t\t\tlog.Errorln(fmt.Sprintf(\"[initBotWallet] Could not initialize bot wallet: %s\", err.Error()))\n\t\t\treturn\n\t\t}\n\t})\n\treturn nil\n}", "func newTelegramBot() *tb.Bot {\n\ttgb, err := tb.NewBot(tb.Settings{\n\t\tToken: internal.Configuration.Telegram.ApiKey,\n\t\tPoller: &tb.LongPoller{Timeout: 60 * time.Second},\n\t\tParseMode: tb.ModeMarkdown,\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn tgb\n}", "func init() {\n\tfile, err := ioutil.ReadFile(\"./token.json\")\n\tif err != nil {\n\t\tlog.Fatal(\"File doesn't exist\")\n\t}\n\tif err := json.Unmarshal(file, &BotKey); err != nil {\n\t\tlog.Fatal(\"Cannot parse token.json\")\n\t}\n}", "func getTelegramAPI(token string) string {\n\treturn fmt.Sprintf(\"https://api.telegram.org/bot%s/\", token)\n}", "func RunBot(ctx *cli.Context) {\n\n\t// err := i18n.SetMessage(\"ru-RU\", \"conf/locals/locale_ru-RU.ini\")\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\t// err = i18n.SetMessage(\"en-US\", \"conf/locals/locale_en-US.ini\")\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\t// i18n.SetDefaultLang(\"ru-RU\")\n\n\tlog.Println(\"Setting loaded\")\n\tlog.Println(setting.App)\n\n\tlog.Println(i18n.Tr(\"ru-RU\", \"my_bank\"))\n\n\t//tgbotapi.APIEndpoint = \"http://localhost:4000/bot%s/%s\"\n\n\ttw, err := tamework.New(setting.App.Telegram.BotToken)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsetting.App.Telegram.BotUsername = tw.Bot().Self.UserName\n\n\tlog.Printf(\"[info] Run bot %s\", setting.App.Telegram.BotUsername)\n\n\tcolor.Cyan(\"%s\", tgbotapi.APIEndpoint)\n\ttw.Bot().Debug = true\n\n\t//tw.Bot().Send(tgbotapi.NewMessage(102710272, \"hello\"))\n\ttw.Locales = append(tw.Locales, i18n.Locale{Lang: \"ru-RU\"}.Tr, i18n.Locale{Lang: \"en-US\"}.Tr)\n\n\ttw.Use(tamework.Recovery())\n\ttw.Use(tamework.Waiterer())\n\ttw.Use(context.Contexter())\n\n\ttw.Use(func(c *context.Context) {\n\t\tcolor.Green(\"[%s] %s\", c.Method, c.Text)\n\t\tif c.Update().ChannelPost != nil {\n\t\t\tcolor.Cyan(\"%+v\", c.Update().ChannelPost)\n\t\t\tcolor.Cyan(\"%+v\", c.Update().ChannelPost.Chat.ID)\n\t\t}\n\t})\n\n\ttw.Prefix(\"/start\", handleStart)\n\n\t//банк\n\ttw.Text(buttons.MyBank, handleBank)\n\ttw.CallbackQuery(buttons.Deposit, handleRefill)\n\ttw.CallbackQuery(\"deposit_btc\", handleBitcoinChoose)\n\t//tw.CallbackQuery(buttons.Reinvest, handleReinvest)\n\t//tw.CallbackQuery(\"reinvest_submit\", 
handleReinvestSubmit)\n\ttw.CallbackQuery(buttons.HistoryOfTransactions, handleOutgoingTransactionHistory)\n\ttw.CallbackQuery(buttons.WithdrawRequest, handleTransactionHistory)\n\ttw.CallbackQuery(buttons.Calc, handleCalc)\n\ttw.CallbackQuery(\"deposit_\"+buttons.Qiwi, routers.Qiwi)\n\ttw.CallbackQuery(\"ps_change_qiwi\", routers.ChangeQiwiWalletID)\n\t//tw.CallbackQuery(buttons.WeRecommendMethod, routers.Recommend)\n\t//tw.CallbackQuery(\"manual_sum\", routers.ManualSum)\n\ttw.CallbackQuery(buttons.InvestPlanList, handlePlans)\n\n\t// Partners\n\ttw.Text(buttons.Partners, handlePartners)\n\n\t// About\n\ttw.Text(buttons.AboutService, handleAboutService)\n\n\t//Settings\n\ttw.Text(buttons.Settings, handleSetting)\n\ttw.CallbackQuery(buttons.Language, handleLanguage)\n\ttw.Text(buttons.LanguageButtonsRU[0], handleLanguageChoose)\n\ttw.Text(buttons.LanguageButtonsRU[1], handleLanguageChoose)\n\ttw.CallbackQuery(buttons.InviteLink, handleRefLink)\n\ttw.CallbackQuery(buttons.MyReferrer, handleRef)\n\ttw.CallbackQuery(buttons.WebToken, handleWebtoken)\n\n\ttw.Run()\n}", "func (b *Bot) Init() (err error) {\n\t// set global default timezone\n\tif b.config.Timezone != \"\" {\n\t\ttime.Local, err = time.LoadLocation(b.config.Timezone)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tlog.Info(\"Connecting to slack...\")\n\tb.auth, err = b.slackClient.AuthTest()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"auth error\")\n\t}\n\tclient.AuthResponse = *b.auth\n\tclient.AllChannels, err = b.loadChannels()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"error while fetching public channels\")\n\t}\n\n\terr = b.loadSlackData()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlog.Infof(\"Loaded %d allowed users and %d channels\", len(b.allowedUsers), len(client.AllChannels))\n\tlog.Infof(\"Bot user: @%s with ID %s on workspace %s\", b.auth.User, b.auth.UserID, b.auth.URL)\n\n\tcommands := b.commands.GetCommandNames()\n\tlog.Infof(\"Initialized %d commands:\", len(commands))\n\tlog.Info(strings.Join(commands, \", \"))\n\n\treturn nil\n}", "func InitAuth() error {\n a := auth.NewEnv()\n key, err := a.GetAPIKey()\n if err != nil {\n return err\n }\n secret, err := a.GetSecret()\n if err != nil {\n return err\n }\n\n // main.go/client\n c = coinbase.ApiKeyClient(key, secret)\n return nil\n}", "func InitBot(config *models.BotConfig, members *models.Members) *Bot {\n\n\tb := Bot{\n\t\tToken: config.Token,\n\t\tDlg: map[int64]*Dialog{},\n\t\tUserStrategy: map[int64]string{},\n\t\tMembers: map[int64]bool{},\n\t\tMembersStrategy: make(map[int64]map[string]map[string]*Strategy),\n\t\tRunStrategy: make(chan ExchangeStrategy),\n\t\tStopStrategy: make(chan ExchangeStrategy),\n\t\tpass: config.Password,\n\t}\n\n\t// Create new bot\n\tbot, err := tgbotapi.NewBotAPI(b.Token)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tb.Bot = bot\n\tfor _, m := range members.M {\n\t\tb.Members[m.ChatId] = m.Notification\n\t}\n\n\treturn &b\n}", "func LoginByTelegram(telegram string) (string, bool) {\n\tval, ok := userStorage[telegram]\n\treturn val, ok\n}", "func New() *Telegram {\n\treturn &Telegram{\n\t\ttoken: os.Getenv(\"TOKEN\"),\n\t\tchatID: os.Getenv(\"CHAT_ID\"),\n\t\tendpoint: \"https://api.telegram.org\",\n\t}\n}", "func initBot() {\n\tbotConf := bot.Conf{}\n\terr := config.ParseConfig(beego.AppConfig, &botConf)\n\n\tif err != nil {\n\t\tlog.Fatalf(\"Bot Config Read Error %s\", err)\n\t\treturn\n\t}\n\n\tbotConf.Cmd = NFS.TgService.GetCmd()\n\n\tBot = bot.NewBot(botConf)\n\tBot.Start()\n\n\tgo 
ChannLogger(Bot.Errors(), Bot.Notifications())\n}", "func StartBot(){\n\tbot, err := tgbotapi.NewBotAPI(GetToken())\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\t//bot.Debug = true\n\tbot.Debug = false\n\n\tlog.Printf(\"Authorized on account %s\", bot.Self.UserName)\n\n\tu := tgbotapi.NewUpdate(0)\n\tu.Timeout = 60\n\n\tupdates, err := bot.GetUpdatesChan(u)\n\n\tfor {\n\t\tfor update := range updates {\n\t\t\tprocessor(update)\n\t\t}\n\t}\n}", "func NewTelegram(token string, relayedChannel int64, BotUserID int, debug bool) *Telegram {\n\tbot, err := telegram.NewBotAPI(token)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tbot.Debug = debug\n\treturn &Telegram{\n\t\tPump: cable.NewPump(),\n\t\tclient: bot,\n\t\trelayedChatID: relayedChannel,\n\t\tbotUserID: BotUserID,\n\t}\n}", "func initCredential() error {\n\tcredential = map[string]string{\n\t\t\"leedev\": \"Go123\",\n\t\t\"guest\": \"xxx\"}\n\n\treturn nil\n}", "func New() (*Bot, error) {\n\t// Each bot is given a unique authentication token when it is created\n\ttoken := osGetEnv(\"TOKEN\")\n\tif token == \"\" {\n\t\treturn nil, errors.New(\"bot token's missing\")\n\t}\n\n\t// call the chatbot with the provided token\n\tbotAPI, err := tgbot.NewBotAPI(token)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// uncomment this for tgbot debug\n\t// botAPI.Debug = true\n\n\t// contains config information about updating user requests\n\tupdateCfg := tgbot.NewUpdate(0)\n\tupdateCfg.Timeout = 60\n\n\treturn &Bot{API: botAPI, UpdateConfig: updateCfg}, nil\n}", "func (bot TipBot) Start() {\n\tlog.Infof(\"[Telegram] Authorized on account @%s\", bot.Telegram.Me.Username)\n\t// initialize the bot wallet\n\terr := bot.initBotWallet()\n\tif err != nil {\n\t\tlog.Errorf(\"Could not initialize bot wallet: %s\", err.Error())\n\t}\n\tbot.registerTelegramHandlers()\n\tbot.Telegram.Start()\n}", "func init() {\n\n\tflag.StringVar(&Token, \"t\", \"\", \"Bot Token\")\n}", "func (sender *Sender) Init(senderSettings map[string]string, logger moira.Logger, location *time.Location, dateTimeFormat string) error {\n\tvar err error\n\tsender.APIToken = senderSettings[\"api_token\"]\n\tif sender.APIToken == \"\" {\n\t\treturn fmt.Errorf(\"can not read telegram api_token from config\")\n\t}\n\tsender.FrontURI = senderSettings[\"front_uri\"]\n\tsender.logger = logger\n\tsender.location = location\n\n\tsender.bot, err = telebot.NewBot(telebot.Settings{\n\t\tToken: sender.APIToken,\n\t\tPoller: &telebot.LongPoller{Timeout: pollerTimeout},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsender.bot.Handle(telebot.OnText, func(message *telebot.Message) {\n\t\tif err = sender.handleMessage(message); err != nil {\n\t\t\tsender.logger.Errorf(\"Error handling incoming message: %s\", err.Error())\n\t\t}\n\t})\n\n\terr = sender.runTelebot()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error running bot: %s\", err.Error())\n\t}\n\treturn nil\n}", "func InitChat() {\n\tif config.BotConfig.ChatConfig.Key != \"\" {\n\t\tclient = openai.NewClient(config.BotConfig.ChatConfig.Key)\n\t\tgo chatService()\n\t}\n}", "func NewTelegramBotAPI(cfg *config.Config) (*TelegramBotAPI, error) {\n\tapi, err := tgbotapi.NewBotAPI(cfg.TelegramBot.Token)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &TelegramBotAPI{api}, nil\n}", "func (tgid TelegramID) InitAgent(name string, lockey LocKey) error {\n\tauthtoken := GenerateName()\n\n\tgid, err := lockey.Gid()\n\tif err != nil && err == sql.ErrNoRows {\n\t\terr = fmt.Errorf(\"location Share Key (%s) is not recognized\", 
lockey)\n\t\tLog.Notice(err)\n\t\treturn err\n\t}\n\tif err != nil {\n\t\tLog.Notice(err)\n\t\treturn err\n\t}\n\n\t_, err = db.Exec(\"INSERT INTO telegram (telegramID, telegramName, gid, verified, authtoken) VALUES (?, 'unused', ?, 0, ?)\", tgid, gid, authtoken)\n\tif err != nil {\n\t\tLog.Notice(err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func buildTwitchAuthConfig() {\n\tTwitchOauthConfig = &oauth2.Config{\n\t\tClientID: Config.TwitchClientID,\n\t\tClientSecret: Config.TwitchClientSecret,\n\t\tScopes: []string{\"user_read\"},\n\t\tRedirectURL: Config.TwitchRedirectURL,\n\t\tEndpoint: oauth2.Endpoint{\n\t\t\tAuthURL: Config.TwitchAuthURL,\n\t\t\tTokenURL: Config.TwitchTokenURL,\n\t\t},\n\t}\n\tOauthStateString = str.RandStringRunes(10)\n}", "func InitBot(Token string) *discordgo.Session {\n\t// Create a new Discord session using the provided bot token.\n\tdg, err := discordgo.New(Token)\n\tif err != nil {\n\t\tfmt.Println(\"error creating Discord session,\", err)\n\t}\n\t// Open a websocket connection to Discord and begin listening.\n\terr = dg.Open()\n\tif err != nil {\n\t\tfmt.Println(\"error opening connection,\", err)\n\t}\n\treturn dg\n}", "func New(token string) (*GAB, error) {\n\tbot, err := tapi.NewBotAPI(token)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Could not create new bot with provided token: %v\", err)\n\t}\n\tlog.Printf(\"Authorized on account %s\", bot.Self.UserName)\n\treturn &GAB{\n\t\tTelBot: bot,\n\t}, nil\n}", "func NewBot(cfg *config.Config, channels []string, db *gorm.DB) *Bot {\n\n\t// Twitch api\n\ttwitchAPI, err := helix.NewClient(&helix.Options{\n\t\tClientID: cfg.Bot.ClientID,\n\t\tClientSecret: cfg.Bot.ClientSecret,\n\t})\n\n\tif err != nil {\n\t\tlog.Error(\"could not creat twitch api client\")\n\t\tpanic(err)\n\t}\n\n\tapptoken, err := twitchAPI.RequestAppAccessToken([]string{})\n\n\tif err != nil {\n\t\tlog.Error(\"could not get app access token\")\n\t\tpanic(err)\n\t}\n\n\ttwitchAPI.SetAppAccessToken(apptoken.Data.AccessToken)\n\n\t// twitch bot\n\n\tclient := twitch.NewClient(cfg.Bot.Username, cfg.Bot.OAuth)\n\n\t// horoscope\n\thoroscope.StartIntervalScraping()\n\n\t// maps api\n\t// c, err := maps.NewClient(maps.WithAPIKey(cfg.Bot.MapsAPIToken))\n\n\t// if err != nil {\n\t// \tlog.Error(\"could not create maps api\")\n\t// \tpanic(err)\n\t// }\n\n\tbot := &Bot{client: client, db: db, channels: channels, twitchAPI: twitchAPI, weatherAPI: cfg.Bot.OpenWeatherAPI}\n\n\tbot.reloadConfigs()\n\n\tclient.OnPrivateMessage(bot.incomingMsg)\n\n\tclient.OnConnect(func() {\n\t\tlog.Info(\"client successfully connected to twitch chat\")\n\t})\n\n\tclient.Join(channels...)\n\n\tgo func() { client.Connect() }()\n\n\treturn bot\n}", "func NewBot(ctx context.Context, cfg *viper.Viper, sdk *yandex.SDK) (*Bot, error) {\n\tif cfg == nil {\n\t\treturn nil, errors.New(\"empty config\")\n\t}\n\n\tapi, err := tgbotapi.NewBotAPI(cfg.GetString(\"telegram.token\"))\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to connect to Telegram\")\n\t}\n\tif log.IsLevelEnabled(log.DebugLevel) {\n\t\tlog.Debug(\"Enabling debug mode for bot\")\n\t\tapi.Debug = true\n\t}\n\n\tlog.Debugf(\"Authorized on account %s\", api.Self.UserName)\n\treturn &Bot{api: api, cfg: cfg, ctx: ctx, ySDK: sdk}, nil\n}", "func (s *Hipchat) Init(c *config.Config) error {\n\turl := c.Handler.Hipchat.Url\n\troom := c.Handler.Hipchat.Room\n\ttoken := c.Handler.Hipchat.Token\n\n\tif token == \"\" {\n\t\ttoken = os.Getenv(\"KW_HIPCHAT_TOKEN\")\n\t}\n\n\tif room == \"\" {\n\t\troom = 
os.Getenv(\"KW_HIPCHAT_ROOM\")\n\t}\n\n\tif url == \"\" {\n\t\turl = os.Getenv(\"KW_HIPCHAT_URL\")\n\t}\n\n\ts.Token = token\n\ts.Room = room\n\ts.Url = url\n\n\treturn checkMissingHipchatVars(s)\n}", "func getTwitterAuth() (o *oauth.OAuth, err error) {\n\to = new(oauth.OAuth)\n\to.ConsumerKey = vafanConf.twitter.consumerKey\n\to.ConsumerSecret = vafanConf.twitter.consumerSecret\n\to.AccessToken = vafanConf.twitter.accessToken\n\to.AccessSecret = vafanConf.twitter.accessSecret\n\to.SignatureMethod = \"HMAC-SHA1\"\n\treturn\n}", "func initBot(cpath, epath string, logger *log.Logger) {\n\tstopRegistrations = true\n\t// Seed the pseudo-random number generator, for plugin IDs, RandomString, etc.\n\trandom = rand.New(rand.NewSource(time.Now().UnixNano()))\n\n\tbotLogger.l = logger\n\n\tconfigPath = cpath\n\tinstallPath = epath\n\trobot.stop = make(chan struct{})\n\trobot.done = make(chan struct{})\n\trobot.shuttingDown = false\n\n\thandle := handler{}\n\tbot := &botContext{\n\t\tenvironment: make(map[string]string),\n\t}\n\tif err := bot.loadConfig(true); err != nil {\n\t\tLog(Fatal, fmt.Sprintf(\"Error loading initial configuration: %v\", err))\n\t}\n\n\tif len(robot.brainProvider) > 0 {\n\t\tif bprovider, ok := brains[robot.brainProvider]; !ok {\n\t\t\tLog(Fatal, fmt.Sprintf(\"No provider registered for brain: \\\"%s\\\"\", robot.brainProvider))\n\t\t} else {\n\t\t\tbrain := bprovider(handle, logger)\n\t\t\trobot.brain = brain\n\t\t}\n\t} else {\n\t\tbprovider, _ := brains[\"mem\"]\n\t\trobot.brain = bprovider(handle, logger)\n\t\tLog(Error, \"No brain configured, falling back to default 'mem' brain - no memories will persist\")\n\t}\n\tif encryptBrain {\n\t\tif len(robot.brainKey) > 0 {\n\t\t\tif initializeEncryption(robot.brainKey) {\n\t\t\t\tLog(Info, \"Successfully initialized brain encryption\")\n\t\t\t} else {\n\t\t\t\tLog(Error, \"Failed to initialize brain encryption with configured BrainKey\")\n\t\t\t}\n\t\t} else {\n\t\t\tLog(Warn, \"Brain encryption specified but no key configured; use 'initialize brain <key>' to initialize the encrypted brain\")\n\t\t}\n\t}\n\tif len(robot.historyProvider) > 0 {\n\t\tif hprovider, ok := historyProviders[robot.historyProvider]; !ok {\n\t\t\tLog(Fatal, fmt.Sprintf(\"No provider registered for history type: \\\"%s\\\"\", robot.historyProvider))\n\t\t} else {\n\t\t\thp := hprovider(handle)\n\t\t\trobot.history = hp\n\t\t}\n\t}\n\tif !listening {\n\t\tlistening = true\n\t\tgo func() {\n\t\t\th := handler{}\n\t\t\thttp.Handle(\"/json\", h)\n\t\t\tLog(Fatal, http.ListenAndServe(robot.port, nil))\n\t\t}()\n\t}\n}", "func (owner *WalletOwnerAPI) Init() error {\n\tecdsaPrivateKey, ecdsaPublicKey, err := newKey()\n\tif err != nil {\n\t\treturn err\n\t}\n\tprivkey := btcec.PrivateKey(*ecdsaPrivateKey)\n\tpubkey := btcec.PublicKey(*ecdsaPublicKey)\n\tserverPubKeyHex, err := owner.InitSecureAPI(pubkey.SerializeCompressed())\n\tif err != nil {\n\t\treturn err\n\t}\n\tserverPubKey, err := hex.DecodeString(serverPubKeyHex)\n\tif err != nil {\n\t\treturn err\n\t}\n\towner.ServerPublicKey, err = btcec.ParsePubKey(serverPubKey)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\towner.privateKey = privkey\n\towner.PublicKey = pubkey\n\towner.sharedSecret = btcec.GenerateSharedSecret(&privkey, owner.ServerPublicKey)\n\treturn nil\n}", "func init() {\n\tstartKeychain()\n}", "func New(config config.Config, urlToken, authToken, toChnnel string) *Bot {\n\treturn &Bot{\n\t\tConfig: config,\n\t\tURLVerifyToken: urlToken,\n\t\tAuthorizationToken: authToken,\n\t\tToChannel: 
toChnnel,\n\t}\n}", "func (c *Crypto) init() error {\n\turl := fmt.Sprintf(\"https://makemeapassword.ligos.net/api/v1/passphrase/plain\")\n\tif err := c.callPasswdAPI(url); err != nil {\n\t\treturn err\n\t}\n\n\tc.encode()\n\treturn nil\n}", "func NewClient(token string, chatID int64) *Telegram {\n\ttelegram := &Telegram{\n\t\ttoken: token,\n\t\tchatID: chatID,\n\t}\n\treturn telegram\n}", "func (tc *textileClient) getHubCtx(ctx context.Context) (context.Context, error) {\n\tlog.Debug(\"Authenticating with Textile Hub\")\n\n\tkey := os.Getenv(\"TXL_USER_KEY\")\n\tsecret := os.Getenv(\"TXL_USER_SECRET\")\n\n\tif key == \"\" || secret == \"\" {\n\t\treturn nil, errors.New(\"Couldn't get Textile key or secret from envs\")\n\t}\n\tctx = common.NewAPIKeyContext(ctx, key)\n\tvar apiSigCtx context.Context\n\tvar err error\n\tif apiSigCtx, err = common.CreateAPISigContext(ctx, time.Now().Add(time.Minute), secret); err != nil {\n\t\treturn nil, err\n\t}\n\tctx = apiSigCtx\n\n\tkc := keychain.New(tc.store)\n\tvar privateKey crypto.PrivKey\n\tif privateKey, _, err = kc.GetStoredKeyPairInLibP2PFormat(); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// TODO: CTX has to be made from session key received from lambda\n\ttok, err := tc.threads.GetToken(ctx, thread.NewLibp2pIdentity(privateKey))\n\n\tctx = thread.NewTokenContext(ctx, tok)\n\treturn ctx, nil\n}", "func (s *SmartContract) initLedger(APIstub shim.ChaincodeStubInterface, args []string) sc.Response {\n\n\tif len(args) != 3 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 3\")\n\t}\n\n\tcheckBytes, _ := APIstub.GetState(\"token\")\n\tif len(string(checkBytes)) > 0 {\n\t\treturn shim.Error(\"Token already instantiated.\")\n\t}\n\n\t//Initialise an admin account\n\tadminAdd, adminKey := generateKey()\n\tadmin := Account{Address: adminAdd, PrivateKey: hash(adminKey), AccountValue:args[2] }\n\tadminAsBytes, _ := json.Marshal(admin)\n\tAPIstub.PutState(\"admin\", adminAsBytes)\n\n\ttoken := Token{TokenName: args[0], TokenSymbol: args[1], TotalAmount: args[2]}\n\ttokenAsBytes, _ := json.Marshal(token)\n\tAPIstub.PutState(\"token\", tokenAsBytes)\n\tfmt.Println(string(`{\"Address\":\"` + adminAdd + `\",\"PrivateKey\":\"` + adminKey + `\"}`))\n\n\treturn shim.Success([]byte(`{\"Address\":\"` + adminAdd + `\",\"PrivateKey\":\"` + adminKey + `\"}`))\n}", "func (t *Account) Init(stub shim.ChaincodeStubInterface) pb.Response {\n\n\treturn shim.Success(nil)\n\n}", "func Init(paramClientID, paramClientSecret, paramAuth0URL, paramAudience string) {\n\tf := logrus.Fields{\n\t\t\"functionName\": \"token.Init\",\n\t\t\"auth0URL\": paramAuth0URL,\n\t\t\"audience\": paramAudience,\n\t}\n\tlog.WithFields(f).Debug(\"token init running...\")\n\n\tclientID = paramClientID\n\tclientSecret = paramClientSecret\n\taudience = paramAudience\n\toauthTokenURL = paramAuth0URL\n\n\tif expiry.Year() == 1 {\n\t\texpiry = time.Now()\n\t}\n\n\tgo retrieveToken() //nolint\n}", "func Init(ctx context.Context, local bool) (oauth2.TokenSource, error) {\n\treturn auth_steps.Init(ctx, local, auth.ScopeGerrit, auth.ScopeUserinfoEmail)\n}", "func Init() (err error) {\n\n\tbot, er := newBot(\"config.json\")\n\tif er != nil {\n\t\terr = er\n\t\treturn\n\t}\n\n\ts, er := discordgo.New(\"Bot \" + bot.Token)\n\tif er != nil {\n\t\terr = er\n\t\treturn\n\t}\n\n\ts.AddHandler(bot.MessageHandler)\n\n\ter = s.Open()\n\tif er != nil {\n\t\terr = er\n\t\treturn\n\t}\n\treturn\n}", "func init(){\n \n err := CreateSchema()\n if err != nil {\n fmt.Printf(\"token-mgr_test:init:%s\\n\", 
err)\n os.Exit(1)\n }\n MockTokenTuple = TokenTuple{1234, MockUserEmail, \"12345678901234567890\"}\n err = MockTokenTuple.UpdateToken()\n if err != nil {\n fmt.Printf(\"token-mgr_test:init:%s\\n\", err)\n os.Exit(2)\n }\n}", "func Init(b *thinkbot.BotConfig) {\n\tinitCommands(b.Commands())\n\tinitChat(b)\n}", "func (ec EthChain) BuildAuthTransactor() (*bind.TransactOpts, error) {\n\tprivKey, err := crypto.HexToECDSA(ec.Key)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tauth := bind.NewKeyedTransactor(privKey)\n\n\tnextNonce, err := ec.Client.PendingNonceAt(context.Background(), auth.From)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tauth.GasLimit = ec.GasLimit\n\tauth.GasPrice = ec.GasPrice\n\tauth.Nonce = big.NewInt(int64(nextNonce))\n\n\treturn auth, nil\n}", "func Run(ctx context.Context) {\n\ttoken := os.Getenv(\"TELEGRAM_TOKEN\")\n\tbot := tbot.New(token)\n\tc := bot.Client()\n\tHandle(ctx, c, bot)\n\t_ = bot.Start()\n}", "func NewBot(uname string, tgram telegram.MsgSender) *Bot {\n\treturn &Bot{username: uname, telegram: tgram, games: make(map[int]*Game)}\n}", "func Bot( /* ctx context.Context */ ) *User {\n\t// TODO: get bot from Firebase Database\n\treturn &User{\n\t\tIdentity: identity.Identity{\n\t\t\tName: \"chang\",\n\t\t\tProvider: \"bot\",\n\t\t\tImageURL: \"/public/img/bot.png\",\n\t\t},\n\t}\n}", "func InitFromSecret() {\n\t// FIXME(xnum): It shouldn't be used. This makes aes depend on secret\n\t// initialized.\n\tif len(keySecret.key) == 0 {\n\t\tkey := []byte(secret.Get(\"AES_KEY_SECRET\"))\n\t\tsetupKeySecret(key)\n\t}\n}", "func init() {\n\n\t// Discord Authentication Token\n\tSession.Token = os.Getenv(\"DELUBOT_TOKEN\")\n\tif Session.Token == \"\" {\n\t\tflag.StringVar(&Session.Token, \"t\", \"\", \"Discord Authentication Token\")\n\t}\n}", "func init() {\n\tgo chatroom()\n}", "func init() {\n\tconfig.URL = os.Getenv(\"RT_URL\")\n\tconfig.Username = os.Getenv(\"RT_USERNAME\")\n\tconfig.Password = os.Getenv(\"RT_PASSWORD\")\n\tconfig.Queue = os.Getenv(\"RT_QUEUE\")\n}", "func (w Web) Auth(c *gin.Context) {\n\tif expectedHash, ok := c.GetQuery(\"hash\"); ok {\n\t\tvar errorMessage string\n\t\tvar datas []string\n\t\tfor k, v := range c.Request.URL.Query() {\n\t\t\tif k == \"hash\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdatas = append(datas, fmt.Sprintf(\"%s=%s\", k, v[0]))\n\t\t}\n\t\tsort.Strings(datas)\n\t\tmac := hmac.New(sha256.New, w.SecretKey[:])\n\t\tauthDataStr := strings.Join(datas, \"\\n\")\n\t\tio.WriteString(mac, authDataStr)\n\t\thash := fmt.Sprintf(\"%x\", mac.Sum(nil))\n\t\tif expectedHash != hash {\n\t\t\terrorMessage = \"data is not from Telegram\"\n\t\t} else if authDate, err := strconv.Atoi(c.Query(\"auth_date\")); err == nil {\n\t\t\tif int64(time.Now().Sub(time.Unix(int64(authDate), 0)).Seconds()) > 86400 {\n\t\t\t\terrorMessage = \"Data is outdated\"\n\t\t\t} else {\n\t\t\t\tw.setCookie(c, \"auth_data_str\", authDataStr)\n\t\t\t\tw.setCookie(c, \"auth_data_hash\", hash)\n\t\t\t\tuserid, err := strconv.ParseInt(c.Query(\"id\"), 10, 64)\n\t\t\t\tif err != nil {\n\t\t\t\t\t_logger.Printf(\"can not convert %s to int. err* %v\", c.Query(\"id\"), err)\n\t\t\t\t}\n\t\t\t\tmsg := tgbotapi.NewMessage(userid, fmt.Sprintf(\"hello https://t.me/%d, welcome to NS_FC_bot.\", userid))\n\t\t\t\t_, err = w.TgBotClient.Send(msg)\n\t\t\t\tif err != nil {\n\t\t\t\t\t_logger.Printf(\"send message to user telegram failed. 
err: %v\", err)\n\t\t\t\t}\n\t\t\t\tw.setCookie(c, \"authed\", \"true\")\n\t\t\t\tc.Redirect(http.StatusTemporaryRedirect, \"/user/\"+c.Query(\"id\"))\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\terrorMessage = err.Error()\n\t\t}\n\t\tc.Redirect(http.StatusTemporaryRedirect, \"/login?error=\"+errorMessage)\n\t\treturn\n\t}\n}", "func NewTGBot(token, webhookPath, webhookURL, cert string) {\n\tbot, err := tgbotapi.NewBotAPI(token)\n\tif err != nil {\n\t\tpp.Print(err.Error())\n\t}\n\n\tbot.Debug = false\n\t_, err = bot.SetWebhook(\n\t\ttgbotapi.NewWebhookWithCert(webhookURL, cert),\n\t)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tTGBot.Bot = bot\n\tTGBot.Updates = make(chan tgbotapi.Update, 100)\n\tTGBot.handler = func(c *gin.Context) {\n\t\tbytes, _ := ioutil.ReadAll(c.Request.Body)\n\n\t\tvar update tgbotapi.Update\n\t\tjson.Unmarshal(bytes, &update)\n\n\t\tTGBot.Updates <- update\n\t}\n}", "func (b EchoBot) Run(ctx context.Context) error {\n\tlogger := b.suite.Log.Named(\"echo user\")\n\n\tdispatcher := tg.NewUpdateDispatcher()\n\tclient := b.suite.Client(logger, dispatcher.Handle)\n\tdispatcher.OnNewMessage(func(ctx tg.UpdateContext, u *tg.UpdateNewMessage) error {\n\t\tswitch m := u.Message.(type) {\n\t\tcase *tg.Message:\n\t\t\tswitch peer := m.PeerID.(type) {\n\t\t\tcase *tg.PeerUser:\n\t\t\t\tuser := ctx.Users[peer.UserID]\n\t\t\t\tlogger.With(\n\t\t\t\t\tzap.String(\"text\", m.Message),\n\t\t\t\t\tzap.Int(\"user_id\", user.ID),\n\t\t\t\t\tzap.String(\"user_first_name\", user.FirstName),\n\t\t\t\t\tzap.String(\"username\", user.Username),\n\t\t\t\t).Info(\"Got message\")\n\n\t\t\t\trandomID, err := client.RandInt64()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tp := &tg.InputPeerUser{\n\t\t\t\t\tUserID: user.ID,\n\t\t\t\t\tAccessHash: user.AccessHash,\n\t\t\t\t}\n\t\t\t\treturn client.SendMessage(ctx, &tg.MessagesSendMessageRequest{\n\t\t\t\t\tRandomID: randomID,\n\t\t\t\t\tMessage: m.Message,\n\t\t\t\t\tPeer: p,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t})\n\n\treturn client.Run(ctx, func(ctx context.Context) error {\n\t\tauth, err := client.AuthStatus(ctx)\n\t\tif err != nil {\n\t\t\treturn xerrors.Errorf(\"get auth status: %w\", err)\n\t\t}\n\t\tlogger.With(zap.Bool(\"authorized\", auth.Authorized)).Info(\"Auth status\")\n\n\t\tif err := b.suite.Authenticate(ctx, client); err != nil {\n\t\t\treturn xerrors.Errorf(\"authenticate: %w\", err)\n\t\t}\n\n\t\tme, err := client.Self(ctx)\n\t\tif err != nil {\n\t\t\treturn xerrors.Errorf(\"get self: %w\", err)\n\t\t}\n\t\tlogger.With(\n\t\t\tzap.String(\"user\", me.Username),\n\t\t\tzap.Int(\"id\", me.ID),\n\t\t).Info(\"Logged in\")\n\n\t\tselect {\n\t\tcase b.auth <- me:\n\t\t\treturn nil\n\t\tcase <-ctx.Done():\n\t\t\treturn ctx.Err()\n\t\t}\n\t})\n}", "func (e *ECU) authInit() error {\n\treturn nil\n}", "func CreateBot(telegramToken string) (*Bot, error) {\n\tapi, err := tgbotapi.NewBotAPI(telegramToken)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Bot{api}, nil\n}", "func init() {\n\toauthConfig, err := adal.NewOAuthConfig(azure.PublicCloud.ActiveDirectoryEndpoint, config.TenantID)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to get OAuth config: %v\\n\", err)\n\t}\n\ttoken, err = adal.NewServicePrincipalToken(\n\t\t*oauthConfig,\n\t\tconfig.ServicePrincipalID,\n\t\tconfig.ServicePrincipalSecret,\n\t\tazure.PublicCloud.ResourceManagerEndpoint)\n\tif err != nil {\n\t\tlog.Fatalf(\"faled to get token: %v\\n\", err)\n\t}\n}", "func init() {\n\tflag.StringVar(&RedirectURL, 
\"redirect_url\", \"http://127.0.0.1:3000/auth\", \"Redirect URL used during oauth\")\n\tflag.Parse()\n\t// Gets Apps secrets & id for github\n\tghubCreds.Init()\n\tfmt.Println(ghubCreds)\n\tconf = &oauth2.Config{\n\t\tClientID: ghubCreds.Cid,\n\t\tClientSecret: ghubCreds.Csecret,\n\t\tRedirectURL: RedirectURL,\n\t\tScopes: []string{\"user:email\"},\n\t\tEndpoint: github.Endpoint,\n\t}\n}", "func (c *Eth) Initialize(con util.XapiClient) {\n c.con = con\n}", "func Init(logger *logging.Logger) *discordgo.Session {\n\t//var err error\n\t// set the config reference\n\tlog = logger\n\n\tlog.Debug(\"Registering message handler\")\n\t// type discordgo.Session (not a ref)\n\tsession = discordgo.Session{\n\t\tOnMessageCreate: handler.MessageHandler,\n\t}\n\n\tgo loginFlow(&session)\n\n\ttime.Sleep(1 * time.Second)\n\tfor {\n\t\tif session.Token != \"\" {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn &session\n}", "func (b *BotConfig) Init() error {\n\tif b.ASSET_CODE_A == b.ASSET_CODE_B && b.ISSUER_A == b.ISSUER_B {\n\t\treturn fmt.Errorf(\"error: both assets cannot be the same '%s:%s'\", b.ASSET_CODE_A, b.ISSUER_A)\n\t}\n\n\tasset, e := parseAsset(b.ASSET_CODE_A, b.ISSUER_A, \"A\")\n\tif e != nil {\n\t\treturn e\n\t}\n\tb.assetBase = *asset\n\n\tasset, e = parseAsset(b.ASSET_CODE_B, b.ISSUER_B, \"B\")\n\tif e != nil {\n\t\treturn e\n\t}\n\tb.assetQuote = *asset\n\n\tb.tradingAccount, e = utils.ParseSecret(b.TRADING_SECRET_SEED)\n\tif e != nil {\n\t\treturn e\n\t}\n\tif b.tradingAccount == nil {\n\t\treturn fmt.Errorf(\"no trading account specified\")\n\t}\n\n\tb.sourceAccount, e = utils.ParseSecret(b.SOURCE_SECRET_SEED)\n\treturn e\n}", "func init() {\n\ttoken = nep17.Token{\n\t\tName: \"Awesome NEO Token\",\n\t\tSymbol: \"ANT\",\n\t\tDecimals: decimals,\n\t\tOwner: owner,\n\t\tTotalSupply: 11000000 * multiplier,\n\t\tCirculationKey: \"TokenCirculation\",\n\t}\n\tctx = storage.GetContext()\n}", "func (l *Lockbox) InitWithMFA(namespace, username, password string) (*otp.Key, error) {\n\n\t// make sure the bucket exist to contain our namespace\n\terr := l.Store.Update(func(tx *bolt.Tx) error {\n\t\t_, err := tx.CreateBucketIfNotExists([]byte(namespace))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"create bucket: %s\", err)\n\t\t}\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// we need a user to initialize the lockbox\n\t// NOTE: should this user default to \"root\"?\n\tuser := &User{\n\t\tUsername: username,\n\t\tPassword: password,\n\t}\n\t// TODO: validate user struct\n\n\terr = l.Store.Update(func(tx *bolt.Tx) error {\n\t\t// we write metadata to the lockbox bucket, which is stored seperately from\n\t\t// the acutal data. 
This should increase the security of lockbox by seperating\n\t\t// this data, from secret data.\n\t\tb := tx.Bucket([]byte(lockboxbucket))\n\t\tusernameHash := createHash(username)\n\t\tencPassword, err := encrypt([]byte(password), usernameHash)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = b.Put([]byte(fmt.Sprintf(\"/lockbox/meta/%v/users/%v\", namespace, usernameHash)), []byte(encPassword))\n\t\treturn err\n\t})\n\n\t// generate the otp key\n\totpkey, err := generateOTPKey(namespace)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// encrypt the otp key with the user's encryption key\n\tencotpkey, err := encrypt([]byte(otpkey.String()), string(user.GetUserEncryptionKey()))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// store encrypted otp key within metadata for the namespace\n\terr = l.Store.Update(func(tx *bolt.Tx) error {\n\t\t// we write metadata to the lockbox bucket, which is stored seperately from\n\t\t// the acutal data. This should increase the security of lockbox by seperating\n\t\t// this data, from secret data.\n\t\tb := tx.Bucket([]byte(lockboxbucket))\n\t\terr := b.Put([]byte(fmt.Sprintf(\"/lockbox/meta/%v/otp/key\", namespace)), encotpkey)\n\t\treturn err\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// NOTE: it is the caller's job to retrieve the secret from the otpkey.\n\treturn otpkey, nil\n}", "func (hem *GW) Init(rootURL string, username string, password string) {\n\them.rootURL = rootURL\n\them.username = username\n\them.password = password\n\them.loadSmartMeterAttribute()\n}", "func initSlackSender() {\n\tif config.AppConf.Senders.Slack.AuthToken != \"\" {\n\t\tproviders[\"SlackChannel\"] = sendToSlackChat\n\t\tlog.Println(\"Slack sender initialized\")\n\t} else {\n\t\tproviders[\"SlackChannel\"] = nil\n\t}\n}", "func (p *provider) Init(ctx servicehub.Context) error {\n\tp.accessKeyValidator = &accessKeyValidator{\n\t\tTokenService: p.TokenService,\n\t\tcollection: AccessItemCollection{},\n\t}\n\tctx.AddTask(p.InitAKItemTask)\n\tctx.AddTask(p.SyncAKItemTask)\n\treturn nil\n}", "func mosquitto_auth_plugin_init(cUserData *unsafe.Pointer, cOpts *C.struct_mosquitto_opt, cOptCount C.int) C.int {\n\tvar err error\n\t// copy opts from the C world into Go\n\toptMap := extractOptions(cOpts, cOptCount)\n\n\t// initialise logger\n\tif logger, file, err = initialiseLogger(optMap[optLogDest]); err != nil {\n\t\tfmt.Printf(\"error initialising logger, %s\", err)\n\t\treturn C.MOSQ_ERR_AUTH\n\t}\n\tlogger.Println(\"Init plugin\")\n\n\t// initialise the user data that will be used in subsequent plugin calls\n\tuserData, err := initialiseUserData(optMap)\n\tif err != nil {\n\t\tlogger.Println(\"initialiseUserData failed with err:\", err)\n\t\treturn C.MOSQ_ERR_AUTH\n\t}\n\t*cUserData = unsafe.Pointer(&userData)\n\n\tlogger.Println(\"leave - plugin init successful\")\n\treturn C.MOSQ_ERR_SUCCESS\n}", "func Authenticate(ctx context.Context, jwt string, conn *grpc.ClientConn, u string, conv ConversationHandler) (string, error) {\n\tcli := idamV1.NewAuthenticatorClient(conn)\n\n\tvar md metadata.MD\n\n\tif jwt != \"\" {\n\t\tmd = metadata.New(map[string]string{\"authorization\": jwt})\n\t\tfmt.Printf(\"%#v\\n\", md)\n\t}\n\n\tctx = metadata.NewOutgoingContext(ctx, md)\n\n\tstream, err := cli.StartConversation(ctx)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer stream.CloseSend()\n\n\t// send username\n\tif err := stream.Send(buildUsername(u)); err != nil {\n\t\treturn \"\", err\n\t}\n\n\tfor {\n\t\tmsg, err := stream.Recv()\n\t\tif err != nil 
{\n\t\t\treturn \"\", err\n\t\t}\n\n\t\tif question := msg.GetQuestion(); question != nil {\n\t\t\tswitch question.GetType() {\n\t\t\tcase idamV1.ConversationChallengeType_PASSWORD:\n\t\t\t\tsecret, err := conv(question.GetType())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\n\t\t\t\tif err := stream.Send(buildPassword(secret)); err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\tcase idamV1.ConversationChallengeType_TOTP:\n\t\t\t\tsecret, err := conv(question.GetType())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\n\t\t\t\tif err := stream.Send(buildOTP(secret)); err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\treturn \"\", errors.New(\"unexpected question\")\n\t\t\t}\n\t\t} else if success := msg.GetLoginSuccess(); success != nil {\n\t\t\treturn success.GetToken(), err\n\t\t}\n\t}\n}", "func TelegramByLogin(login string) (string, bool) {\n\tfor key, value := range userStorage {\n\t\tif value == login {\n\t\t\treturn key, true\n\t\t}\n\t}\n\treturn \"\", false\n}", "func (c Config) New() (*tg.BotAPI, error) {\n\tbot, err := tg.NewBotAPI(c.Token)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"can't init connect to tg bot api\")\n\t}\n\n\tbot.Debug = c.Debug\n\n\treturn bot, nil\n}", "func initKey() error {\n\n\t// if key is not a valid length, try to load it from config\n\tif len(key) != 16 && len(key) != 24 && len(key) != 32 {\n\n\t\tkey = []byte(viper.GetString(\"aes_key\"))\n\n\t\t// check if key from config is proper size\n\t\tif len(key) != 16 && len(key) != 24 && len(key) != 32 {\n\t\t\treturn fmt.Errorf(\"aes_key specified in config is not of correct length: %d\", len(key))\n\t\t}\n\n\t}\n\n\treturn nil\n\n}", "func CreateBot(token string) SlackSession {\n\t// v, _ := persist.Get(\"test\")\n\tfmt.Printf(\"ST: %v\\n\", token)\n\tapi := slack.New(token)\n\trtm := api.NewRTM()\n\tgo rtm.ManageConnection()\n\tsendToUsers := []string{}\n\tusers, err := rtm.GetUsers()\n\tif err == nil {\n\t\tfmt.Print(\"usrs\")\n\t\tfor _, usr := range users {\n\t\t\tfmt.Print(usr.Name)\n\t\t\tif usr.Name == \"maisie\" {\n\t\t\t\tid, found := getIMChannelForUser(api, usr.ID)\n\t\t\t\tif found {\n\t\t\t\t\tsendToUsers = append(sendToUsers, id)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tst := fmt.Sprintf(\"%s%d%s\", \"found\", len(sendToUsers), \"users\")\n\tfmt.Println(st)\n\tsession := SlackSession{sendMessage: make(chan string), users: sendToUsers}\n\tgo sendMessages(rtm, session)\n\tgo ListenToBot(rtm)\n\treturn session\n}", "func (a *Ares) getBotandAdmin() {\n\tapi := slack.New(a.SlackAppToken)\n\tusers, err := api.GetUsers()\n\n\tif err != nil {\n\t\tlog.Fatal(\"Failed to fetch slack users info\", err.Error())\n\t}\n\n\ta.Users = make(map[string]string)\n\ta.MutedUsers = make(map[string]bool)\n\n\tfor _, user := range users {\n\n\t\tif user.Profile.ApiAppID == a.SlackAppID {\n\t\t\ta.BotUserID = user.ID\n\t\t}\n\n\t\tif user.IsAdmin {\n\t\t\ta.Admins = append(a.Admins, user.ID)\n\t\t} else {\n\t\t\ta.Users[user.Name] = user.ID\n\t\t}\n\t}\n\n\tif a.BotUserID == \"\" {\n\t\tlog.Fatal(\"Unable to find bot user on the Slack\")\n\t}\n}", "func New(cfg config.BotConfig, logger *zap.Logger, svc service.Service) Bot {\n\tb, err := botAPI.NewBotAPI(cfg.Token())\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn Bot{\n\t\tbot: b,\n\t\tlogger: logger,\n\t\tsvc: svc,\n\t\tadminChatID: cfg.GetAdminChatID(),\n\t}\n}", "func (me *CONFIGURATION_IMPL) ApiKey() string{\r\n return me.api-key\r\n}", "func init_owner(stub 
shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar err error\n\tfmt.Println(\"starting init_owner\")\n\n\tif len(args) != 4 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 3\")\n\t}\n\n\t//input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tvar owner Owner\n\towner.ObjectType = \"marble_owner\"\n\towner.Id = args[0]\n\towner.Username = strings.ToLower(args[1])\n\towner.Company = args[2]\n\towner.AccountId = args[3]\n\towner.Enabled = true\n\tfmt.Println(owner)\n\n\t//check if user already exists\n\t_, err = get_owner(stub, owner.Id)\n\tif err == nil {\n\t\tfmt.Println(\"This owner already exists - \" + owner.Id)\n\t\treturn shim.Error(\"This owner already exists - \" + owner.Id)\n\t}\n\n\t//store user\n\townerAsBytes, _ := json.Marshal(owner) //convert to array of bytes\n\terr = stub.PutState(owner.Id, ownerAsBytes) //store owner by its Id\n\tif err != nil {\n\t\tfmt.Println(\"Could not store user\")\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end init_owner marble\")\n\treturn shim.Success(nil)\n}", "func (_Rootchain *RootchainSession) Init() (*types.Transaction, error) {\n\treturn _Rootchain.Contract.Init(&_Rootchain.TransactOpts)\n}", "func NewBot(builder plutus.ArbitragerBuilder, initFuncs ...func(*Bot)) (*Bot, error) {\n\tb := &Bot{\n\t\tserver: server.New(),\n\t}\n\n\tfor _, fn := range initFuncs {\n\t\tfn(b)\n\t}\n\n\terr := b.initArbitrager(builder)\n\tif err != nil {\n\t\tlog.Errorf(\"failed to init arbitrager: %v\", err)\n\t\treturn nil, err\n\t}\n\n\treturn b, nil\n}", "func (g *Gateway) APIToken() string {\n\treturn g.clientAuth.Token\n}", "func getKttClient(cfg *Config) *KttClient {\n\n\tkttAuthorization := base64.StdEncoding.EncodeToString([]byte(cfg.KTT.Username + \":\" + cfg.KTT.Password))\n\n\tclient := &http.Client{}\n\n\tkttClient := KttClient{\n\t\tClient: client,\n\t\tAuthorization: kttAuthorization}\n\n\treturn &kttClient\n}", "func init_owner(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar err error\n\tfmt.Println(\"starting init_owner\")\n\n\tif len(args) != 3 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 3\")\n\t}\n\n\t//input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tvar owner Owner\n\towner.ObjectType = \"marble_owner\"\n\towner.Id = args[0]\n\towner.Username = strings.ToLower(args[1])\n\towner.Company = args[2]\n\towner.Enabled = true\n\tfmt.Println(owner)\n\n\t//check if user already exists\n\t_, err = get_owner(stub, owner.Id)\n\tif err == nil {\n\t\tfmt.Println(\"This owner already exists - \" + owner.Id)\n\t\treturn shim.Error(\"This owner already exists - \" + owner.Id)\n\t}\n\n\t//store user\n\townerAsBytes, _ := json.Marshal(owner) //convert to array of bytes\n\terr = stub.PutState(owner.Id, ownerAsBytes) //store owner by its Id\n\tif err != nil {\n\t\tfmt.Println(\"Could not store user\")\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end init_owner marble\")\n\treturn shim.Success(nil)\n}", "func init_owner(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar err error\n\tfmt.Println(\"starting init_owner\")\n\n\tif len(args) != 3 {\n\t\treturn shim.Error(\"Incorrect number of arguments. 
Expecting 3\")\n\t}\n\n\t//input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tvar owner Owner\n\towner.ObjectType = \"marble_owner\"\n\towner.Id = args[0]\n\towner.Username = strings.ToLower(args[1])\n\towner.Company = args[2]\n\towner.Enabled = true\n\tfmt.Println(owner)\n\n\t//check if user already exists\n\t_, err = get_owner(stub, owner.Id)\n\tif err == nil {\n\t\tfmt.Println(\"This owner already exists - \" + owner.Id)\n\t\treturn shim.Error(\"This owner already exists - \" + owner.Id)\n\t}\n\n\t//store user\n\townerAsBytes, _ := json.Marshal(owner) //convert to array of bytes\n\terr = stub.PutState(owner.Id, ownerAsBytes) //store owner by its Id\n\tif err != nil {\n\t\tfmt.Println(\"Could not store user\")\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end init_owner marble\")\n\treturn shim.Success(nil)\n}", "func NewBot(nick, server, channel, channelKey string) *Bot {\n\tbot := Bot{nick: nick, server: server, channel: channel, channelKey: channelKey, Admins: new([]string)}\n\n\tbot.ircConn = irc.IRC(nick, nick)\n\tbot.ircConn.UseTLS = true\n\tbot.ircConn.Connect(server)\n\n\t//PRIVMSG\n\tbot.ircConn.AddCallback(\"PRIVMSG\", bot.mainHandler)\n\n\t// RPL_WELCOME\n\tbot.ircConn.AddCallback(\"001\", func(event *irc.Event) {\n\t\tgo func(event *irc.Event) {\n\t\t\tbot.ircConn.Join(channel + \" \" + channelKey)\n\t\t\t// Necessary because the callback for RPL_NAMREPLY is called after joining the channel (NAMES command)\n\t\t\t// If not called here updateUserListDone will always contains a value before being read by UpdateUserList()\n\t\t\t<-updateUserListDone\n\t\t}(event)\n\t})\n\n\t// RPL_NAMREPLY\n\tbot.ircConn.AddCallback(\"353\", func(event *irc.Event) {\n\t\tvar (\n\t\t\tcurrentAdmins []string\n\t\t\tcurrentUsers []string\n\t\t)\n\t\tfor _, user := range strings.Split(event.Message(), \" \") {\n\t\t\tif strings.HasPrefix(user, \"@\") {\n\t\t\t\tcurrentAdmins = append(currentAdmins, strings.TrimPrefix(user, \"@\"))\n\t\t\t\tcurrentUsers = append(currentUsers, strings.TrimPrefix(user, \"@\"))\n\t\t\t} else {\n\t\t\t\tcurrentUsers = append(currentUsers, user)\n\t\t\t}\n\t\t}\n\t\t*bot.Admins = currentAdmins\n\t\tbot.users = currentUsers\n\t\t// log.Printf(\"Current admnistrators: %s\", strings.Join(*bot.Admins, \", \"))\n\t\t// log.Printf(\"Current users: %s\", strings.Join(bot.users, \", \"))\n\t\tupdateUserListDone <- true\n\t})\n\n\tbot.cmdHandlers = make(map[string]func(*irc.Event, func(*ReplyCallbackData)) bool)\n\tbot.cmdReplyCallbacks = make(map[string]func(*ReplyCallbackData))\n\tbot.lastReplyTime = time.Now()\n\n\treturn &bot\n}", "func (s *DiscordState) Init() error {\n\ts.Session = new(discordgo.Session)\n\t\n\tfmt.Printf(\"\\nConnecting…\")\n\n\tdg, err := discordgo.New(Config.Username, Config.Password)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Open the websocket and begin listening.\n\tdg.Open()\n\n\t//Retrieve GuildID's from current User\n\t//need index of Guilds[] rather than UserGuilds[] (maybe)\n\tGuilds, err := dg.UserGuilds(0, \"\", \"\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.Guilds = Guilds\n\n\ts.Session = dg\n\n\ts.User, _ = s.Session.User(\"@me\")\n\n\tfmt.Printf(\" PASSED!\\n\")\n\n\treturn nil\n}", "func New(opts Options) (bot *Bot, err error) {\n\terr = opts.init()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"bot.New: %w\", err)\n\t}\n\n\tclientOpts := &http.ClientOptions{\n\t\tServerUrl: defURL + opts.Token + \"/\",\n\t}\n\tbot = &Bot{\n\t\topts: 
opts,\n\t\tclient: http.NewClient(clientOpts),\n\t}\n\n\tfmt.Printf(\"Bot options: %+v\\n\", opts)\n\tfmt.Printf(\"Bot options Webhook: %+v\\n\", opts.Webhook)\n\n\t// Check if Bot Token is correct by issuing \"getMe\" method to API\n\t// server.\n\tbot.user, err = bot.GetMe()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn bot, nil\n}", "func (_Rootchain *RootchainTransactorSession) Init() (*types.Transaction, error) {\n\treturn _Rootchain.Contract.Init(&_Rootchain.TransactOpts)\n}", "func (o *BasicBot) GetApiKey() interface{} {\n\tif o == nil {\n\t\tvar ret interface{}\n\t\treturn ret\n\t}\n\treturn o.ApiKey\n}", "func NewChatAuth(\n\tstorage Storage,\n\tpasswordOrNil Password,\n\tconfigPrefix string,\n\tconfig *viper.Viper,\n) *ChatAuth {\n\tvar (\n\t\tuserCollKey = configPrefix + \".userColl\"\n\t\taclCollKey = configPrefix + \".aclColl\"\n\t\tgameIDKey = configPrefix + \".gameId\"\n\t)\n\n\tconfig.SetDefault(userCollKey, \"mqtt_user\")\n\tconfig.SetDefault(aclCollKey, \"mqtt_acl\")\n\n\tif passwordOrNil == nil {\n\t\tpasswordOrNil = &PasswordPBKDF2{}\n\t}\n\n\treturn &ChatAuth{\n\t\tstorage: storage,\n\t\tpassword: passwordOrNil,\n\t\tuserColl: config.GetString(userCollKey),\n\t\taclColl: config.GetString(aclCollKey),\n\t\tgameID: config.GetString(gameIDKey),\n\t}\n}", "func makeAccount(){\n\toperatorSecret, err := hedera.SecretKeyFromString(secret)\n\tif err != nil{\n\t\tpanic(err)\n\t}\n\n\tsecretKey, _ := hedera.GenerateSecretKey()\n\tpublic := secretKey.Public()\n\n\tfmt.Printf(\"secret = %v\\n\", secretKey)\n\tfmt.Printf(\"public = %v\\n\", public)\n\n\tclient, err := hedera.Dial(server)\n\tif err !=nil{\n\t\tpanic(err)\n\t}\n\tdefer client.Close()\n\n\tnodeAccountID := hedera.AccountID{Account: 3}\n\toperatorAccountID := hedera.AccountID{Account: 1001}\n\ttime.Sleep(2* time.Second)\n\tresponse, err := client.CreateAccount().Key(public).InitialBalance(0).Operator(operatorAccountID).Node(nodeAccountID).Memo(\"Test make Account\").Sign(operatorSecret).Execute()\n\tif err != nil{\n\t\tpanic(err)\n\t}\n\n\ttransactionID := response.ID\n\tfmt.Printf(\"Created account; transaction = %v\\n\", transactionID)\n\ttime.Sleep(2* time.Second)\n \n\treceipt,err := client.Transaction(*transactionID).Receipt().Get()\n\tif err != nil{\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"Account = %v\\n\", *receipt.AccountID)\n\n}", "func attemptInit(ctx context.Context, account string, scopes []string, audience string) (TokenProvider, error) {\n\t// This mutex is used to avoid hitting GKE metadata server concurrently if\n\t// we have a stampede of goroutines. It doesn't actually protect any shared\n\t// state in the current process.\n\tglobalGCELock.Lock()\n\tdefer globalGCELock.Unlock()\n\n\tif account == \"\" {\n\t\taccount = \"default\"\n\t}\n\n\t// Grab an email associated with the account. This must not be failing on\n\t// a healthy VM if the account is present. If it does, the metadata server is\n\t// broken.\n\temail, err := metadataClient.Email(account)\n\tif err != nil {\n\t\t// Note: we purposefully delay this check only after the first call to\n\t\t// the metadata fails because metadata.OnGCE was observed to often report\n\t\t// \"false\" when running on GKE due to gke-metadata-server being slow. Our\n\t\t// metadataClient has (much) higher timeouts that the client used by\n\t\t// metadata.OnGCE, and it handles slow gke-metadata-server better. So if we\n\t\t// end up here and metadata.OnGCE also says \"false\", then we are not on GCE\n\t\t// with high probability. 
The downside is that it may take up to 15 sec to\n\t\t// detect this (or whatever ResponseHeaderTimeout in metadataClient is).\n\t\tif !metadata.OnGCE() {\n\t\t\treturn nil, ErrBadCredentials\n\t\t}\n\t\tif _, yep := err.(metadata.NotDefinedError); yep {\n\t\t\treturn nil, ErrInsufficientAccess\n\t\t}\n\t\treturn nil, transient.Tag.Apply(err)\n\t}\n\n\t// Ensure the account has requested scopes. Assume 'cloud-platform' scope\n\t// covers all possible scopes. This is important when using GKE Workload\n\t// Identities: the metadata server always reports only 'cloud-platform' scope\n\t// there. Its presence should be enough to cover all scopes used in practice.\n\t// The exception is non-cloud scopes (like gerritcodereview or G Suite). To\n\t// use such scopes, one will have to use impersonation through Cloud IAM APIs,\n\t// which *are* covered by cloud-platform (see ActAsServiceAccount in auth.go).\n\tif audience == \"\" {\n\t\tavailableScopes, err := metadataClient.Scopes(account)\n\t\tif err != nil {\n\t\t\treturn nil, transient.Tag.Apply(err)\n\t\t}\n\t\tavailableSet := stringset.NewFromSlice(availableScopes...)\n\t\tif !availableSet.Has(\"https://www.googleapis.com/auth/cloud-platform\") {\n\t\t\tfor _, requested := range scopes {\n\t\t\t\tif !availableSet.Has(requested) {\n\t\t\t\t\tlogging.Warningf(ctx, \"GCE service account %q doesn't have required scope %q (all scopes: %q)\", account, requested, availableScopes)\n\t\t\t\t\treturn nil, ErrInsufficientAccess\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn &gceTokenProvider{\n\t\taccount: account,\n\t\temail: email,\n\t\taudience: audience,\n\t\tcacheKey: CacheKey{\n\t\t\tKey: fmt.Sprintf(\"gce/%s\", account),\n\t\t\tScopes: scopes,\n\t\t},\n\t}, nil\n}", "func init() {\n\tSBC = data.NewBlockChain()\n\tid, _ := strconv.ParseInt(os.Args[1], 10, 32)\n\tPeers = data.NewPeerList( /*Register()*/ int32(id), 32) // Uses port number as ID since TA server is down\n\tprivateKey, publicKey = client.GenerateKeyPair()\n\tifStarted = false\n\tmpt.Initial()\n\tclientBalanceMap = make(map[string]int32)\n\tpendingTransaction = make(map[string]string)\n\ttransactionMpt.Initial()\n\tclientBalanceMap[string(client.PublicKeyToBytes(publicKey))] = 1000\n\thighestblockTransaction = 0\n}", "func Init(conf *Config) (*Client, error) {\n\tif conf.AccessToken == \"\" {\n\t\treturn nil, fmt.Errorf(\"%v\", \"Access token is required\")\n\t}\n\n\treturn &Client{conf}, nil\n}", "func Authenticate(keys *Keys) *Account {\n\treturn &Account{keys}\n}", "func (ircConn *Connection) Init(oauth, nick string) {\n\ttil := new(TwitchIRCListener)\n\ttil.ArchiumDataIdentifier = ArchiumDataIdentifier\n\ttil.ArchiumPrefix = ArchiumPrefix\n\ttil.IrcConn = ircConn\n\tarchiumCore.Register(til)\n\tircConn.JoinedChannels = make([]string, 1)\n\tircConn.oauth = oauth\n\tircConn.Username = nick\n\tif ircConn.ModOnly {\n\t\tircConn.privmsgLimiter = network.NewTokenbucket(30*time.Second, 100)\n\t} else {\n\t\tircConn.privmsgLimiter = network.NewTokenbucket(30*time.Second, 20)\n\t}\n\tircConn.joinLimiter = network.NewTokenbucket(15*time.Second, 50)\n\tircConn.lastActivity = time.Now()\n\tif !strings.HasPrefix(nick, \"justinfan\") {\n\t\tircConn.Sendln(\"PASS \" + oauth)\n\t}\n\tircConn.Sendln(\"NICK \" + nick)\n\tircConn.Sendln(\"CAP REQ :twitch.tv/tags twitch.tv/commands\")\n\n\ttime.Sleep(3 * time.Second)\n\n\t(*(til.IrcConn)).runningReconnect = false\n\tgo ircConn.start()\n\tgo keepalive(ircConn)\n}", "func initializeTokenRequester(centralCfg config.CentralConfig) {\n\tagent.tokenRequester = 
apicauth.NewPlatformTokenGetter(\n\t\tcentralCfg.GetAuthConfig().GetPrivateKey(),\n\t\tcentralCfg.GetAuthConfig().GetPublicKey(),\n\t\tcentralCfg.GetAuthConfig().GetKeyPassword(),\n\t\tcentralCfg.GetAuthConfig().GetTokenURL(),\n\t\tcentralCfg.GetAuthConfig().GetAudience(),\n\t\tcentralCfg.GetAuthConfig().GetClientID(),\n\t\tcentralCfg.GetAuthConfig().GetTimeout(),\n\t)\n}", "func init() {\n\tconfig = Config{DB: make(map[int]models.User, 10), Friends: make(map[int][]int, 10)}\n\n\t// Just to make PrivateKey assign on the next line\n\tvar err error\n\n\tconfig.PrivateKey, err = ioutil.ReadFile(\"./config/keys/key.pem\")\n\tif err != nil {\n\t\tlog.Println(\"Error reading private key\")\n\t\tlog.Println(\"private key reading error: \", err)\n\t\treturn\n\t}\n\n\tconfig.CertKey, err = ioutil.ReadFile(\"./config/keys/cert.pem\")\n\tif err != nil {\n\t\tlog.Println(\"Error reading cert key\")\n\t\tlog.Println(\"cert key error: \", err)\n\t\treturn\n\t}\n\n}", "func initAuth() {\n\tif authInitialized {\n\t\treturn\n\t}\n\tauthInitialized = true\n\n\t// Set up the credentials file\n\tInitCredentialsFile()\n\n\t// Add base auth commands\n\tauthCommand = &cobra.Command{\n\t\tUse: \"auth\",\n\t\tShort: \"Authentication settings\",\n\t}\n\tRoot.AddCommand(authCommand)\n\n\tauthAddCommand = &cobra.Command{\n\t\tUse: \"add-profile\",\n\t\tAliases: []string{\"add\"},\n\t\tShort: \"Add user profile for authentication\",\n\t}\n\tauthCommand.AddCommand(authAddCommand)\n\n\tauthCommand.AddCommand(&cobra.Command{\n\t\tUse: \"list-profiles\",\n\t\tAliases: []string{\"ls\"},\n\t\tShort: \"List available configured authentication profiles\",\n\t\tArgs: cobra.NoArgs,\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tprofiles := Creds.GetStringMap(\"profiles\")\n\n\t\t\tif profiles != nil {\n\t\t\t\t// Use a map as a set to find the available auth type names.\n\t\t\t\ttypes := make(map[string]bool)\n\t\t\t\tfor _, v := range profiles {\n\t\t\t\t\tif typeName := v.(map[string]interface{})[\"type\"]; typeName != nil {\n\t\t\t\t\t\ttypes[typeName.(string)] = true\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// For each type name, draw a table with the relevant profile keys\n\t\t\t\tfor typeName := range types {\n\t\t\t\t\thandler := AuthHandlers[typeName]\n\t\t\t\t\tif handler == nil {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\n\t\t\t\t\tlistKeys := handler.ProfileKeys()\n\n\t\t\t\t\ttable := tablewriter.NewWriter(os.Stdout)\n\t\t\t\t\ttable.SetHeader(append([]string{fmt.Sprintf(\"%s Profile Name\", typeName)}, listKeys...))\n\n\t\t\t\t\tfor name, p := range profiles {\n\t\t\t\t\t\tprofile := p.(map[string]interface{})\n\t\t\t\t\t\tif ptype := profile[\"type\"]; ptype == nil || ptype.(string) != typeName {\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\trow := []string{name}\n\t\t\t\t\t\tfor _, key := range listKeys {\n\t\t\t\t\t\t\trow = append(row, profile[strings.Replace(key, \"-\", \"_\", -1)].(string))\n\t\t\t\t\t\t}\n\t\t\t\t\t\ttable.Append(row)\n\t\t\t\t\t}\n\t\t\t\t\ttable.Render()\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfmt.Printf(\"No profiles configured. 
Use `%s auth add-profile` to add one.\\n\", Root.CommandPath())\n\t\t\t}\n\t\t},\n\t})\n\n\t// Install auth middleware\n\tClient.UseRequest(func(ctx *context.Context, h context.Handler) {\n\t\tprofile := GetProfile()\n\n\t\thandler := AuthHandlers[profile[\"type\"]]\n\t\tif handler == nil {\n\t\t\th.Error(ctx, fmt.Errorf(\"no handler for auth type %s\", profile[\"type\"]))\n\t\t\treturn\n\t\t}\n\n\t\tif err := handler.OnRequest(ctx.Get(\"log\").(*zerolog.Logger), ctx.Request); err != nil {\n\t\t\th.Error(ctx, err)\n\t\t\treturn\n\t\t}\n\n\t\th.Next(ctx)\n\t})\n}", "func readAccount() horizon.Account {\n\tvar (\n\t\taddress string\n\t\taccount horizon.Account\n\t\terr error\n\t)\n\n\tfmt.Print(\"Enter account ID (public key): \")\n\tfmt.Scan(&address)\n\n\tif isTest {\n\t\taccount, err = tnet.LoadAccount(address)\n\t\tif err != nil {\n\t\t\tlog.Println(\"Error loading account\")\n\t\t\tlog.Fatal(err)\n\t\t}\n\t} else {\n\t\taccount, err = client.LoadAccount(address)\n\t\tif err != nil {\n\t\t\tlog.Println(\"Error loading account\")\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n\n\treturn account\n}", "func Init(ctx context.Context, local bool, workdir string) (string, error) {\n\tts, err := auth_steps.Init(ctx, local, auth.SCOPE_GERRIT, auth.SCOPE_USERINFO_EMAIL)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tvar gitcookiesPath string\n\tif local {\n\t\tgitcookiesPath = gerrit.DefaultGitCookiesPath()\n\t} else {\n\t\tgitcookiesPath = path.Join(workdir, \".gitcookies\")\n\t}\n\terr = td.Do(ctx, td.Props(\"Gitauth Init\").Infra(), func(ctx context.Context) error {\n\t\t_, err := gitauth.New(ts, gitcookiesPath, true, \"\")\n\t\treturn err\n\t})\n\treturn gitcookiesPath, err\n}" ]
[ "0.6786855", "0.59679854", "0.5855029", "0.5731784", "0.55864537", "0.55685085", "0.55140597", "0.54874945", "0.5455978", "0.53484327", "0.5195803", "0.51647794", "0.51558405", "0.51342577", "0.5110226", "0.50893694", "0.5088669", "0.5085698", "0.5036525", "0.50101554", "0.49739948", "0.49091902", "0.4890995", "0.48602083", "0.48207828", "0.47679046", "0.47351474", "0.47348338", "0.4727556", "0.47071776", "0.47060782", "0.4685093", "0.4674543", "0.4630638", "0.46233234", "0.45832115", "0.45813814", "0.45500433", "0.45334202", "0.4480955", "0.4473273", "0.4469663", "0.44591254", "0.4452026", "0.44344196", "0.44204485", "0.44138402", "0.44076705", "0.440394", "0.43968296", "0.4396324", "0.43718514", "0.43648902", "0.43601263", "0.43555203", "0.43536934", "0.43507487", "0.43490398", "0.43382013", "0.433154", "0.43247524", "0.43081614", "0.43015495", "0.43007317", "0.42967975", "0.42958364", "0.42806712", "0.42634308", "0.42624155", "0.42570564", "0.42558098", "0.42479122", "0.42447534", "0.42407918", "0.42272377", "0.42230746", "0.42070308", "0.4204631", "0.42033848", "0.41999078", "0.4193842", "0.41915086", "0.41915086", "0.41887856", "0.41861156", "0.41854826", "0.41826335", "0.4176239", "0.41702527", "0.4168128", "0.41506922", "0.4142491", "0.41379288", "0.41373563", "0.4135588", "0.4132642", "0.41283166", "0.41240507", "0.41226777", "0.4116675" ]
0.7187933
0
SensePushLog pushes message to telegram channel `telegram.chat_id`
func SensePushMessage(caption string, photo string) error {
	pushFunc := func() error {
		bot, err := tgbotapi.NewBotAPI(Config.telegram_bot_token)
		if err != nil {
			log.Warnf("failed to initialize bot API: %v", err)
			reportFailure.Add(1)
			return err
		}

		msg := tgbotapi.NewPhotoUpload(Config.telegram_chat_id, photo)
		msg.Caption = caption
		msg.ParseMode = "markdown"
		_, err = bot.Send(msg)
		if err != nil {
			log.Warnf("failed to send message: %v", err)
			reportFailure.Add(1)
			return err
		}

		reportSuccess.Add(1)
		return nil
	}

	i := 0
	for ; ; {
		err := pushFunc()
		if err != nil {
			secs := (i + 1) * 5
			log.Warnf("%d attempt failed, retrying sending telegram message after %d seconds", i+1, secs)
			time.Sleep(time.Second * time.Duration(secs))
		} else {
			break
		}
		i += 1
		if i == 15 {
			log.Warnf("all attempts failed, stop retrying")
			return err
		}
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func SensePushLog(message string) error {\n\tbot, err := tgbotapi.NewBotAPI(Config.telegram_bot_token)\n\tif err != nil {\n\t\tlog.Warnf(\"failed to initialize bot API %v\", err)\n\t\treportFailure.Add(1)\n\t\treturn err\n\t}\n\n\tmsg := tgbotapi.NewMessage(Config.telegram_log_chat_id, message)\n\tmsg.ParseMode = \"markdown\"\n\tmsg.DisableNotification = true\n\t_, err = bot.Send(msg)\n\n\tif err != nil {\n\t\tlog.Warnf(\"failed to send message %v\", err)\n\t\treportFailure.Add(1)\n\t\treturn err\n\t}\n\n\treportSuccess.Add(1)\n\n\treturn nil\n}", "func (bot *SlackBot) sendMessage(msg Message) error {\n\tmsg.ID = atomic.AddUint64(&counter, 1)\n\terr := websocket.JSON.Send(bot.ws, msg)\n\treturn err\n}", "func (g *Game) pushChatMessage(player *Player, text string) {\n\tm := ChatMessage{Player: player, Text: text}\n\tfor _, p := range g.Players {\n\t\tp.ChatMessages.Push(m)\n\t}\n}", "func sendSlackMessage(message slack.Message) {\n\tuserData := SlackAPI.GetUserInfo(message.User)\n\ttimestampSplit := strings.Split(message.Ts, \".\")\n\ttimestampInt, err := strconv.ParseInt(timestampSplit[0], 10, 64)\n\ttimestamp := time.Unix(timestampInt, 0)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\terr = SlackAPI.PostMessage(conf.ChannelToMessage, conf.SlackMessageText)\n\tif err != nil {\n\t\tspew.Dump(err)\n\t}\n\n\terr = SlackAPI.PostMessage(conf.ChannelToMessage, \"> <@\"+userData.User.ID+\"> - \"+timestamp.Format(\"03:04:05 PM\")+\": \\n> \"+message.Text)\n\tif err != nil {\n\t\tspew.Dump(err)\n\t}\n}", "func SendMessageToSlack(message string) {\n\tfmt.Println(\"Sending message to slack...\")\n\n\thttp.Post(url, \"\")\n\n\treturn nil\n}", "func (s *SlcLogger) sendNotification(logLevel logLevel, color string, message interface{}, titleParam []string) error {\n\n\tvar text string\n\tif t, ok := message.(error); ok {\n\t\ttext = t.Error()\n\t} else if t, ok := message.(string); ok {\n\t\ttext = t\n\t} else {\n\t\treturn &SlcErr{errors.New(\"the type of message parameter should be string or error\"), 0}\n\t}\n\n\tif logLevel < s.LogLevel {\n\t\treturn nil\n\t}\n\tslackChannel := s.getTargetChannel(logLevel)\n\n\tpayload, err := s.buildPayload(slackChannel, color, text, titleParam)\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\n\treq, err := http.NewRequest(\"POST\", s.WebHookURL, bytes.NewBuffer(payload))\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\tctx := context.Background()\n\treq.WithContext(ctx)\n\n\tresp, err := http.DefaultClient.Do(req)\n\n\tdefer func() {\n\t\tif resp != nil {\n\t\t\t_, _ = io.Copy(ioutil.Discard, resp.Body)\n\t\t\t_ = resp.Body.Close()\n\t\t}\n\t}()\n\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\n\tif resp.StatusCode >= 400 {\n\t\tbody, _ := ioutil.ReadAll(resp.Body)\n\t\treturn &SlcErr{errors.New(string(body)), resp.StatusCode}\n\t}\n\n\treturn nil\n}", "func SendLog(taskID, msg string) {\n\tsockets.Message(taskID, []byte(msg))\n}", "func (hc *HuaweiPushClient) PushMsg(accessToken, deviceToken, payload string, timeToLive int) (string, error) {\n\treqUrl := PUSH_URL + \"?nsp_ctx=\" + url.QueryEscape(hc.NspCtx)\n\n\tnow := time.Now()\n\texpireSecond := time.Duration(timeToLive * 1e9)\n\texpireTime := now.Add(expireSecond)\n\n\tvar originParam = map[string]string{\n\t\t\"access_token\": accessToken,\n\t\t\"nsp_svc\": NSP_SVC,\n\t\t\"nsp_ts\": strconv.Itoa(int(time.Now().Unix())),\n\t\t\"device_token_list\": \"[\\\"\" + deviceToken + \"\\\"]\",\n\t\t\"payload\": 
payload,\n\t\t\"expire_time\": expireTime.Format(\"2006-01-02T15:04\"),\n\t}\n\n\tparam := make(url.Values)\n\tparam[\"access_token\"] = []string{originParam[\"access_token\"]}\n\tparam[\"nsp_svc\"] = []string{originParam[\"nsp_svc\"]}\n\tparam[\"nsp_ts\"] = []string{originParam[\"nsp_ts\"]}\n\tparam[\"device_token_list\"] = []string{originParam[\"device_token_list\"]}\n\tparam[\"payload\"] = []string{originParam[\"payload\"]}\n\tparam[\"expire_time\"] = []string{originParam[\"expire_time\"]}\n\n\t// push\n\tres, err := FormPost(reqUrl, param)\n\n\treturn string(res), err\n}", "func (sender *Sess) SendMessage(logEvent logevent.LogEvent) error {\n\tif sender.hecClient == nil {\n\t\treturn errors.New(\"SendMessage() called before OpenSvc()\")\n\t}\n\thecEvents := []*hec.Event{\n\t\tsender.formatLogEvent(logEvent),\n\t}\n\tsender.tracePretty(\"TRACE_SENDHEC time =\",\n\t\tlogEvent.Content.Time.UTC().Format(time.RFC3339),\n\t\t\" hecEvents =\", hecEvents)\n\terr := sender.hecClient.WriteBatch(hecEvents)\n\treturn err\n}", "func (c *ChanPost) slckSend(rtm *slack.RTM) {\n\tlog.Println(\"Message from bot received\")\n\tmessage := \"Infra Announce: \" + c.text + \" \" +\n\t\ttime.Unix(int64(c.date), 0).Format(\"Mon Jan _2 15:04\") +\n\t\t\"(from telegram channel)\"\n\trtm.SendMessage(rtm.NewOutgoingMessage(message, config.sChat))\n}", "func MessageLoggerSender(n *notif.SlackNotifier) {\n\t// for {\n\t// \tif len(messagesQueue) > 0 {\n\t// \t\tmsg := messagesQueue.Pop()\n\t// \t\tif n != nil {\n\t// \t\t\tif err := n.Notify(fmt.Sprintf(\"```%s```\", msg)); err != nil {\n\t// \t\t\t\tlog.Println(\"NOTIFY TO SLACK ERROR: \", err)\n\t// \t\t\t\tmessagesQueue = append(messagesQueue, msg)\n\t// \t\t\t}\n\t// \t\t}\n\t// \t\ttime.Sleep(10 * time.Millisecond)\n\t// \t} else {\n\t// \t\ttime.Sleep(10 * time.Second)\n\t// \t}\n\t// }\n}", "func (s *SlackSvc) SendMessage(report ReportPayload) error {\n\tattachments := make([]map[string]interface{}, 1)\n\tattachments[0] = map[string]interface{}{\"text\": fmt.Sprintf(\"Howdy! 
Here's a list of *%d* PRs waiting to be reviewed and merged:\", len(report.PRs))}\n\tfor _, v := range report.PRs {\n\t\tattachments = append(attachments, map[string]interface{}{\"text\": v.ToString()})\n\t}\n\n\tif len(report.Reminders) > 0 {\n\t\tfor _, v := range report.Reminders {\n\t\t\tattachments = append(attachments, map[string]interface{}{\"text\": v.Text})\n\t\t}\n\t}\n\n\tmessage := map[string]interface{}{\n\t\t\"channel\": s.channelID,\n\t\t\"username\": s.user,\n\t\t\"icon_emoji\": \":robot_face:\",\n\t\t\"attachments\": attachments,\n\t}\n\n\tpayload, err := json.Marshal(message)\n\tif err != nil {\n\t\tlog.Error().Err(err).Msg(\"Failed to serialize Slack payload\")\n\t\treturn err\n\t}\n\n\tresp, err := s.client.Post(s.webhook, \"application/json\", bytes.NewReader(payload))\n\tif err != nil {\n\t\tlog.Error().Err(err).Msgf(\"Failed to serialize Slack payload: %v\", err)\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tlog.Info().Msgf(\"Message successfully sent to channel %s\", s.channelID)\n\treturn nil\n}", "func sendMessage(message string) {\n\ts, _ := session.GetSession()\n\tif s != nil {\n\t\tconn := s.OpenConnection(chatServiceId)\n\t\tio.WriteString(conn, message)\n\t\tconn.Close()\n\t} else {\n\t\tfmt.Println(\"No chat sessions available\")\n\t}\n}", "func (s *Service) SendMessage(TargetChannelID, messageToSend string) error {\n\t_, err := s.discordapi.ChannelMessageSend(TargetChannelID, messageToSend)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error sending message to channel %v\", err)\n\t}\n\treturn nil\n}", "func logMsg(format string, a ...interface{}) {\n\tmsg := fmt.Sprintf(format, a...)\n\tlog.Println(msg)\n\tdiscord.ChannelMessageSend(logChannel, msg)\n}", "func sendMessage(recipient string, reviewUrl string) {\n\tb := new(bytes.Buffer)\n\tjson.NewEncoder(b).Encode(SlackMessage{Channel: recipient, Text: reviewUrl})\n\tresp, _ := http.Post(os.Getenv(\"SLACKURL\"), \"application/json; charset=utf-8\", b)\n\n\tdefer resp.Body.Close()\n\tioutil.ReadAll(resp.Body)\n}", "func (c *ClientType) sendToLoggly(msg messageType) error {\n\n\t// Marshal message object to JSON\n\tmsgBytes, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn errors.New(\"Could not create JSON payload\")\n\t}\n\n\t// Construct the HTTP request with timeout\n\thttpClient := &http.Client{\n\t\tTimeout: 7 * time.Second}\n\trequest, err := http.NewRequest(http.MethodPost, c.URL, bytes.NewBuffer(msgBytes))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Set the header attributes\n\trequest.Header.Add(\"Content-Type\", \"application/json\")\n\n\t_, err = httpClient.Do(request)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (s *SlackService) SendMessage(channel string, message string) {\n\t// https://godoc.org/github.com/nlopes/slack#PostMessageParameters\n\tpostParams := slack.PostMessageParameters{\n\t\tAsUser: true,\n\t}\n\n\t// https://godoc.org/github.com/nlopes/slack#Client.PostMessage\n\ts.Client.PostMessage(channel, message, postParams)\n}", "func (t *Tracker) send(color, message string) error {\n\tenv := os.Getenv(\"ENV\")\n\t// If no ENV is specified, assume we are in development mode, so we don't want to flood Slack uselessly.\n\tif env == \"\" {\n\t\treturn nil\n\t}\n\n\t_, perr := poster.Post(\n\t\tt.WebHook,\n\t\tmap[string]interface{}{\n\t\t\t\"text\" : fmt.Sprintf(\"%s - %s\", t.Application, env),\n\t\t\t\"attachments\": []map[string]interface{}{\n\t\t\t\t{\n\t\t\t\t\t\"color\": color,\n\t\t\t\t\t\"text\": 
fmt.Sprintf(\n\t\t\t\t\t\t\"*Message*\\n%s\\n\\n*Stack*\\n```%s```\\n\\n*Time*\\n%s\",\n\t\t\t\t\t\tmessage,\n\t\t\t\t\t\tstring(debug.Stack()),\n\t\t\t\t\t\ttime.Now().Format(\"2006-01-02 03:04:05\"),\n\t\t\t\t\t),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t)\n\n\t// An unexpected error happened when sending our message to Slack.\n\treturn perr\n}", "func sendMessageToChannel(s *discordgo.Session, channelID string, body string) (message *discordgo.Message) {\n\tmessage, err := s.ChannelMessageSend(channelID, body)\n\tif err != nil {\n\t\tfmt.Println(\"Error sending message to channel: \", err)\n\t}\n\treturn message\n}", "func (client *Client) SendMessage(channel, text string) error {\n\tmsg := struct {\n\t\tID int `json:\"id\"`\n\t\tType string `json:\"type\"`\n\t\tChannel string `json:\"channel\"`\n\t\tText string `json:\"text\"`\n\t}{client.messageID, \"message\", channel, text}\n\n\tif err := client.conn.WriteJSON(msg); err != nil {\n\t\treturn err\n\t}\n\n\tclient.messageID++\n\n\treturn nil\n}", "func (s *Slack) SendMessage(msg string) error {\n\tlog.Logger.Info(fmt.Sprintf(\">> Sending to slack: %+v\", msg))\n\n\tapi := slack.New(s.Token)\n\tparams := slack.PostMessageParameters{\n\t\tAsUser: true,\n\t}\n\n\tchannelID, timestamp, err := api.PostMessage(s.Channel, msg, params)\n\tif err != nil {\n\t\tlog.Logger.Errorf(\"Error in sending slack message %s\", err.Error())\n\t\treturn err\n\t}\n\n\tlog.Logger.Infof(\"Message successfully sent to channel %s at %s\", channelID, timestamp)\n\treturn nil\n}", "func (bot *Bot) SendMessage(chatID int64, message string) {\n\tlog.Printf(\"Send '%s' to %d chat id\\n\", message, chatID)\n\tmsg := tgbotapi.NewMessage(chatID, message)\n\tmsg.ParseMode = bot.MessageParseMode\n\t_, err := bot.API.Send(msg)\n\tif err != nil {\n\t\tlog.Panicf(\"Send Messsage Error: %v\\n\", err)\n\t}\n}", "func LogPush(log logging.Logger, v interface{}, path ...string) {\n\tlog.Println(\"Push to\", strings.Join(path, \"/\"), v)\n}", "func SendMessage() {\n\n\tmsg := fmt.Sprintf(`{\"Fuellid\":\"%t\", \"City\":\"%s\"}`, data.Fuellid, data.City)\n\terr := ch.Publish(\n\t\t\"\",\n\t\tq.Name,\n\t\tfalse,\n\t\tfalse,\n\t\tamqp.Publishing{\n\t\t\tContentType: \"text/plain\",\n\t\t\tBody: []byte(msg),\n\t\t},\n\t)\n\tlogger.LogMessage(err, \"Failed to Publish message\", \"Published the message\")\n}", "func sendEchoMessage(slackClient *slack.RTM, message, slackChannel string) {\n\tsplitMessage := strings.Fields(strings.ToLower(message))\n\tslackClient.SendMessage(slackClient.NewOutgoingMessage(strings.Join(splitMessage[1:], \" \"), slackChannel))\n}", "func (c *SlackConnection) SendMessage(message gateway.Message, channel *gateway.Channel) (*gateway.Message, error) {\n\tif strings.HasPrefix(message.Text, \"/\") {\n\t\t// Slash commands require some preprocessing.\n\t\treturn sendSlashCommand(c, message, channel)\n\t} else {\n\t\tlog.Printf(\"Sending message to team %s on channel %s\", c.Team().Name, channel.Name)\n\n\t\t// Otherwise just a plain message\n\t\t_, err := http.Get(\"https://slack.com/api/chat.postMessage?token=\" + c.token + \"&channel=\" + channel.Id + \"&text=\" + url.QueryEscape(message.Text) + \"&link_names=true&parse=full&unfurl_links=true&as_user=true\")\n\t\treturn nil, err\n\t}\n}", "func chat(ctx *gin.Context) {\n\tcred := credentials.NewStaticCredentials(os.Getenv(\"ACCESS_KEY_ID\"), os.Getenv(\"SECRET_ACCESS_KEY\"), \"\")\n\tconfig := aws.NewConfig().WithCredentials(cred).WithRegion(os.Getenv(\"AWS_REGION\"))\n\tsess := 
session.Must(session.NewSession(config))\n\tsvc := lexruntimeservice.New(sess)\n\tinput := &lexruntimeservice.PostTextInput{\n\t\tBotName: aws.String(ctx.Query(\"bot_name\")),\n\t\tBotAlias: aws.String(ctx.Query(\"bot_alias\")),\n\t\tInputText: aws.String(ctx.Query(\"message\")),\n\t\tUserId: aws.String(ctx.Query(\"user_id\")),\n\t}\n\tresult, err := svc.PostText(input)\n\tif err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t} else {\n\t\tctx.JSON(http.StatusOK, gin.H{\"error\": nil, \"message\": \"Bot Updated with new intent.\", \"data\": result})\n\t}\n}", "func SendDiscordLogEntry(message string) {\n\t// Create WebhookInfo\n\tinfo := WebhookInfo{}\n\t// Sets the content\n\tinfo.Content = message + \"\\n\"\n\t// Encode JSON\n\traw, _ := json.Marshal(info)\n\n\t// Post the JSON to URL\n\tresponse, err := http.Post(DiscordWebhookUrl, \"application/json\", bytes.NewBuffer(raw))\n\n\t// Check if an error\n\tif err != nil {\n\t\t// Print error\n\t\tfmt.Println(err)\n\t\t// Print response body\n\t\tfmt.Println(ioutil.ReadAll(response.Body))\n\t}\n}", "func (s *Client) SendMessage(text, channel string) error {\n\t_, _, respText, err := s.client.SendMessage(channel, slack.MsgOptionText(text, false))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlog.Println(\"Slack Respsonse text: \", respText)\n\n\treturn nil\n}", "func sendPush(apiKey string, name string, url string, newStatus string, oldStatus string) {\n\tlogging.MustGetLogger(\"\").Debug(\"Sending Push about \\\"\" + url + \"\\\"...\")\n\n\tpb := pushbullet.New(apiKey)\n\n\tpush := requests.NewLink()\n\tpush.Title = GetConfiguration().Application.Title + \" - Status Change\"\n\tpush.Body = name + \" went from \\\"\" + oldStatus + \"\\\" to \\\"\" + newStatus + \"\\\".\"\n\tpush.Url = url\n\n\t_, err := pb.PostPushesLink(push)\n\tif err != nil {\n\t\tlogging.MustGetLogger(\"\").Error(\"Unable to send Push: \", err)\n\t}\n}", "func (s *instance) postMessage(jsondata []byte) {\r\n\tif s.client != nil {\r\n\t\tchannelID, timestamp, err := s.client.PostMessage(s.config.Channel, slack.MsgOptionText(string(jsondata), false), slack.MsgOptionUsername(\"g0-h0m3\"), slack.MsgOptionAsUser(true))\r\n\t\tif err == nil {\r\n\t\t\ts.service.Logger.LogInfo(s.name, fmt.Sprintf(\"message '%s' send (%s, %s)\", string(jsondata), channelID, timestamp))\r\n\t\t} else {\r\n\t\t\ts.service.Logger.LogError(s.name, fmt.Sprintf(\"message '%s' not send (%s, %s)\", string(jsondata), s.config.Channel, timestamp))\r\n\t\t\ts.service.Logger.LogError(s.name, err.Error())\r\n\t\t}\r\n\t} else {\r\n\t\ts.service.Logger.LogError(s.name, \"service not connected\")\r\n\t}\r\n}", "func SyncPush(w http.ResponseWriter, req *http.Request) {\n\tqueryString := req.URL.Query()\n\tuid := queryString.Get(\"uid\")\n\n\tdata, err := ioutil.ReadAll(req.Body)\n\tif err != nil {\n\t\thttp.Error(w, \"bad request\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tgo PushMessage(uid, data)\n\n\tif DEBUG {\n\t\t// echo\n\t\tw.Write(data)\n\t}\n}", "func (t *Client) SendPushNotification(title, msg string) (int, error) {\n\treq := graphql.NewRequest(`\n\t\tmutation sendPushNotification($input: PushNotificationInput!){\n\t\t\tsendPushNotification(input: $input){\n\t\t \t\tsuccessful\n\t\t \t\tpushedToNumberOfDevices\n\t\t\t}\n\t }`)\n\tinput := PushInput{\n\t\tTitle: title,\n\t\tMessage: msg,\n\t\tScreenToOpen: \"CONSUMPTION\",\n\t}\n\treq.Var(\"input\", input)\n\treq.Header.Set(\"Cache-Control\", 
\"no-cache\")\n\treq.Header.Set(\"Authorization\", \"Bearer \"+t.Token)\n\tctx := context.Background()\n\t//ctx, _ := context.WithTimeout(context.Background(), time.Second*2)\n\tvar result PushResponse\n\tif err := t.gqlClient.Run(ctx, req, &result); err != nil {\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\treturn result.SendPushNotification.PushedToNumberOfDevices, nil\n}", "func SlackSendMsg(slackAPI *slack.Client, MsgText string) error {\n\tif _, _, err := slackAPI.PostMessage(viper.GetString(\"slack_channel_id\"), slack.MsgOptionText(MsgText, false)); err != nil {\n\t\tlog.Errorf(\"[ERROR] %s\\n\", err)\n\t\treturn err\n\t}\n\tlog.Infof(\"[Slack] Send %s\", MsgText)\n\treturn nil\n}", "func ws_SendMsg(ws *websocket.Conn, send_channel SendChannel) {\n\tfor {\n\t\tselect {\n\t\tcase send_msg := <-send_channel.containers:\n\t\t\tlog.Printf(\"[%s] containers sendMessage= \", __FILE__, send_msg)\n\t\t\twebsocket.JSON.Send(ws, send_msg)\n\t\tcase send_msg := <-send_channel.updateinfo:\n\t\t\tlog.Printf(\"[%s] update sendMessage=\", __FILE__, send_msg)\n\t\t}\n\t}\n}", "func (mb MessageBroadcast) Push(title, body, url, serviceName string) error {\n\terr := mb.Notifier.SendNotification(fmt.Sprintf(\"%s - %s\", serviceName, title), body, url)\n\treturn err\n}", "func Message(level string, msg string) {\n\tonce.Do(initChannel)\n\n\tbody := buildBody(level, msg)\n\tdata := body[\"data\"].(map[string]interface{})\n\tdata[\"body\"] = messageBody(msg)\n\n\tpush(body)\n}", "func (t *Telegram) SendMessage(text string) error {\n\tlog.Printf(\"Send to telegram chat %d: %s\", t.chatID, text)\n\tmessage := &Message{\n\t\tChatID: t.chatID,\n\t\tText: text,\n\t}\n\tdata, err := json.Marshal(message)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// build request to telegram API\n\treq, err := t.buildRequest(data, SendMessageMethod)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tbody, _ := ioutil.ReadAll(resp.Body)\n\t\tlog.Fatal(string(body))\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn nil\n}", "func (chat ChatMsg) SendChatMsg(data string) {\n\n\tif strings.HasPrefix(data, \"L \") {\n\t\ttmp1 := strings.TrimLeft(data, \"L \")\n\n\t\tchatting.Msg += \"Me: \" + tmp1 + \"\\n\"\n\t\tqml.Changed(chatting, &chatting.Msg)\n\n\t} else if strings.HasPrefix(data, \"G \") {\n\t\ttmp2 := strings.TrimLeft(data, \"G \")\n\n\t\tglobalchatting.Msg += \"Me: \" + tmp2 + \"\\n\"\n\t\tqml.Changed(globalchatting, &globalchatting.Msg)\n\t}\n\n\tmessenger.Msnger.Send_message(data, mylib.CHAT_MESSAGE)\n}", "func sendTextToTelegramChat(chatId int, text string) (string, error) {\n\n\tlog.Printf(\"Sending %s to chat_id: %d\", text, chatId)\n\tresponse, err := http.PostForm(\n\t\ttelegramApi,\n\t\turl.Values{\n\t\t\t\"chat_id\": {strconv.Itoa(chatId)},\n\t\t\t\"text\": {text},\n\t\t})\n\n\tif err != nil {\n\t\tlog.Printf(\"error when posting text to the chat: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\tdefer response.Body.Close()\n\n\tvar bodyBytes, errRead = ioutil.ReadAll(response.Body)\n\tif errRead != nil {\n\t\tlog.Printf(\"error in parsing telegram answer %s\", errRead.Error())\n\t\treturn \"\", err\n\t}\n\tbodyString := string(bodyBytes)\n\tlog.Printf(\"Body of Telegram Response: %s\", bodyString)\n\n\treturn bodyString, nil\n}", "func (m *Slack) Send(mes *message.Message) error {\n\topts := createSlackMessageOptions(mes.Text, mes.Image, mes.Fields, mes.Level)\n\n\t_channel, _timestamp, _text, err := m.api.SendMessage(m.channel, 
opts...)\n\n\tm.logger.Debug(\"send slack message\",\n\t\tzap.String(\"channel\", _channel),\n\t\tzap.String(\"timestamp\", _timestamp),\n\t\tzap.String(\"text\", _text),\n\t\tzap.Error(err),\n\t)\n\n\treturn err\n}", "func (wechatPush *WechatPush) WriteMsg(when time.Time, msg string, level int) error {\n\tif level > wechatPush.Level {\n\t\treturn nil\n\t}\n\n\tdata := InitPushData(msg)\n\n\tfor _, id := range wechatPush.WechatIds {\n\t\terr := wechatPush.message.Push(id, \"\", wechatPush.TmpId, data)\n\t\tfmt.Printf(\"push data to user:%v, error:%v\\n\", id, err)\n\t}\n\treturn nil\n}", "func (lt *TransportGKE) SendLog(le *LogEntry) error {\n\tpayload := le.Payload\n\tif le.Message != \"\" {\n\t\tpayload[\"message\"] = le.Message\n\t}\n\n\tpayload[\"severity\"] = le.Severity\n\tif le.HTTPRequest != nil {\n\t\tpayload[\"httpRequest\"] = formatHTTPRequest(le.HTTPRequest)\n\t}\n\n\tif len(le.Labels) > 0 {\n\t\tpayload[\"labels\"] = le.Labels\n\t}\n\n\treturn json.NewEncoder(lt.logWriter).Encode(payload)\n}", "func PushMessage(action string, what string) string {\n\treturn fmt.Sprintf(\"To %s %s on the OpenShift Cluster, please use `odo push` \\n\", action, what)\n}", "func Chat(chub *connections.ConnectionHub) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tgo chub.WriteMessage()\n\t\tlog.Println(\"wrote message\")\n\t})\n}", "func (b *botHTTP) PushMessage(ctx context.Context, message *domain.LineMessage) error {\n\tbuffer := new(bytes.Buffer)\n\tencoder := json.NewEncoder(buffer)\n\tencoder.SetEscapeHTML(true)\n\tencoder.Encode(message)\n\n\turl := \"https://api.line.me/v2/bot/message/push\"\n\tbody, _, err := b.httpReq.Do(ctx, http.MethodPost, url, buffer.Bytes(), map[string]string{\n\t\t\"Content-Type\": \"application/json\",\n\t\t\"Authorization\": fmt.Sprintf(\"Bearer %s\", shared.GetEnv().LineClientToken),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(string(body))\n\treturn nil\n}", "func (d *DingTalkClient) SendMessage(msg DingTalkMessage) error {\n\n\tvar message string\n\tswitch msg.Type {\n\tcase \"text\":\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"text\",\"text\": {\"content\": \"监控报警: %s\"}}`, msg.Message)\n\tcase \"markdown\":\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"markdown\",\"markdown\":{\"title\": 监控报警: \"%s\", \"text\": \"%s\"}}`, msg.Title, msg.Message)\n\tdefault:\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"text\",\"text\": {\"content\": \"监控报警: %s\"}}`, msg.Message)\n\t}\n\n\tclient := &http.Client{}\n\trequest, _ := http.NewRequest(\"POST\", d.RobotURL, bytes.NewBuffer([]byte(message)))\n\trequest.Header.Set(\"Content-type\", \"application/json\")\n\tresponse, err := client.Do(request)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"访问钉钉URL(%s) 出错了: %s\", d.RobotURL, err)\n\t}\n\tif response.StatusCode != 200 {\n\t\tbody, _ := ioutil.ReadAll(response.Body)\n\t\treturn fmt.Errorf(\"访问钉钉URL(%s) 出错了: %s\", d.RobotURL, string(body))\n\t}\n\treturn nil\n}", "func sendChannelMessage(api *slack.Client, channel string, message models.Message) error {\n\treturn sendMessage(api, message.IsEphemeral, channel, message.Vars[\"_user.id\"], message.Output, message.ThreadTimestamp, message.Attributes[\"ws_token\"], message.Remotes.Slack.Attachments)\n}", "func (logger logger) Send(log log.Log) {\n\tval := log.Text\n\tif log.Error != nil {\n\t\tval += fmt.Sprintf(\" %s\", log.Error.Error())\n\t}\n\tfmt.Println(val)\n}", "func Send(config config.Config, text string) {\n\tapi := slack.New(config.Slack.Token)\n\tparams := 
slack.PostMessageParameters{}\n\tparams.IconEmoji = config.Slack.IconEmoji\n\tparams.Username = config.Slack.Username\n\tchannelID, timestamp, err := api.PostMessage(config.Slack.Channel, text, params)\n\tif err != nil {\n\t\tfmt.Printf(\"%s\\n\", err)\n\t\treturn\n\t}\n\tfmt.Printf(\"Message successfully sent to channel %s at %s\", channelID, timestamp)\n}", "func SendMsg(message string) {\n\tbotMutex.Lock()\n\tdefer botMutex.Unlock()\n\tif !messageReceived {\n\t\tlog.Println(\n\t\t\t\"Write a message to the bot for specifying notifiable chat ID\",\n\t\t)\n\t\treturn\n\t}\n\tmsg := tgbotapi.NewMessage(notifiableChatID, message)\n\t_, err := bot.Send(msg)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "func (bot *bot) sendMessage(content string, tts bool) {\n\tbot.channelsMu.RLock()\n\tdefer bot.channelsMu.RUnlock()\n\tfor channelID := range bot.channels {\n\t\tvar err error\n\t\tif tts {\n\t\t\t_, err = bot.discordSession.ChannelMessageSendTTS(string(channelID), content)\n\t\t} else {\n\t\t\t_, err = bot.discordSession.ChannelMessageSend(string(channelID), content)\n\t\t}\n\t\tif err != nil {\n\t\t\tbot.logger.Errorf(\"Failed sending message to channel %s: %+v\", channelID, err)\n\t\t}\n\t}\n}", "func (d *handler) SendMessage(msg *pb.Event) error {\n\terr := d.ChatStream.Send(msg)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error Sending message through ChatStream: %s\", err)\n\t}\n\treturn nil\n}", "func (client *Client) sendMessage(msg interface{}) {\n\tstr, err := json.Marshal(msg)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\ts := string(str)\n\tmetrics.SendMessage(len(s))\n\tclient.socketTx <- s\n}", "func sendToVK(token string, message string, ID int64) {\n\t//VK Part\n\n\t//client, err := vkapi.NewClientFromLogin(\"<username>\", \"<password>\", vkapi.ScopeMessages)\n\tclient, err := vkapi.NewClientFromToken(token)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\tclient.Log(true)\n\n\tif err := client.InitLongPoll(0, 2); err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\t//Send one consoleMSG to chatID!\n\tclient.SendMessage(vkapi.NewMessage(vkapi.NewDstFromChatID(ID), message))\n}", "func (d *Discord) SendMessage(channel, message string) error {\n\tif channel == \"\" {\n\t\tlog.Println(\"Empty channel could not send message\", message)\n\t\treturn nil\n\t}\n\n\tif _, err := d.Session.ChannelMessageSend(channel, message); err != nil {\n\t\tlog.Println(\"Error sending discord message: \", err)\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func TelegramSendMsg(telegramAPI *tgbotapi.BotAPI, MsgText string) (tgbotapi.Message, error) {\n\tres, err := telegramAPI.Send(tgbotapi.NewMessage(viper.GetInt64(\"telegram_chat_id\"), MsgText))\n\tif err != nil {\n\t\tlog.Errorf(\"[ERROR] %s\\n\", err)\n\t\treturn res, err\n\t}\n\tlog.Infof(\"[Telegram] Send %s\", MsgText)\n\treturn res, nil\n}", "func (p *BoteaterServiceClient) SendChatChecked(ctx context.Context, seq int32, chatMid string, lastMessageId string) (err error) {\r\n var _args47 BoteaterServiceSendChatCheckedArgs\r\n _args47.Seq = seq\r\n _args47.ChatMid = chatMid\r\n _args47.LastMessageId = lastMessageId\r\n var _result48 BoteaterServiceSendChatCheckedResult\r\n if err = p.Client_().Call(ctx, \"sendChatChecked\", &_args47, &_result48); err != nil {\r\n return\r\n }\r\n switch {\r\n case _result48.E!= nil:\r\n return _result48.E\r\n }\r\n\r\n return nil\r\n}", "func (b *Bot) SendMessage(chatID int64, text string) (Message, error) {\n\n\turl := fmt.Sprintf(\"https://api.telegram.org/bot%s/sendMessage?chat_id=%d&text=%s\", b.Token, chatID, 
text)\n\n\tresp, err := getContent(url)\n\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\n\tvar m Message\n\terr = json.Unmarshal(resp, &m)\n\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\n\treturn m, nil\n\n}", "func sendNotificationToSlack(payloadJSONEncoded []byte, response chan<- *http.Response) {\n\tfmt.Println(\"Sending notification to Slack...\")\n\n\t// Récupération des paramètres\n\t// ---------------------------\n\thookURL = config.SlackHookURL\n\thookPayload = config.SlackHookPayload\n\n\t// Envoi de la requête\n\t// -------------------\n\tresponse <- sendNotificationToApplication(hookURL, hookPayload, payloadJSONEncoded)\n}", "func (bot *BotAPI) SendMessage(chatID int64, chatType string, message interface{}) (Message, error) {\n\treturn bot.Send(NewMessage(chatID, chatType, message))\n}", "func (s *Server) Push(id string, data []byte) error {\n\tch, ok := s.ChannelMap.Get(id)\n\tif !ok {\n\t\treturn errors.New(\"channel no found\")\n\t}\n\treturn ch.Push(data)\n}", "func message(w http.ResponseWriter, r *http.Request) {\n\tvar rBody request\n\n\tif err := json.NewDecoder(r.Body).Decode(&rBody); err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Unable to decode message\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tc, err := models.GetConnectionWithRecipients(rBody.ConnectionID)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Unable to get connection\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tdbMsg := &models.Message{\n\t\tConnectionID: rBody.ConnectionID,\n\t\tMessage: rBody.Text,\n\t\tSourceUser: *c.SourceUser,\n\t\tSourceUserID: c.SourceUserID,\n\t\tDestinationUserID: c.DestinationUserID,\n\t\tChannel: rBody.Channel,\n\t}\n\n\tif err := dbMsg.Save(); err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Unable to save message. 
\", err.Error()), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tcMsg := models.ChannelMessage{\n\t\tID: int(dbMsg.ID),\n\t\tCreatedAt: dbMsg.CreatedAt,\n\t\tMessage: dbMsg.Message,\n\t\tReadBy: dbMsg.ReadBy,\n\t\tUser: rBody.User,\n\t}\n\n\tpayload := pubMessage{\n\t\t// populate embedded struct (promoted fields)\n\t\tdefaultProperties: defaultProperties{\n\t\t\tChannel: rBody.Channel,\n\t\t\tType: rBody.Type,\n\t\t\tSubType: rBody.SubType,\n\t\t},\n\t\tChannelMessage: cMsg,\n\t}\n\n\t// prep for ws\n\tbytes, err := json.Marshal(payload)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"Unable to marshal chat message\", err.Error()), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tpubMessage := hub.PublishMessage{\n\t\tTopic: rBody.Channel,\n\t\tPayload: bytes,\n\t}\n\n\tbroker.Hub.Publish(pubMessage)\n\n\tw.WriteHeader(http.StatusOK)\n\treturn\n}", "func (s *Slack) Send(color, msg string, v ...interface{}) error {\n\tb, err := json.Marshal(&payload{\n\t\tChannel: s.channel,\n\t\tUsername: s.username,\n\t\tIconURL: s.iconURL,\n\t\tAttachments: []attachment{\n\t\t\t{\n\t\t\t\tColor: color,\n\t\t\t\tText: fmt.Sprintf(msg, v...),\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.infof(\"payload: %s\", b)\n\tr, err := http.Post(s.webhookURL, \"application/json\", bytes.NewReader(b))\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.infof(\"response: %s\", r.Status)\n\n\tif r.StatusCode >= 400 {\n\t\treturn &ResponseError{r}\n\t}\n\treturn nil\n}", "func sayPolo(chatID int64) error {\n\t// Create the request body struct\n\treqBody := &sendMessageReqBody{\n\t\tChatID: chatID,\n\t\tText: \"Polo!!\",\n\t}\n\t// Create the JSON body from the struct\n\treqBytes, err := json.Marshal(reqBody)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Send a post request with your token\n\tres, err := http.Post(\"https://api.telegram.org/bot1495263911:AAEjmxdCuPazMzeegGeZpm1RLBBMtFgx2oE/sendMessage\", \"application/json\", bytes.NewBuffer(reqBytes))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"unexpected status\" + res.Status)\n\t}\n\n\treturn nil\n}", "func (o IntegrationSlackOutput) PushChannel() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *IntegrationSlack) pulumi.StringPtrOutput { return v.PushChannel }).(pulumi.StringPtrOutput)\n}", "func (s *Slack) SendMessage(message string) error {\n\t_, _, err := s.client.PostMessage(s.channelID, slack.MsgOptionText(message, false))\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"client.PostMessage(): failed to post message\")\n\t}\n\n\treturn nil\n}", "func SendMessage(version, build string) error {\n\texpanded, err := homedir.Expand(configPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := os.Stat(expanded); os.IsNotExist(err) {\n\t\treturn errgo.Mask(ErrNotConfigured, errgo.Any)\n\t}\n\n\tslackConfiguration := SlackConfiguration{\n\t\tNotificationUsername: \"KochoBot\",\n\t\tEmojiIcon: \":robot_face:\",\n\t}\n\n\tconfigFile, err := os.Open(expanded)\n\tif err != nil {\n\t\treturn errgo.WithCausef(err, ErrInvalidConfiguration, \"couldn't open Slack configuration file\")\n\t}\n\tdefer configFile.Close()\n\n\tif err := json.NewDecoder(configFile).Decode(&slackConfiguration); err != nil {\n\t\treturn errgo.WithCausef(err, ErrInvalidConfiguration, \"couldn't decode Slack configuration\")\n\t}\n\n\tclient := slack.New(slackConfiguration.Token)\n\n\tparams := slack.PostMessageParameters{}\n\tparams.Attachments = []slack.Attachment{\n\t\tslack.Attachment{\n\t\t\tColor: 
\"#2484BE\",\n\t\t\tText: fmt.Sprintf(\"*Kocho*: %s ran `%s`\", slackConfiguration.Username, strings.Join(os.Args, \" \")),\n\t\t\tFields: []slack.AttachmentField{\n\t\t\t\tslack.AttachmentField{\n\t\t\t\t\tTitle: \"Kocho Version\",\n\t\t\t\t\tValue: version,\n\t\t\t\t\tShort: true,\n\t\t\t\t},\n\t\t\t\tslack.AttachmentField{\n\t\t\t\t\tTitle: \"Kocho Build\",\n\t\t\t\t\tValue: build,\n\t\t\t\t\tShort: true,\n\t\t\t\t},\n\t\t\t},\n\t\t\tMarkdownIn: []string{\"text\"},\n\t\t},\n\t}\n\tparams.Username = slackConfiguration.NotificationUsername\n\tparams.IconEmoji = slackConfiguration.EmojiIcon\n\n\tif _, _, err := client.PostMessage(slackConfiguration.NotificationChannel, \"\", params); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func sendToWs(msg network.Message, update bool, s *session, messagesDb *MsgDb) {\n\tmsgNum := msg.Seqnum\n\twsMsg := wsMessage{Src: msg.Src, Dst: msg.Dst,\n\t\tMsgNumber: strconv.FormatUint(msgNum, 10), Payload: string(msg.Payload)}\n\ts.logger.Debug(\"sending json message to WS\", zap.Any(\"msg\", wsMsg))\n\tif update {\n\t\t(*messagesDb)[msgNum] = msg\n\t}\n\n\tif err := s.conn.WriteJSON(wsMsg); err != nil {\n\t\ts.logger.Error(\"failed to send json message\", zap.Error(err))\n\t\treturn\n\t}\n}", "func (bot *luaBot) sendChattable(config Chattable) (Message, error) {\n\tv, err := config.values()\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\tmessage, err := bot.makeMessageRequest(config.method(), v)\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\treturn message, nil\n}", "func (t *Telegram) SendMessage(message string) error {\n\tu := t.formatURL(\"%s/bot%s/sendMessage\")\n\tcontentType := \"application/x-www-form-urlencoded\"\n\tvalues := url.Values{}\n\tvalues.Add(\"chat_id\", t.chatID)\n\tvalues.Add(\"text\", message)\n\treader := strings.NewReader(values.Encode())\n\t_, err := http.Post(u, contentType, reader)\n\treturn err\n}", "func (s *slackMessenger) Send(args ...interface{}) error {\n\tif 1 > len(args) {\n\t\treturn fault.InsufficientSlackSendParameter\n\t}\n\n\tmessage := args[0].(string)\n\ts.rtm.SendMessage(s.rtm.NewOutgoingMessage(message, s.channelID))\n\treturn nil\n}", "func (m *GoogleChatManager) Push(alerts []alertmgrtmpl.Alert) error {\n\tm.lo.WithField(\"count\", len(alerts)).Info(\"dispatching alerts to google chat\")\n\n\t// For each alert, lookup the UUID and send the alert.\n\tfor _, a := range alerts {\n\t\t// If it's a new alert whose fingerprint isn't in the active alerts map, add it first.\n\t\tif m.activeAlerts.loookup(a.Fingerprint) == \"\" {\n\t\t\tm.activeAlerts.add(a)\n\t\t}\n\n\t\t// Prepare a list of messages to send.\n\t\tmsgs, err := m.prepareMessage(a)\n\t\tif err != nil {\n\t\t\tm.lo.WithError(err).Error(\"error preparing message\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// Dispatch an HTTP request for each message.\n\t\tfor _, msg := range msgs {\n\t\t\tvar (\n\t\t\t\tthreadKey = m.activeAlerts.alerts[a.Fingerprint].UUID.String()\n\t\t\t\tnow = time.Now()\n\t\t\t)\n\n\t\t\tm.metrics.Increment(fmt.Sprintf(`alerts_dispatched_total{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()))\n\n\t\t\t// Send message to API.\n\t\t\tif m.dryRun {\n\t\t\t\tm.lo.WithField(\"room\", m.Room()).Info(\"dry_run is enabled for this room. 
skipping pushing notification\")\n\t\t\t} else {\n\t\t\t\tif err := m.sendMessage(msg, threadKey); err != nil {\n\t\t\t\t\tm.metrics.Increment(fmt.Sprintf(`alerts_dispatched_errors_total{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()))\n\t\t\t\t\tm.lo.WithError(err).Error(\"error sending message\")\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tm.metrics.Duration(fmt.Sprintf(`alerts_dispatched_duration_seconds{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()), now)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (dataChannel *DataChannel) SendMessage(log log.T, input []byte, inputType int) error {\n\treturn dataChannel.wsChannel.SendMessage(log, input, inputType)\n}", "func (o IntegrationSlackOutput) TagPushChannel() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *IntegrationSlack) pulumi.StringPtrOutput { return v.TagPushChannel }).(pulumi.StringPtrOutput)\n}", "func (c *Client) SendMessage(\n\tchannelID discord.ChannelID, content string, embed *discord.Embed) (*discord.Message, error) {\n\n\treturn c.SendMessageComplex(channelID, SendMessageData{\n\t\tContent: content,\n\t\tEmbed: embed,\n\t})\n}", "func (s *LoginSocket) SendMessage(c *Client, msgType string, p interface{}) {\n\tlogger.Info(\"SendMessage on login channel\", msgType, p)\n\tgo c.SendMessage(LoginChannel, msgType, p)\n}", "func (conn SplunkConnection) SendMessage(message *Message) (string, error) {\n\tdata := make(url.Values)\n\tdata.Add(\"name\", message.Name)\n\tdata.Add(\"value\", message.Content.Message)\n\tdata.Add(\"severity\", string(message.Content.Severity))\n\tresponse, err := conn.HTTPPost(fmt.Sprintf(\"%s/services/messages\", conn.BaseURL), &data)\n\treturn response, err\n}", "func (c *client) SendChatAction(args SendChatActionArgs) TelegramError {\n\tresponse, err := c.sendJSON(args)\n\tif err != nil {\n\t\treturn errToTelegramErr(err)\n\t}\n\tif response.StatusCode != http.StatusOK {\n\t\treturn responseToTgError(response)\n\t}\n\treturn nil\n}", "func SendPushNotification(ctx context.Context, m PubSubMessage) error {\n\tlog.Printf(\"SendPushNotification triggered with payload: %v\\n\", string(m.Data))\n\n\t// validate payload\n\tvar payload model.PushNotificationPayload\n\tif err := json.Unmarshal(m.Data, &payload); err != nil {\n\t\treturn err\n\t}\n\tif payload.Title == \"\" || payload.Body == \"\" || payload.UserID == \"\" {\n\t\treturn errors.New(\"Invalid message payload: missing user_id, title or body\")\n\t}\n\n\t// get user's FCM tokens\n\tfclient, err := firebaseApp.FirestoreClient(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer fclient.Close()\n\n\tdoc, err := fclient.Collection(\"users\").Doc(payload.UserID).Get(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\tuser := doc.Data()\n\ttokens, ok := user[\"fcm_tokens\"]\n\tif !ok {\n\t\tlog.Printf(\"User %v doesn't have FCM tokens\", payload.UserID)\n\t\treturn nil\n\t}\n\ttokensMap := tokens.(map[string]interface{}) // convert to map\n\tif len(tokensMap) == 0 {\n\t\tlog.Printf(\"User %v doesn't have FCM tokens\", payload.UserID)\n\t\treturn nil\n\t}\n\n\t// build notification message\n\tclient, err := firebaseApp.MessagingClient(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Android & iOS\n\tnotification := &messaging.Notification{\n\t\tTitle: payload.Title,\n\t\tBody: payload.Body,\n\t}\n\n\t// -- Android sepcific config\n\tandroidNotification := &messaging.AndroidNotification{\n\t\tIcon: 
\"https://firebasestorage.googleapis.com/v0/b/gatrabali.appspot.com/o/app%2Fnotification.png?alt=media&token=b76afe54-fc9c-4a05-addb-3f9eaaee7d2f\",\n\t\tColor: \"#4CB050\",\n\t}\n\tandroidConfig := messaging.AndroidConfig{\n\t\tNotification: androidNotification,\n\t}\n\tif payload.CollapseKey != \"\" {\n\t\tandroidConfig.CollapseKey = payload.CollapseKey\n\t}\n\t// -- End Android sepcific config\n\n\t// loop through tokens and send the notification\n\tfor token := range tokensMap {\n\t\tmessage := &messaging.Message{\n\t\t\tData: payload.Data,\n\t\t\tNotification: notification,\n\t\t\tToken: token,\n\t\t\tAndroid: &androidConfig,\n\t\t}\n\n\t\tresp, err := client.Send(ctx, message)\n\t\tif err != nil {\n\t\t\t// if error, delete token\n\t\t\tlog.Printf(\"Notification not sent: %v\\n\", err)\n\t\t\tdelete(tokensMap, token)\n\t\t} else {\n\t\t\tlog.Printf(\"Notification sent: %v\\n\", resp)\n\t\t}\n\t}\n\n\t// store back the remaining tokens to user document\n\t_, err = fclient.Collection(\"users\").Doc(payload.UserID).Update(ctx, []firestore.Update{{Path: \"fcm_tokens\", Value: tokensMap}})\n\tif err != nil {\n\t\tlog.Printf(\"Error saving fcm_tokens back to user doc: %v\", err)\n\t}\n\treturn nil\n}", "func (st *stream) Push(msg string) *SSEvent {\n\n st.lastId++\n now := time.Now()\n event := &SSEvent{\n Id: st.lastId,\n Event: \"message\",\n Data: msg,\n Timestamp: now,\n }\n\n /* Keep up to 1 minute of recent events. */\n st.recent = append(st.recent, event)\n cutoff := now.Add(-60 * time.Second)\n var i int\n var ev *SSEvent\n for i, ev = range st.recent {\n if ev.Timestamp.After(cutoff) {\n break\n }\n }\n st.recent = st.recent[i:]\n\n return event\n}", "func (s *SlackNotify) Send(v ...interface{}) error {\n\tif s.URL == \"\" {\n\t\treturn nil\n\t}\n\tpayload, err := json.Marshal(slackMsg{Text: fmt.Sprint(s.prefix, v)})\n\tif err != nil {\n\t\treturn err\n\t}\n\treq, _ := http.NewRequest(\"POST\", s.URL, bytes.NewBuffer(payload))\n\treq.Header.Add(\"content-type\", \"application/json\")\n\tres, err := s.c.Do(req)\n\tdefer res.Body.Close()\n\tif res.StatusCode != 200 {\n\t\treturn fmt.Errorf(\"Error posting to slack\")\n\t}\n\treturn nil\n}", "func SendMessage(gid wasabee.GoogleID, message string) (bool, error) {\n\tgid.FirebaseGenericMessage(message)\n\twasabee.Log.Debugw(\"generic message\", \"subsystem\", \"Firebase\", \"GID\", gid)\n\treturn false, nil\n}", "func (h *handler) logSend(msg string) {\n\th.logger.logSend(msg)\n}", "func (self *GameHeart) Logs(msg *HeartMessageType) {\n\n}", "func sendMessage(p orgbot.Platform, m message) error {\n\tqueueService, err := newQueueService(p.Config())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := queueService.submit(&m); err != nil {\n\t\treturn errors.Wrap(err, \"failed to submit command\")\n\t}\n\n\treturn nil\n}", "func (c *Client) PushTo(ch chan<- message.Message) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\terr, ok := r.(error)\n\t\t\tif !ok {\n\t\t\t\tlog.Println(err)\n\t\t\t}\n\t\t}\n\t}()\n\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tdefault:\n\t\t\t\tvar msg message.Message\n\t\t\t\terr := c.conn.ReadJSON(&msg)\n\t\t\t\tif err == io.ErrUnexpectedEOF || websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway) {\n\t\t\t\t\tlog.Println(err)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\n\t\t\t\tch <- msg\n\n\t\t\tcase <-c.done:\n\t\t\t\tclose(c.done)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}()\n}", "func sendText(msg string, s *discordgo.Session, m *discordgo.MessageCreate) {\n\t_, _ = 
s.ChannelMessageSend(m.ChannelID, msg)\n\tfmt.Printf(\"Sending %q\\n\", msg)\n}", "func SendMessage(botToken string, chatId string, text string) (*Message, error) {\n\tbody := &SendMessageRequestBody{\n\t\tChatID: chatId,\n\t\tText: text,\n\t}\n\tb, err := json.Marshal(&body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresponse, err := http.Post(\"https://api.telegram.org/\"+botToken+\"/sendMessage\", \"application/json\", bytes.NewBuffer(b))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif response.StatusCode != http.StatusOK {\n\t\treturn nil, errors.New(\"sendMessage request returned: \" + response.Status)\n\t}\n\tvar result *ResponseBody\n\tif err := json.NewDecoder(response.Body).Decode(&result); err != nil {\n\t\treturn nil, errors.New(\"could not convert response to *ResponseBody\")\n\t}\n\treturn result.Result, nil\n}", "func (h *wsHub) send(message []byte) {\n\th.broadcast <- message\n}", "func SendMessage(channelID string, message string) (*discordgo.Message, error) {\n\ttranslations := cache.Geti18nTranslations()\n\n\tcache.GetDiscordSession().ChannelTyping(channelID)\n\n\t// check if the error code has a user translation\n\tif translations.ExistsP(message) {\n\t\tmessage = translations.Path(message).Data().(string)\n\t}\n\n\t// output translation to user\n\tsentMsg, err := cache.GetDiscordSession().ChannelMessageSend(channelID, message)\n\treturn sentMsg, err\n}", "func (s *Sun) SendMessage(Message string) {\n\tfor _, player := range s.Players {\n\t\t//Send raw chat to each player as client will accept it\n\t\t_ = player.conn.WritePacket(&packet.Text{Message: Message, TextType: packet.TextTypeRaw})\n\t}\n}", "func (d *Dao) Send(c context.Context, mc, title, msg string, mid int64) (err error) {\n\tparams := url.Values{}\n\tparams.Set(\"type\", \"json\")\n\tparams.Set(\"source\", \"1\")\n\tparams.Set(\"data_type\", \"4\")\n\tparams.Set(\"mc\", mc)\n\tparams.Set(\"title\", title)\n\tparams.Set(\"context\", msg)\n\tparams.Set(\"mid_list\", strconv.FormatInt(mid, 10))\n\tvar res struct {\n\t\tCode int `json:\"code\"`\n\t}\n\tif err = d.client.Post(c, d.uri, \"\", params, &res); err != nil {\n\t\tlog.Error(\"message url(%s) error(%v)\", d.uri+\"?\"+params.Encode(), err)\n\t\treturn\n\t}\n\tlog.Info(\"SendSysNotify url: (%s)\", d.uri+\"?\"+params.Encode())\n\tif res.Code != 0 {\n\t\tlog.Error(\"message url(%s) error(%v)\", d.uri+\"?\"+params.Encode(), res.Code)\n\t\terr = fmt.Errorf(\"message send failed\")\n\t}\n\treturn\n}", "func echoMessage(slackClient *slack.RTM, message, slackChannel string) {\n\tsplitMessage := strings.Fields(strings.ToLower(message))\n\n\tslackClient.SendMessage(slackClient.NewOutgoingMessage(strings.Join(splitMessage[1:], \" \"), slackChannel))\n}", "func send(api *slack.Client, message models.Message, bot *models.Bot) {\n\tusers, err := getSlackUsers(api, message)\n\tif err != nil {\n\t\tbot.Log.Errorf(\"Problem sending message: %s\", err.Error())\n\t}\n\tif message.DirectMessageOnly {\n\t\terr := handleDirectMessage(api, message, bot)\n\t\tif err != nil {\n\t\t\tbot.Log.Errorf(\"Problem sending message: %s\", err.Error())\n\t\t}\n\t} else {\n\t\terr := handleNonDirectMessage(api, users, message, bot)\n\t\tif err != nil {\n\t\t\tbot.Log.Errorf(\"Problem sending message: %s\", err.Error())\n\t\t}\n\t}\n}", "func (p *hub) SyncPush(caller ICaller, ds IDataSet) error {\n return p.notify(C_Mode_Push, caller, ds)\n}", "func (s *LoginSocket) SendMessageBySession(sessionId string, p interface{}) {\n\ts.mu.Lock()\n\tdefer 
s.mu.Unlock()\n\tlogger.Info(\"SendMessageBySession on login channel\", sessionId, p)\n\n\t// go func() {\n\tfor conn, active := range loginSocket.subscriptions[sessionId] {\n\t\tif active {\n\t\t\ts.SendUpdateMessage(conn, p)\n\t\t}\n\t}\n\t// }()\n}", "func main() {\n\tmednisBot := bot.Bot{APIKey: \"APIkey:here\"}\n\n\tmednisBot.OnMessage = func(msg *tgtype.Message) {\n\t\tfmt.Printf(\"Recieved message \\\"%v\\\" from user with name %v\\n\", msg.Text, msg.From.FirstName)\n\t\tfmt.Println(msg.Chat)\n\t\tmethod := methods.SendMessage{\n\t\t\tChatID: msg.Chat.ID,\n\t\t\tText: \"Hello world!\",\n\t\t\tReplyToMessageID: msg.MessageID,\n\t\t}\n\t\tmethod.CallMethod(mednisBot.GetBotURL())\n\t}\n\n\tmednisBot.RunBot()\n}", "func MsgSlack(ctx *Context, msg string) error {\n\tcfg := ctx.Config\n\twebhookURL := \"https://hooks.slack.com/services/\" + cfg.Slacks[*ctx.ArgProfileName].Key\n\n\tlog.Printf(\"webhook: %s\", webhookURL)\n\n\tslackBody, _ := json.Marshal(slackRequestBody{Text: msg})\n\treq, err := http.NewRequest(http.MethodPost, webhookURL, bytes.NewBuffer(slackBody))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{Timeout: 10 * time.Second}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tbuf := new(bytes.Buffer)\n\tbuf.ReadFrom(resp.Body)\n\tif buf.String() != \"ok\" {\n\t\treturn errors.New(\"Non-ok response returned from Slack\")\n\t}\n\treturn nil\n}" ]
[ "0.6924099", "0.5730299", "0.5725763", "0.5717606", "0.57023185", "0.5695726", "0.5656565", "0.5638851", "0.5627522", "0.55952996", "0.5541704", "0.55413514", "0.5527997", "0.55110794", "0.5500579", "0.54876614", "0.54860944", "0.5479855", "0.54772085", "0.54697764", "0.54671085", "0.5459049", "0.5452032", "0.54506797", "0.5434914", "0.54070944", "0.54052556", "0.53513974", "0.534439", "0.53431785", "0.5341282", "0.53328127", "0.5329042", "0.5320775", "0.53177434", "0.5313567", "0.5310342", "0.5292272", "0.5291357", "0.52878594", "0.5273968", "0.52721417", "0.5258348", "0.5244856", "0.523518", "0.5228286", "0.52218795", "0.52218735", "0.5214145", "0.5206505", "0.5189108", "0.51834667", "0.51648915", "0.5155141", "0.51441073", "0.5141275", "0.51411104", "0.51333064", "0.5129007", "0.5120483", "0.51132953", "0.5106776", "0.51063067", "0.50908977", "0.50890285", "0.50855994", "0.5074401", "0.50713533", "0.50659794", "0.50598174", "0.50574553", "0.5048369", "0.50376576", "0.50354886", "0.50337076", "0.5031169", "0.5029866", "0.5024666", "0.50154805", "0.5006998", "0.5003766", "0.5003514", "0.50021636", "0.49919286", "0.49910748", "0.4990046", "0.49888188", "0.4987432", "0.49759114", "0.49729976", "0.49715137", "0.49664736", "0.49606818", "0.4958923", "0.4954661", "0.49464005", "0.49460077", "0.49441895", "0.49401462", "0.49287015" ]
0.5593399
10
SensePushLog pushes message to telegram channel `telegram.log_chat_id`
func SensePushLog(message string) error {
	bot, err := tgbotapi.NewBotAPI(Config.telegram_bot_token)
	if err != nil {
		log.Warnf("failed to initialize bot API %v", err)
		reportFailure.Add(1)
		return err
	}

	msg := tgbotapi.NewMessage(Config.telegram_log_chat_id, message)
	msg.ParseMode = "markdown"
	msg.DisableNotification = true

	_, err = bot.Send(msg)
	if err != nil {
		log.Warnf("failed to send message %v", err)
		reportFailure.Add(1)
		return err
	}

	reportSuccess.Add(1)
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func SendLog(taskID, msg string) {\n\tsockets.Message(taskID, []byte(msg))\n}", "func logMsg(format string, a ...interface{}) {\n\tmsg := fmt.Sprintf(format, a...)\n\tlog.Println(msg)\n\tdiscord.ChannelMessageSend(logChannel, msg)\n}", "func LogPush(log logging.Logger, v interface{}, path ...string) {\n\tlog.Println(\"Push to\", strings.Join(path, \"/\"), v)\n}", "func (lt *TransportGKE) SendLog(le *LogEntry) error {\n\tpayload := le.Payload\n\tif le.Message != \"\" {\n\t\tpayload[\"message\"] = le.Message\n\t}\n\n\tpayload[\"severity\"] = le.Severity\n\tif le.HTTPRequest != nil {\n\t\tpayload[\"httpRequest\"] = formatHTTPRequest(le.HTTPRequest)\n\t}\n\n\tif len(le.Labels) > 0 {\n\t\tpayload[\"labels\"] = le.Labels\n\t}\n\n\treturn json.NewEncoder(lt.logWriter).Encode(payload)\n}", "func (sender *Sess) SendMessage(logEvent logevent.LogEvent) error {\n\tif sender.hecClient == nil {\n\t\treturn errors.New(\"SendMessage() called before OpenSvc()\")\n\t}\n\thecEvents := []*hec.Event{\n\t\tsender.formatLogEvent(logEvent),\n\t}\n\tsender.tracePretty(\"TRACE_SENDHEC time =\",\n\t\tlogEvent.Content.Time.UTC().Format(time.RFC3339),\n\t\t\" hecEvents =\", hecEvents)\n\terr := sender.hecClient.WriteBatch(hecEvents)\n\treturn err\n}", "func (logger logger) Send(log log.Log) {\n\tval := log.Text\n\tif log.Error != nil {\n\t\tval += fmt.Sprintf(\" %s\", log.Error.Error())\n\t}\n\tfmt.Println(val)\n}", "func (s *SlcLogger) sendNotification(logLevel logLevel, color string, message interface{}, titleParam []string) error {\n\n\tvar text string\n\tif t, ok := message.(error); ok {\n\t\ttext = t.Error()\n\t} else if t, ok := message.(string); ok {\n\t\ttext = t\n\t} else {\n\t\treturn &SlcErr{errors.New(\"the type of message parameter should be string or error\"), 0}\n\t}\n\n\tif logLevel < s.LogLevel {\n\t\treturn nil\n\t}\n\tslackChannel := s.getTargetChannel(logLevel)\n\n\tpayload, err := s.buildPayload(slackChannel, color, text, titleParam)\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\n\treq, err := http.NewRequest(\"POST\", s.WebHookURL, bytes.NewBuffer(payload))\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\tctx := context.Background()\n\treq.WithContext(ctx)\n\n\tresp, err := http.DefaultClient.Do(req)\n\n\tdefer func() {\n\t\tif resp != nil {\n\t\t\t_, _ = io.Copy(ioutil.Discard, resp.Body)\n\t\t\t_ = resp.Body.Close()\n\t\t}\n\t}()\n\n\tif err != nil {\n\t\treturn &SlcErr{err, 0}\n\t}\n\n\tif resp.StatusCode >= 400 {\n\t\tbody, _ := ioutil.ReadAll(resp.Body)\n\t\treturn &SlcErr{errors.New(string(body)), resp.StatusCode}\n\t}\n\n\treturn nil\n}", "func (c *ClientType) sendToLoggly(msg messageType) error {\n\n\t// Marshal message object to JSON\n\tmsgBytes, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn errors.New(\"Could not create JSON payload\")\n\t}\n\n\t// Construct the HTTP request with timeout\n\thttpClient := &http.Client{\n\t\tTimeout: 7 * time.Second}\n\trequest, err := http.NewRequest(http.MethodPost, c.URL, bytes.NewBuffer(msgBytes))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Set the header attributes\n\trequest.Header.Add(\"Content-Type\", \"application/json\")\n\n\t_, err = httpClient.Do(request)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (t *NoopAgent) SendLog(log []byte) {\n}", "func (g *Game) pushChatMessage(player *Player, text string) {\n\tm := ChatMessage{Player: player, Text: text}\n\tfor _, p := range g.Players {\n\t\tp.ChatMessages.Push(m)\n\t}\n}", "func (h 
*handler) logSend(msg string) {\n\th.logger.logSend(msg)\n}", "func SendDiscordLogEntry(message string) {\n\t// Create WebhookInfo\n\tinfo := WebhookInfo{}\n\t// Sets the content\n\tinfo.Content = message + \"\\n\"\n\t// Encode JSON\n\traw, _ := json.Marshal(info)\n\n\t// Post the JSON to URL\n\tresponse, err := http.Post(DiscordWebhookUrl, \"application/json\", bytes.NewBuffer(raw))\n\n\t// Check if an error\n\tif err != nil {\n\t\t// Print error\n\t\tfmt.Println(err)\n\t\t// Print response body\n\t\tfmt.Println(ioutil.ReadAll(response.Body))\n\t}\n}", "func sendSlackMessage(message slack.Message) {\n\tuserData := SlackAPI.GetUserInfo(message.User)\n\ttimestampSplit := strings.Split(message.Ts, \".\")\n\ttimestampInt, err := strconv.ParseInt(timestampSplit[0], 10, 64)\n\ttimestamp := time.Unix(timestampInt, 0)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\terr = SlackAPI.PostMessage(conf.ChannelToMessage, conf.SlackMessageText)\n\tif err != nil {\n\t\tspew.Dump(err)\n\t}\n\n\terr = SlackAPI.PostMessage(conf.ChannelToMessage, \"> <@\"+userData.User.ID+\"> - \"+timestamp.Format(\"03:04:05 PM\")+\": \\n> \"+message.Text)\n\tif err != nil {\n\t\tspew.Dump(err)\n\t}\n}", "func (ps *PubSub) Log(msg string) {\n\tcontent := []byte(fmt.Sprintf(\"\\n%s\", msg))\n\ttopicMsg := &broker.Message{\n\t\tBody: content,\n\t}\n\tif err := broker.Publish(logTopic, topicMsg); err != nil {\n\t\tlog.Printf(\"[pub] failed: %v\", err)\n\t} else {\n\t\tfmt.Println(\"[pub] pubbed message:\", string(topicMsg.Body))\n\t}\n\n}", "func (c *ChanPost) slckSend(rtm *slack.RTM) {\n\tlog.Println(\"Message from bot received\")\n\tmessage := \"Infra Announce: \" + c.text + \" \" +\n\t\ttime.Unix(int64(c.date), 0).Format(\"Mon Jan _2 15:04\") +\n\t\t\"(from telegram channel)\"\n\trtm.SendMessage(rtm.NewOutgoingMessage(message, config.sChat))\n}", "func (bot *SlackBot) sendMessage(msg Message) error {\n\tmsg.ID = atomic.AddUint64(&counter, 1)\n\terr := websocket.JSON.Send(bot.ws, msg)\n\treturn err\n}", "func MessageLoggerSender(n *notif.SlackNotifier) {\n\t// for {\n\t// \tif len(messagesQueue) > 0 {\n\t// \t\tmsg := messagesQueue.Pop()\n\t// \t\tif n != nil {\n\t// \t\t\tif err := n.Notify(fmt.Sprintf(\"```%s```\", msg)); err != nil {\n\t// \t\t\t\tlog.Println(\"NOTIFY TO SLACK ERROR: \", err)\n\t// \t\t\t\tmessagesQueue = append(messagesQueue, msg)\n\t// \t\t\t}\n\t// \t\t}\n\t// \t\ttime.Sleep(10 * time.Millisecond)\n\t// \t} else {\n\t// \t\ttime.Sleep(10 * time.Second)\n\t// \t}\n\t// }\n}", "func (h *grpcHandlers) SendLog(stream grpc.WorkflowQueue_SendLogServer) error {\n\tlog.Debug(\"grpc.SendLog> begin\")\n\tdefer log.Debug(\"grpc.SendLog> end\")\n\tfor {\n\t\tin, err := stream.Recv()\n\t\tif err == io.EOF {\n\t\t\treturn nil\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlog.Debug(\"grpc.SendLog> Got %+v\", in)\n\n\t\tdb := h.dbConnectionFactory.GetDBMap()\n\t\tif err := workflow.AddLog(db, nil, in); err != nil {\n\t\t\treturn sdk.WrapError(err, \"grpc.SendLog> Unable to insert log \")\n\t\t}\n\t}\n}", "func (hc *HuaweiPushClient) PushMsg(accessToken, deviceToken, payload string, timeToLive int) (string, error) {\n\treqUrl := PUSH_URL + \"?nsp_ctx=\" + url.QueryEscape(hc.NspCtx)\n\n\tnow := time.Now()\n\texpireSecond := time.Duration(timeToLive * 1e9)\n\texpireTime := now.Add(expireSecond)\n\n\tvar originParam = map[string]string{\n\t\t\"access_token\": accessToken,\n\t\t\"nsp_svc\": NSP_SVC,\n\t\t\"nsp_ts\": strconv.Itoa(int(time.Now().Unix())),\n\t\t\"device_token_list\": \"[\\\"\" + deviceToken + 
\"\\\"]\",\n\t\t\"payload\": payload,\n\t\t\"expire_time\": expireTime.Format(\"2006-01-02T15:04\"),\n\t}\n\n\tparam := make(url.Values)\n\tparam[\"access_token\"] = []string{originParam[\"access_token\"]}\n\tparam[\"nsp_svc\"] = []string{originParam[\"nsp_svc\"]}\n\tparam[\"nsp_ts\"] = []string{originParam[\"nsp_ts\"]}\n\tparam[\"device_token_list\"] = []string{originParam[\"device_token_list\"]}\n\tparam[\"payload\"] = []string{originParam[\"payload\"]}\n\tparam[\"expire_time\"] = []string{originParam[\"expire_time\"]}\n\n\t// push\n\tres, err := FormPost(reqUrl, param)\n\n\treturn string(res), err\n}", "func (w *Workspace) notifyLog(message string) {\n\tw.session.log.Debugf(context.Background(), \"%s\", message)\n}", "func SendMessageToSlack(message string) {\n\tfmt.Println(\"Sending message to slack...\")\n\n\thttp.Post(url, \"\")\n\n\treturn nil\n}", "func (trd *trxDispatcher) pushLog(lg retypes.Log, blk *types.Block, trx *types.Transaction, wg *sync.WaitGroup) bool {\n\twg.Add(1)\n\tselect {\n\tcase trd.outLog <- &types.LogRecord{\n\t\tWatchDog: wg,\n\t\tBlock: blk,\n\t\tTrx: trx,\n\t\tLog: lg,\n\t}:\n\tcase <-trd.sigStop:\n\t\treturn false\n\t}\n\treturn true\n}", "func (s *SlackSvc) SendMessage(report ReportPayload) error {\n\tattachments := make([]map[string]interface{}, 1)\n\tattachments[0] = map[string]interface{}{\"text\": fmt.Sprintf(\"Howdy! Here's a list of *%d* PRs waiting to be reviewed and merged:\", len(report.PRs))}\n\tfor _, v := range report.PRs {\n\t\tattachments = append(attachments, map[string]interface{}{\"text\": v.ToString()})\n\t}\n\n\tif len(report.Reminders) > 0 {\n\t\tfor _, v := range report.Reminders {\n\t\t\tattachments = append(attachments, map[string]interface{}{\"text\": v.Text})\n\t\t}\n\t}\n\n\tmessage := map[string]interface{}{\n\t\t\"channel\": s.channelID,\n\t\t\"username\": s.user,\n\t\t\"icon_emoji\": \":robot_face:\",\n\t\t\"attachments\": attachments,\n\t}\n\n\tpayload, err := json.Marshal(message)\n\tif err != nil {\n\t\tlog.Error().Err(err).Msg(\"Failed to serialize Slack payload\")\n\t\treturn err\n\t}\n\n\tresp, err := s.client.Post(s.webhook, \"application/json\", bytes.NewReader(payload))\n\tif err != nil {\n\t\tlog.Error().Err(err).Msgf(\"Failed to serialize Slack payload: %v\", err)\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tlog.Info().Msgf(\"Message successfully sent to channel %s\", s.channelID)\n\treturn nil\n}", "func (k *Bot) LogChannel() string {\n\treturn <-k.logChan\n}", "func (self *GameHeart) Logs(msg *HeartMessageType) {\n\n}", "func (c *Channel) Log(l string) {\n\tlog.Printf(\"Channel <%s>: %s\", c.GetName(), l)\n}", "func sendMessage(message string) {\n\ts, _ := session.GetSession()\n\tif s != nil {\n\t\tconn := s.OpenConnection(chatServiceId)\n\t\tio.WriteString(conn, message)\n\t\tconn.Close()\n\t} else {\n\t\tfmt.Println(\"No chat sessions available\")\n\t}\n}", "func (t *Tracker) send(color, message string) error {\n\tenv := os.Getenv(\"ENV\")\n\t// If no ENV is specified, assume we are in development mode, so we don't want to flood Slack uselessly.\n\tif env == \"\" {\n\t\treturn nil\n\t}\n\n\t_, perr := poster.Post(\n\t\tt.WebHook,\n\t\tmap[string]interface{}{\n\t\t\t\"text\" : fmt.Sprintf(\"%s - %s\", t.Application, env),\n\t\t\t\"attachments\": []map[string]interface{}{\n\t\t\t\t{\n\t\t\t\t\t\"color\": color,\n\t\t\t\t\t\"text\": 
fmt.Sprintf(\n\t\t\t\t\t\t\"*Message*\\n%s\\n\\n*Stack*\\n```%s```\\n\\n*Time*\\n%s\",\n\t\t\t\t\t\tmessage,\n\t\t\t\t\t\tstring(debug.Stack()),\n\t\t\t\t\t\ttime.Now().Format(\"2006-01-02 03:04:05\"),\n\t\t\t\t\t),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t)\n\n\t// An unexpected error happened when sending our message to Slack.\n\treturn perr\n}", "func SensePushMessage(caption string, photo string) error {\n\tpushFunc := func() error {\n\t\tbot, err := tgbotapi.NewBotAPI(Config.telegram_bot_token)\n\t\tif err != nil {\n\t\t\tlog.Warnf(\"failed to initialize bot API: %v\", err)\n\t\t\treportFailure.Add(1)\n\t\t\treturn err\n\t\t}\n\n\t\tmsg := tgbotapi.NewPhotoUpload(Config.telegram_chat_id, photo)\n\t\tmsg.Caption = caption\n\t\tmsg.ParseMode = \"markdown\"\n\n\t\t_, err = bot.Send(msg)\n\n\t\tif err != nil {\n\t\t\tlog.Warnf(\"failed to send message: %v\", err)\n\t\t\treportFailure.Add(1)\n\t\t\treturn err\n\t\t}\n\n\t\treportSuccess.Add(1)\n\n\t\treturn nil\n\t}\n\ti := 0\n\tfor ; ; {\n\t\terr := pushFunc()\n\t\tif err != nil {\n\t\t\tsecs := (i + 1) * 5\n\t\t\tlog.Warnf(\"%d attempt failed, retrying sending telegram message after %d seconds\", i+1, secs)\n\t\t\ttime.Sleep(time.Second * time.Duration(secs))\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t\ti += 1\n\t\tif i == 15 {\n\t\t\tlog.Warnf(\"all attempts failed, stop retrying\")\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (l *LogglyShipper) Log(ev kail.Event) error {\n\tif l.logglyClient != nil && ev != nil && len(ev.Log()) > 0 {\n\t\treturn l.logglyClient.Send(map[string]interface{}{\n\t\t\t\"rkubelog\": map[string]interface{}{\n\t\t\t\t\"tag\": l.customStaticTags,\n\t\t\t\t\"message\": string(ev.Log()),\n\t\t\t\t\"node\": ev.Source().Node(),\n\t\t\t\t\"pod\": ev.Source().Name(),\n\t\t\t\t\"namespace\": ev.Source().Namespace(),\n\t\t\t\t\"container\": ev.Source().Container(),\n\t\t\t},\n\t\t})\n\t}\n\treturn nil\n}", "func (chat ChatMsg) SendChatMsg(data string) {\n\n\tif strings.HasPrefix(data, \"L \") {\n\t\ttmp1 := strings.TrimLeft(data, \"L \")\n\n\t\tchatting.Msg += \"Me: \" + tmp1 + \"\\n\"\n\t\tqml.Changed(chatting, &chatting.Msg)\n\n\t} else if strings.HasPrefix(data, \"G \") {\n\t\ttmp2 := strings.TrimLeft(data, \"G \")\n\n\t\tglobalchatting.Msg += \"Me: \" + tmp2 + \"\\n\"\n\t\tqml.Changed(globalchatting, &globalchatting.Msg)\n\t}\n\n\tmessenger.Msnger.Send_message(data, mylib.CHAT_MESSAGE)\n}", "func (wechatPush *WechatPush) WriteMsg(when time.Time, msg string, level int) error {\n\tif level > wechatPush.Level {\n\t\treturn nil\n\t}\n\n\tdata := InitPushData(msg)\n\n\tfor _, id := range wechatPush.WechatIds {\n\t\terr := wechatPush.message.Push(id, \"\", wechatPush.TmpId, data)\n\t\tfmt.Printf(\"push data to user:%v, error:%v\\n\", id, err)\n\t}\n\treturn nil\n}", "func SlackSendMsg(slackAPI *slack.Client, MsgText string) error {\n\tif _, _, err := slackAPI.PostMessage(viper.GetString(\"slack_channel_id\"), slack.MsgOptionText(MsgText, false)); err != nil {\n\t\tlog.Errorf(\"[ERROR] %s\\n\", err)\n\t\treturn err\n\t}\n\tlog.Infof(\"[Slack] Send %s\", MsgText)\n\treturn nil\n}", "func LogEvent(thelog StructuredLog, level, event, msg string) {\n\tthelog.Timestamp = time.Now().Format(time.RFC3339)\n\thostname, _ := os.Hostname()\n\tthelog.Server = hostname\n\tthelog.Level = level\n\tthelog.Event = event\n\tthelog.Message = msg\n\t//thelog.Service = \"search-api\"\n\tlogJSON, err := json.Marshal(thelog)\n\tif err != nil {\n\t\tlog.Println(\"Structured Logger: Logger JSON Marshal failed 
!\")\n\t}\n\tlog.Println(string(logJSON))\n}", "func chat(ctx *gin.Context) {\n\tcred := credentials.NewStaticCredentials(os.Getenv(\"ACCESS_KEY_ID\"), os.Getenv(\"SECRET_ACCESS_KEY\"), \"\")\n\tconfig := aws.NewConfig().WithCredentials(cred).WithRegion(os.Getenv(\"AWS_REGION\"))\n\tsess := session.Must(session.NewSession(config))\n\tsvc := lexruntimeservice.New(sess)\n\tinput := &lexruntimeservice.PostTextInput{\n\t\tBotName: aws.String(ctx.Query(\"bot_name\")),\n\t\tBotAlias: aws.String(ctx.Query(\"bot_alias\")),\n\t\tInputText: aws.String(ctx.Query(\"message\")),\n\t\tUserId: aws.String(ctx.Query(\"user_id\")),\n\t}\n\tresult, err := svc.PostText(input)\n\tif err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, gin.H{\"error\": err.Error(), \"message\": \"Server Error.\", \"data\": nil})\n\t} else {\n\t\tctx.JSON(http.StatusOK, gin.H{\"error\": nil, \"message\": \"Bot Updated with new intent.\", \"data\": result})\n\t}\n}", "func sendMessage(recipient string, reviewUrl string) {\n\tb := new(bytes.Buffer)\n\tjson.NewEncoder(b).Encode(SlackMessage{Channel: recipient, Text: reviewUrl})\n\tresp, _ := http.Post(os.Getenv(\"SLACKURL\"), \"application/json; charset=utf-8\", b)\n\n\tdefer resp.Body.Close()\n\tioutil.ReadAll(resp.Body)\n}", "func (s *Service) SendMessage(TargetChannelID, messageToSend string) error {\n\t_, err := s.discordapi.ChannelMessageSend(TargetChannelID, messageToSend)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error sending message to channel %v\", err)\n\t}\n\treturn nil\n}", "func sendMessageToChannel(s *discordgo.Session, channelID string, body string) (message *discordgo.Message) {\n\tmessage, err := s.ChannelMessageSend(channelID, body)\n\tif err != nil {\n\t\tfmt.Println(\"Error sending message to channel: \", err)\n\t}\n\treturn message\n}", "func (s *SlackService) SendMessage(channel string, message string) {\n\t// https://godoc.org/github.com/nlopes/slack#PostMessageParameters\n\tpostParams := slack.PostMessageParameters{\n\t\tAsUser: true,\n\t}\n\n\t// https://godoc.org/github.com/nlopes/slack#Client.PostMessage\n\ts.Client.PostMessage(channel, message, postParams)\n}", "func (s *Slack) SendMessage(msg string) error {\n\tlog.Logger.Info(fmt.Sprintf(\">> Sending to slack: %+v\", msg))\n\n\tapi := slack.New(s.Token)\n\tparams := slack.PostMessageParameters{\n\t\tAsUser: true,\n\t}\n\n\tchannelID, timestamp, err := api.PostMessage(s.Channel, msg, params)\n\tif err != nil {\n\t\tlog.Logger.Errorf(\"Error in sending slack message %s\", err.Error())\n\t\treturn err\n\t}\n\n\tlog.Logger.Infof(\"Message successfully sent to channel %s at %s\", channelID, timestamp)\n\treturn nil\n}", "func (l *Logger) Log(message string) {\n\tt := time.Now()\n\tl.LogChannel <- fmt.Sprintf(\"[%s]\\t%s\", t.Format(\"2006-01-02 15:04:05\"), message)\n}", "func sendEchoMessage(slackClient *slack.RTM, message, slackChannel string) {\n\tsplitMessage := strings.Fields(strings.ToLower(message))\n\tslackClient.SendMessage(slackClient.NewOutgoingMessage(strings.Join(splitMessage[1:], \" \"), slackChannel))\n}", "func (s *instance) postMessage(jsondata []byte) {\r\n\tif s.client != nil {\r\n\t\tchannelID, timestamp, err := s.client.PostMessage(s.config.Channel, slack.MsgOptionText(string(jsondata), false), slack.MsgOptionUsername(\"g0-h0m3\"), slack.MsgOptionAsUser(true))\r\n\t\tif err == nil {\r\n\t\t\ts.service.Logger.LogInfo(s.name, fmt.Sprintf(\"message '%s' send (%s, %s)\", string(jsondata), channelID, timestamp))\r\n\t\t} else {\r\n\t\t\ts.service.Logger.LogError(s.name, 
fmt.Sprintf(\"message '%s' not send (%s, %s)\", string(jsondata), s.config.Channel, timestamp))\r\n\t\t\ts.service.Logger.LogError(s.name, err.Error())\r\n\t\t}\r\n\t} else {\r\n\t\ts.service.Logger.LogError(s.name, \"service not connected\")\r\n\t}\r\n}", "func Message(level string, msg string) {\n\tonce.Do(initChannel)\n\n\tbody := buildBody(level, msg)\n\tdata := body[\"data\"].(map[string]interface{})\n\tdata[\"body\"] = messageBody(msg)\n\n\tpush(body)\n}", "func (t *ExplorerAgent) SendLog(log []byte) {\n\tt.explorerClient.Send(context.Background(), log, synchronization.ExplorerBinaryMessage)\n}", "func (bot *Bot) SendMessage(chatID int64, message string) {\n\tlog.Printf(\"Send '%s' to %d chat id\\n\", message, chatID)\n\tmsg := tgbotapi.NewMessage(chatID, message)\n\tmsg.ParseMode = bot.MessageParseMode\n\t_, err := bot.API.Send(msg)\n\tif err != nil {\n\t\tlog.Panicf(\"Send Messsage Error: %v\\n\", err)\n\t}\n}", "func (client *Client) SendMessage(channel, text string) error {\n\tmsg := struct {\n\t\tID int `json:\"id\"`\n\t\tType string `json:\"type\"`\n\t\tChannel string `json:\"channel\"`\n\t\tText string `json:\"text\"`\n\t}{client.messageID, \"message\", channel, text}\n\n\tif err := client.conn.WriteJSON(msg); err != nil {\n\t\treturn err\n\t}\n\n\tclient.messageID++\n\n\treturn nil\n}", "func (r *ClientMessagesService) Log(logmessagerequest *LogMessageRequest) *ClientMessagesLogCall {\n\tc := &ClientMessagesLogCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.logmessagerequest = logmessagerequest\n\treturn c\n}", "func ws_SendMsg(ws *websocket.Conn, send_channel SendChannel) {\n\tfor {\n\t\tselect {\n\t\tcase send_msg := <-send_channel.containers:\n\t\t\tlog.Printf(\"[%s] containers sendMessage= \", __FILE__, send_msg)\n\t\t\twebsocket.JSON.Send(ws, send_msg)\n\t\tcase send_msg := <-send_channel.updateinfo:\n\t\t\tlog.Printf(\"[%s] update sendMessage=\", __FILE__, send_msg)\n\t\t}\n\t}\n}", "func (m *Slack) Send(mes *message.Message) error {\n\topts := createSlackMessageOptions(mes.Text, mes.Image, mes.Fields, mes.Level)\n\n\t_channel, _timestamp, _text, err := m.api.SendMessage(m.channel, opts...)\n\n\tm.logger.Debug(\"send slack message\",\n\t\tzap.String(\"channel\", _channel),\n\t\tzap.String(\"timestamp\", _timestamp),\n\t\tzap.String(\"text\", _text),\n\t\tzap.Error(err),\n\t)\n\n\treturn err\n}", "func SendMessage() {\n\n\tmsg := fmt.Sprintf(`{\"Fuellid\":\"%t\", \"City\":\"%s\"}`, data.Fuellid, data.City)\n\terr := ch.Publish(\n\t\t\"\",\n\t\tq.Name,\n\t\tfalse,\n\t\tfalse,\n\t\tamqp.Publishing{\n\t\t\tContentType: \"text/plain\",\n\t\t\tBody: []byte(msg),\n\t\t},\n\t)\n\tlogger.LogMessage(err, \"Failed to Publish message\", \"Published the message\")\n}", "func (l *Adapter) Stream(logstream chan *adapters.Message) {\n\tfor m := range logstream {\n\t\tl.queue <- logglyMessage{\n\t\t\tTimestamp: m.Timestamp,\n\t\t\tMessage: m.Message,\n\t\t\tUnit: m.Unit,\n\t\t\tHostname: m.Hostname,\n\t\t\tMachineID: m.MachineID,\n\t\t\tJ2JobName: m.J2JobName,\n\t\t\tJ2GroupName: m.J2GroupName,\n\t\t\tJ2GroupFull: m.J2GroupFull,\n\t\t\tJ2TaskName: m.J2TaskName,\n\t\t\tJ2TaskFull: m.J2TaskFull,\n\t\t\tJ2Kind: m.J2Kind,\n\t\t\tJ2Instance: m.J2Instance,\n\t\t}\n\t}\n}", "func (l *loggingServerStream) SendMsg(m interface{}) error {\n\terr := l.ServerStream.SendMsg(m)\n\tif l.li.LogStreamSendMsg {\n\t\tlogProtoMessageAsJSON(l.entry, m, status.Code(err), \"value\", \"StreamSend\")\n\t}\n\treturn err\n}", "func SyncPush(w http.ResponseWriter, req *http.Request) {\n\tqueryString := 
req.URL.Query()\n\tuid := queryString.Get(\"uid\")\n\n\tdata, err := ioutil.ReadAll(req.Body)\n\tif err != nil {\n\t\thttp.Error(w, \"bad request\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tgo PushMessage(uid, data)\n\n\tif DEBUG {\n\t\t// echo\n\t\tw.Write(data)\n\t}\n}", "func sendTextToTelegramChat(chatId int, text string) (string, error) {\n\n\tlog.Printf(\"Sending %s to chat_id: %d\", text, chatId)\n\tresponse, err := http.PostForm(\n\t\ttelegramApi,\n\t\turl.Values{\n\t\t\t\"chat_id\": {strconv.Itoa(chatId)},\n\t\t\t\"text\": {text},\n\t\t})\n\n\tif err != nil {\n\t\tlog.Printf(\"error when posting text to the chat: %s\", err.Error())\n\t\treturn \"\", err\n\t}\n\tdefer response.Body.Close()\n\n\tvar bodyBytes, errRead = ioutil.ReadAll(response.Body)\n\tif errRead != nil {\n\t\tlog.Printf(\"error in parsing telegram answer %s\", errRead.Error())\n\t\treturn \"\", err\n\t}\n\tbodyString := string(bodyBytes)\n\tlog.Printf(\"Body of Telegram Response: %s\", bodyString)\n\n\treturn bodyString, nil\n}", "func (l *PlexLogger) send(level int, msg string, kvs ...interface{}) {\n\tkvmsg := []string{}\n\tfor k, v := range l.keyValues {\n\t\tkvmsg = append(kvmsg, fmt.Sprintf(\"%s:%+v\", k, v))\n\t}\n\tfor i := 0; i < len(kvs); i += 2 {\n\t\tkvmsg = append(kvmsg, fmt.Sprintf(\"%s:%+v\", kvs[i], kvs[i+1]))\n\t}\n\n\tif len(kvmsg) > 0 {\n\t\tmsg = fmt.Sprintf(\"%s %+v\", msg, kvmsg)\n\t}\n\n\tu := l.getURL()\n\tq := u.Query()\n\tq.Set(\"level\", fmt.Sprintf(\"%d\", level))\n\tq.Set(\"message\", msg)\n\tq.Set(\"source\", l.name)\n\tu.RawQuery = q.Encode()\n\n\treq, err := http.NewRequest(http.MethodGet, u.String(), nil)\n\tif err != nil {\n\t\t// We have an error, but no place to report it. Bail out!\n\t\treturn\n\t}\n\n\tplexToken := l.getPlexToken()\n\tif plexToken != \"\" {\n\t\treq.Header.Add(\"X-Plex-Token\", plexToken)\n\t}\n\treq.Header.Add(\"User-Agent\", \"PlexLogger\")\n\n\t// Ignore results\n\t_, err = http.DefaultClient.Do(req)\n\tif err != nil {\n\t\tfmt.Printf(\"ERROR! 
%v\", err)\n\t}\n}", "func (p *BoteaterServiceClient) SendChatChecked(ctx context.Context, seq int32, chatMid string, lastMessageId string) (err error) {\r\n var _args47 BoteaterServiceSendChatCheckedArgs\r\n _args47.Seq = seq\r\n _args47.ChatMid = chatMid\r\n _args47.LastMessageId = lastMessageId\r\n var _result48 BoteaterServiceSendChatCheckedResult\r\n if err = p.Client_().Call(ctx, \"sendChatChecked\", &_args47, &_result48); err != nil {\r\n return\r\n }\r\n switch {\r\n case _result48.E!= nil:\r\n return _result48.E\r\n }\r\n\r\n return nil\r\n}", "func (client *Client) sendMessage(msg interface{}) {\n\tstr, err := json.Marshal(msg)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\ts := string(str)\n\tmetrics.SendMessage(len(s))\n\tclient.socketTx <- s\n}", "func (p *Process) Log(event string) {\n\n\t\tmsg := groupMessage{ Sender: p.Name, Log: true, Message: event }\n\n\t\t// Keep everything here non blocking, so things are very thread safe\n\t\tselect {\n\n\t\t\tcase p.group_channel <- msg:\n\t\t\tdefault:\n\t\t}\n\t}", "func (s *Client) SendMessage(text, channel string) error {\n\t_, _, respText, err := s.client.SendMessage(channel, slack.MsgOptionText(text, false))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tlog.Println(\"Slack Respsonse text: \", respText)\n\n\treturn nil\n}", "func (mb MessageBroadcast) Push(title, body, url, serviceName string) error {\n\terr := mb.Notifier.SendNotification(fmt.Sprintf(\"%s - %s\", serviceName, title), body, url)\n\treturn err\n}", "func sendTestLog(ctx context.Context, comm client.Communicator, conf *internal.TaskConfig, log *model.TestLog) error {\n\treturn errors.Wrap(sendTestLogToCedar(ctx, conf.Task, comm, log), \"sending test logs to Cedar\")\n}", "func LineSendMsg(lineAPI *linebot.Client, TextMessage *linebot.TextMessage) error {\n\t_, err := lineAPI.PushMessage(viper.GetString(\"line_group_id\"), TextMessage).Do()\n\tif err != nil {\n\t\tlog.Errorf(\"[ERROR] %s\\n\", err)\n\t\treturn err\n\t}\n\tlog.Infof(\"[LINE] Send %s\", TextMessage.Text)\n\treturn nil\n}", "func sendPush(apiKey string, name string, url string, newStatus string, oldStatus string) {\n\tlogging.MustGetLogger(\"\").Debug(\"Sending Push about \\\"\" + url + \"\\\"...\")\n\n\tpb := pushbullet.New(apiKey)\n\n\tpush := requests.NewLink()\n\tpush.Title = GetConfiguration().Application.Title + \" - Status Change\"\n\tpush.Body = name + \" went from \\\"\" + oldStatus + \"\\\" to \\\"\" + newStatus + \"\\\".\"\n\tpush.Url = url\n\n\t_, err := pb.PostPushesLink(push)\n\tif err != nil {\n\t\tlogging.MustGetLogger(\"\").Error(\"Unable to send Push: \", err)\n\t}\n}", "func Chat(chub *connections.ConnectionHub) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tgo chub.WriteMessage()\n\t\tlog.Println(\"wrote message\")\n\t})\n}", "func SendMsg(message string) {\n\tbotMutex.Lock()\n\tdefer botMutex.Unlock()\n\tif !messageReceived {\n\t\tlog.Println(\n\t\t\t\"Write a message to the bot for specifying notifiable chat ID\",\n\t\t)\n\t\treturn\n\t}\n\tmsg := tgbotapi.NewMessage(notifiableChatID, message)\n\t_, err := bot.Send(msg)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "func (p *AsyncProducer) Send(msg string) {\n\tm := fmt.Sprintf(\"[%s] [%s] %s\",\n\t\ttime.Now().Format(time.RFC3339), p.loggerID, msg)\n\tmsgBytes, err := json.Marshal(m)\n\tif err != nil {\n\t\tlog.Println(\"Failed json encoding kafka log msg: \", err)\n\t\treturn\n\t}\n\tmsgLog := sarama.ProducerMessage{\n\t\tTopic: topic,\n\t\t// Key: 
sarama.StringEncoder(),\n\t\tTimestamp: time.Now(),\n\t\tValue: sarama.ByteEncoder(msgBytes),\n\t}\n\tp.prodr.Input() <- &msgLog\n}", "func (t *Client) SendPushNotification(title, msg string) (int, error) {\n\treq := graphql.NewRequest(`\n\t\tmutation sendPushNotification($input: PushNotificationInput!){\n\t\t\tsendPushNotification(input: $input){\n\t\t \t\tsuccessful\n\t\t \t\tpushedToNumberOfDevices\n\t\t\t}\n\t }`)\n\tinput := PushInput{\n\t\tTitle: title,\n\t\tMessage: msg,\n\t\tScreenToOpen: \"CONSUMPTION\",\n\t}\n\treq.Var(\"input\", input)\n\treq.Header.Set(\"Cache-Control\", \"no-cache\")\n\treq.Header.Set(\"Authorization\", \"Bearer \"+t.Token)\n\tctx := context.Background()\n\t//ctx, _ := context.WithTimeout(context.Background(), time.Second*2)\n\tvar result PushResponse\n\tif err := t.gqlClient.Run(ctx, req, &result); err != nil {\n\t\tlog.Error(err)\n\t\treturn 0, err\n\t}\n\treturn result.SendPushNotification.PushedToNumberOfDevices, nil\n}", "func (c *client) LogMessage(ctx context.Context, params *LogMessageParams) (err error) {\n\terr = c.Conn.Notify(ctx, MethodWindowLogMessage, params)\n\treturn\n}", "func (l *Logger) log(level int64, v string) { l.doMsg(level, v) }", "func (w *FileLoggerWriter) writeLog(msg []byte) {\n\tw.msgChan <- msg\n}", "func (d *DestinyLogger) Log(mc <-chan *common.Message) {\n\tvar subTrigger bool\n\tfor {\n\t\tm := <-mc\n\n\t\tswitch m.Command {\n\t\tcase \"BAN\":\n\t\t\td.writeLine(m.Time, \"Ban\", m.Data+\" banned by \"+m.Nick)\n\t\tcase \"UNBAN\":\n\t\t\td.writeLine(m.Time, \"Ban\", m.Data+\" unbanned by \"+m.Nick)\n\t\tcase \"MUTE\":\n\t\t\td.writeLine(m.Time, \"Ban\", m.Data+\" muted by \"+m.Nick)\n\t\tcase \"UNMUTE\":\n\t\t\td.writeLine(m.Time, \"Ban\", m.Data+\" unmuted by \"+m.Nick)\n\t\tcase \"BROADCAST\":\n\t\t\tif strings.Contains(m.Data, \"subscriber!\") || strings.Contains(m.Data, \"subscribed on Twitch!\") || strings.Contains(m.Data, \"has resubscribed! 
Active for\") {\n\t\t\t\td.writeLine(m.Time, \"Subscriber\", m.Data)\n\t\t\t\tsubTrigger = true\n\t\t\t} else if subTrigger {\n\t\t\t\td.writeLine(m.Time, \"SubscriberMessage\", m.Data)\n\t\t\t} else {\n\t\t\t\td.writeLine(m.Time, \"Broadcast\", m.Data)\n\t\t\t}\n\t\tcase \"MSG\":\n\t\t\td.writeLine(m.Time, m.Nick, m.Data)\n\t\t\tsubTrigger = false\n\t\t}\n\t}\n}", "func (c *Calcium) LogStream(ctx context.Context, ID string) (chan *types.LogStreamMessage, error) {\n\tch := make(chan *types.LogStreamMessage)\n\tgo func() {\n\t\tdefer close(ch)\n\t\tcontainer, err := c.GetContainer(ctx, ID)\n\t\tif err != nil {\n\t\t\tch <- &types.LogStreamMessage{ID: ID, Error: err}\n\t\t\treturn\n\t\t}\n\n\t\tresp, err := container.Engine.VirtualizationLogs(ctx, ID, true, true, true)\n\t\tif err != nil {\n\t\t\tch <- &types.LogStreamMessage{ID: ID, Error: err}\n\t\t\treturn\n\t\t}\n\n\t\tscanner := bufio.NewScanner(resp)\n\t\tfor scanner.Scan() {\n\t\t\tdata := scanner.Bytes()\n\t\t\tch <- &types.LogStreamMessage{ID: ID, Data: data}\n\t\t}\n\t}()\n\treturn ch, nil\n}", "func (c *SlackConnection) SendMessage(message gateway.Message, channel *gateway.Channel) (*gateway.Message, error) {\n\tif strings.HasPrefix(message.Text, \"/\") {\n\t\t// Slash commands require some preprocessing.\n\t\treturn sendSlashCommand(c, message, channel)\n\t} else {\n\t\tlog.Printf(\"Sending message to team %s on channel %s\", c.Team().Name, channel.Name)\n\n\t\t// Otherwise just a plain message\n\t\t_, err := http.Get(\"https://slack.com/api/chat.postMessage?token=\" + c.token + \"&channel=\" + channel.Id + \"&text=\" + url.QueryEscape(message.Text) + \"&link_names=true&parse=full&unfurl_links=true&as_user=true\")\n\t\treturn nil, err\n\t}\n}", "func (l *Logger) Log(message string) {\n\tlogMsg := Message{\n\t\tBody: message,\n\t\tOptions: l.Options,\n\t}\n\tl.transport.add(logMsg)\n}", "func newLogPusher(pusherKey PusherKey,\n\tsvcStructuredLog Client, logger *zap.Logger) *logPusher {\n\tpusher := &logPusher{\n\t\tlogGroupName: aws.String(pusherKey.LogGroupName),\n\t\tlogStreamName: aws.String(pusherKey.LogStreamName),\n\t\tsvcStructuredLog: svcStructuredLog,\n\t\tlogger: logger,\n\t}\n\tpusher.logEventBatch = newEventBatch(pusherKey)\n\n\treturn pusher\n}", "func Send(config config.Config, text string) {\n\tapi := slack.New(config.Slack.Token)\n\tparams := slack.PostMessageParameters{}\n\tparams.IconEmoji = config.Slack.IconEmoji\n\tparams.Username = config.Slack.Username\n\tchannelID, timestamp, err := api.PostMessage(config.Slack.Channel, text, params)\n\tif err != nil {\n\t\tfmt.Printf(\"%s\\n\", err)\n\t\treturn\n\t}\n\tfmt.Printf(\"Message successfully sent to channel %s at %s\", channelID, timestamp)\n}", "func (d *DingTalkClient) SendMessage(msg DingTalkMessage) error {\n\n\tvar message string\n\tswitch msg.Type {\n\tcase \"text\":\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"text\",\"text\": {\"content\": \"监控报警: %s\"}}`, msg.Message)\n\tcase \"markdown\":\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"markdown\",\"markdown\":{\"title\": 监控报警: \"%s\", \"text\": \"%s\"}}`, msg.Title, msg.Message)\n\tdefault:\n\t\tmessage = fmt.Sprintf(`{\"msgtype\": \"text\",\"text\": {\"content\": \"监控报警: %s\"}}`, msg.Message)\n\t}\n\n\tclient := &http.Client{}\n\trequest, _ := http.NewRequest(\"POST\", d.RobotURL, bytes.NewBuffer([]byte(message)))\n\trequest.Header.Set(\"Content-type\", \"application/json\")\n\tresponse, err := client.Do(request)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"访问钉钉URL(%s) 出错了: %s\", d.RobotURL, err)\n\t}\n\tif 
response.StatusCode != 200 {\n\t\tbody, _ := ioutil.ReadAll(response.Body)\n\t\treturn fmt.Errorf(\"访问钉钉URL(%s) 出错了: %s\", d.RobotURL, string(body))\n\t}\n\treturn nil\n}", "func (b *botHTTP) PushMessage(ctx context.Context, message *domain.LineMessage) error {\n\tbuffer := new(bytes.Buffer)\n\tencoder := json.NewEncoder(buffer)\n\tencoder.SetEscapeHTML(true)\n\tencoder.Encode(message)\n\n\turl := \"https://api.line.me/v2/bot/message/push\"\n\tbody, _, err := b.httpReq.Do(ctx, http.MethodPost, url, buffer.Bytes(), map[string]string{\n\t\t\"Content-Type\": \"application/json\",\n\t\t\"Authorization\": fmt.Sprintf(\"Bearer %s\", shared.GetEnv().LineClientToken),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(string(body))\n\treturn nil\n}", "func sendNotificationToSlack(payloadJSONEncoded []byte, response chan<- *http.Response) {\n\tfmt.Println(\"Sending notification to Slack...\")\n\n\t// Récupération des paramètres\n\t// ---------------------------\n\thookURL = config.SlackHookURL\n\thookPayload = config.SlackHookPayload\n\n\t// Envoi de la requête\n\t// -------------------\n\tresponse <- sendNotificationToApplication(hookURL, hookPayload, payloadJSONEncoded)\n}", "func (s *LoginSocket) SendMessageBySession(sessionId string, p interface{}) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tlogger.Info(\"SendMessageBySession on login channel\", sessionId, p)\n\n\t// go func() {\n\tfor conn, active := range loginSocket.subscriptions[sessionId] {\n\t\tif active {\n\t\t\ts.SendUpdateMessage(conn, p)\n\t\t}\n\t}\n\t// }()\n}", "func PushMessage(action string, what string) string {\n\treturn fmt.Sprintf(\"To %s %s on the OpenShift Cluster, please use `odo push` \\n\", action, what)\n}", "func logLine(message LiveLoggerMessage) {\n\tvar line string\n\tswitch message.eventType {\n\tcase \"NEW\":\n\t\tline = fmt.Sprintf(\"[ NEW ] - %s : %s\", message.source, message.message)\n\tcase \"DROP\":\n\t\tline = fmt.Sprintf(\"[ DROP ] - %s : %s\", message.source, message.message)\n\tcase \"START\":\n\t\tline = fmt.Sprintf(\"[ START ] - %s : %s\", message.source, message.message)\n\tcase \"UPDATE\":\n\t\tline = fmt.Sprintf(\"[ UPDATE ] - %s ( %6.2f %% ) : %s\", message.source, message.progress, message.message)\n\tcase \"END\":\n\t\tline = fmt.Sprintf(\"[ DONE ] - %s : %s\", message.source, message.message)\n\tdefault:\n\t\tline = fmt.Sprintf(\"%s : %s\", message.source, message.message)\n\t}\n\tfmt.Fprintln(os.Stdout, line)\n}", "func sendToVK(token string, message string, ID int64) {\n\t//VK Part\n\n\t//client, err := vkapi.NewClientFromLogin(\"<username>\", \"<password>\", vkapi.ScopeMessages)\n\tclient, err := vkapi.NewClientFromToken(token)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\tclient.Log(true)\n\n\tif err := client.InitLongPoll(0, 2); err != nil {\n\t\tlog.Panic(err)\n\t}\n\n\t//Send one consoleMSG to chatID!\n\tclient.SendMessage(vkapi.NewMessage(vkapi.NewDstFromChatID(ID), message))\n}", "func sendChannelMessage(api *slack.Client, channel string, message models.Message) error {\n\treturn sendMessage(api, message.IsEphemeral, channel, message.Vars[\"_user.id\"], message.Output, message.ThreadTimestamp, message.Attributes[\"ws_token\"], message.Remotes.Slack.Attachments)\n}", "func (bot *luaBot) sendChattable(config Chattable) (Message, error) {\n\tv, err := config.values()\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\tmessage, err := bot.makeMessageRequest(config.method(), v)\n\tif err != nil {\n\t\treturn Message{}, err\n\t}\n\treturn message, nil\n}", "func (ctx 
*Upgrader) WLOG(data interface{}, room string) {\n\tctx.WCFG(ModeClient.Log, data, room)\n}", "func (s *LoginSocket) SendMessage(c *Client, msgType string, p interface{}) {\n\tlogger.Info(\"SendMessage on login channel\", msgType, p)\n\tgo c.SendMessage(LoginChannel, msgType, p)\n}", "func TelegramSendMsg(telegramAPI *tgbotapi.BotAPI, MsgText string) (tgbotapi.Message, error) {\n\tres, err := telegramAPI.Send(tgbotapi.NewMessage(viper.GetInt64(\"telegram_chat_id\"), MsgText))\n\tif err != nil {\n\t\tlog.Errorf(\"[ERROR] %s\\n\", err)\n\t\treturn res, err\n\t}\n\tlog.Infof(\"[Telegram] Send %s\", MsgText)\n\treturn res, nil\n}", "func (dataChannel *DataChannel) SendMessage(log log.T, input []byte, inputType int) error {\n\treturn dataChannel.wsChannel.SendMessage(log, input, inputType)\n}", "func (c *Chat) Log(lvl LogCat, s string) {\n\ts = strings.Replace(strings.Replace(s, \"\\\\\", \"\\\\\\\\\", -1), \"\\n\", \"\\\\n\", -1)\n\n\tc.logger.LogLine(MakeLogLine(lvl, s))\n}", "func sayPolo(chatID int64) error {\n\t// Create the request body struct\n\treqBody := &sendMessageReqBody{\n\t\tChatID: chatID,\n\t\tText: \"Polo!!\",\n\t}\n\t// Create the JSON body from the struct\n\treqBytes, err := json.Marshal(reqBody)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Send a post request with your token\n\tres, err := http.Post(\"https://api.telegram.org/bot1495263911:AAEjmxdCuPazMzeegGeZpm1RLBBMtFgx2oE/sendMessage\", \"application/json\", bytes.NewBuffer(reqBytes))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.StatusCode != http.StatusOK {\n\t\treturn errors.New(\"unexpected status\" + res.Status)\n\t}\n\n\treturn nil\n}", "func (c *Client) Log(event interface{}) (error) {\n\t// create Splunk log\n\tlog := NewEvent(event, c.Source, c.SourceType, c.Index)\n\n\t// Convert requestBody struct to byte slice to prep for http.NewRequest\n\tb, err := json.Marshal(log)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//log.Print(string(b[:])) // print what the splunk post body will be for checking/debugging\n\n\t// make new request\n\turl := c.URL\n\treq, err := http.NewRequest(\"POST\", url, bytes.NewBuffer(b))\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\treq.Header.Add(\"Authorization\", \"Splunk \" + c.Token)\n\n\t// receive response\n\tres, err := c.HTTPClient.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// If statusCode is not good, return error string\n\tswitch res.StatusCode {\n\tcase 200:\n\t\treturn nil\n\tdefault:\n\t\t// Turn response into string and return it\n\t\tbuf := new(bytes.Buffer)\n\t\tbuf.ReadFrom(res.Body)\n\t\tresponseBody := buf.String()\n\t\terr = errors.New(responseBody)\n\n\t}\n\t//log.Print(responseBody)\t// print error to screen for checking/debugging\n\treturn err\n}", "func (t *Telegram) SendMessage(text string) error {\n\tlog.Printf(\"Send to telegram chat %d: %s\", t.chatID, text)\n\tmessage := &Message{\n\t\tChatID: t.chatID,\n\t\tText: text,\n\t}\n\tdata, err := json.Marshal(message)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// build request to telegram API\n\treq, err := t.buildRequest(data, SendMessageMethod)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\tbody, _ := ioutil.ReadAll(resp.Body)\n\t\tlog.Fatal(string(body))\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn nil\n}", "func (ml *MessageLog) SendMessage(message string) {\n\t// Prepend the message onto the messageLog slice\n\tif len(ml.messages) >= ml.MaxLength {\n\t\t// Throw away any messages that 
exceed our total queue size\n\t\tml.messages = ml.messages[:len(ml.messages)-1]\n\t}\n\tml.messages = append([]string{message}, ml.messages...)\n}", "func LevelPush( level Level) {\n logLevelStack = append(logLevelStack, level)\n logLevel = level\n}", "func (s *Server) Push(id string, data []byte) error {\n\tch, ok := s.ChannelMap.Get(id)\n\tif !ok {\n\t\treturn errors.New(\"channel no found\")\n\t}\n\treturn ch.Push(data)\n}", "func sendToWs(msg network.Message, update bool, s *session, messagesDb *MsgDb) {\n\tmsgNum := msg.Seqnum\n\twsMsg := wsMessage{Src: msg.Src, Dst: msg.Dst,\n\t\tMsgNumber: strconv.FormatUint(msgNum, 10), Payload: string(msg.Payload)}\n\ts.logger.Debug(\"sending json message to WS\", zap.Any(\"msg\", wsMsg))\n\tif update {\n\t\t(*messagesDb)[msgNum] = msg\n\t}\n\n\tif err := s.conn.WriteJSON(wsMsg); err != nil {\n\t\ts.logger.Error(\"failed to send json message\", zap.Error(err))\n\t\treturn\n\t}\n}", "func (m *GoogleChatManager) Push(alerts []alertmgrtmpl.Alert) error {\n\tm.lo.WithField(\"count\", len(alerts)).Info(\"dispatching alerts to google chat\")\n\n\t// For each alert, lookup the UUID and send the alert.\n\tfor _, a := range alerts {\n\t\t// If it's a new alert whose fingerprint isn't in the active alerts map, add it first.\n\t\tif m.activeAlerts.loookup(a.Fingerprint) == \"\" {\n\t\t\tm.activeAlerts.add(a)\n\t\t}\n\n\t\t// Prepare a list of messages to send.\n\t\tmsgs, err := m.prepareMessage(a)\n\t\tif err != nil {\n\t\t\tm.lo.WithError(err).Error(\"error preparing message\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// Dispatch an HTTP request for each message.\n\t\tfor _, msg := range msgs {\n\t\t\tvar (\n\t\t\t\tthreadKey = m.activeAlerts.alerts[a.Fingerprint].UUID.String()\n\t\t\t\tnow = time.Now()\n\t\t\t)\n\n\t\t\tm.metrics.Increment(fmt.Sprintf(`alerts_dispatched_total{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()))\n\n\t\t\t// Send message to API.\n\t\t\tif m.dryRun {\n\t\t\t\tm.lo.WithField(\"room\", m.Room()).Info(\"dry_run is enabled for this room. skipping pushing notification\")\n\t\t\t} else {\n\t\t\t\tif err := m.sendMessage(msg, threadKey); err != nil {\n\t\t\t\t\tm.metrics.Increment(fmt.Sprintf(`alerts_dispatched_errors_total{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()))\n\t\t\t\t\tm.lo.WithError(err).Error(\"error sending message\")\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tm.metrics.Duration(fmt.Sprintf(`alerts_dispatched_duration_seconds{provider=\"%s\", room=\"%s\"}`, m.ID(), m.Room()), now)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (st *stream) Push(msg string) *SSEvent {\n\n st.lastId++\n now := time.Now()\n event := &SSEvent{\n Id: st.lastId,\n Event: \"message\",\n Data: msg,\n Timestamp: now,\n }\n\n /* Keep up to 1 minute of recent events. */\n st.recent = append(st.recent, event)\n cutoff := now.Add(-60 * time.Second)\n var i int\n var ev *SSEvent\n for i, ev = range st.recent {\n if ev.Timestamp.After(cutoff) {\n break\n }\n }\n st.recent = st.recent[i:]\n\n return event\n}" ]
[ "0.6161097", "0.5922427", "0.5848327", "0.58106387", "0.5733693", "0.5664714", "0.56604105", "0.56219655", "0.54866457", "0.5474753", "0.545893", "0.5450425", "0.5430241", "0.5413946", "0.5412267", "0.5411329", "0.5405167", "0.53996307", "0.53549343", "0.53183866", "0.5283264", "0.5279946", "0.5274837", "0.5273039", "0.52727896", "0.52640384", "0.5244388", "0.5227349", "0.5218871", "0.52114767", "0.51697415", "0.51691353", "0.51626277", "0.51593953", "0.5156178", "0.5149148", "0.51255465", "0.51147234", "0.51140743", "0.5111787", "0.509851", "0.5098362", "0.50913095", "0.5089759", "0.5087134", "0.5083314", "0.5074231", "0.5071548", "0.50615174", "0.5039237", "0.5038725", "0.5028795", "0.5014399", "0.5010637", "0.5009938", "0.50098395", "0.50005424", "0.49955553", "0.49941164", "0.4992441", "0.4991198", "0.49755964", "0.49719188", "0.49674177", "0.49673554", "0.49616283", "0.49524373", "0.49502045", "0.49483508", "0.4942001", "0.49398866", "0.49389553", "0.49360615", "0.49290147", "0.49228463", "0.49196064", "0.49192768", "0.49109742", "0.4903565", "0.49017656", "0.4899768", "0.48997617", "0.48982894", "0.48977265", "0.48952526", "0.48929164", "0.48878443", "0.4883146", "0.48823583", "0.48812607", "0.48792145", "0.4873233", "0.4867345", "0.48659748", "0.48544115", "0.48415524", "0.48406586", "0.48363996", "0.4833544", "0.48325282" ]
0.71376157
0
Params grabs the parameters from the URL.
func Params(r *http.Request) map[string]string {
	previousPathOffset, ok := r.Context().Value(previousPathOffsetContextKey).(int)
	if !ok {
		previousPathOffset = 0
	}

	routePath, ok := r.Context().Value(pathContextKey).(string)
	if !ok {
		routePath = "/"
	}

	requestPathComponents := strings.Split(r.URL.Path[1:], "/")[previousPathOffset:]
	routePathComponents := strings.Split(routePath[1:], "/")

	result := make(map[string]string)
	for i := 0; i < len(routePathComponents); i++ {
		if routePathComponents[i][0] == ':' {
			result[routePathComponents[i][1:]] = requestPathComponents[i]
		}
	}

	return result
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func getAllURLParameters(u *url.URL) map[string][]string {\n\tv, err := url.ParseQuery(u.RawQuery)\n\n\tif err != nil {\n\t\treturn nil\n\t}\n\treturn v\n}", "func getURLParameters(u *url.URL, key string) []string {\n\tv, err := url.ParseQuery(u.RawQuery)\n\n\tif err != nil {\n\t\treturn []string{}\n\t}\n\n\treturn v[key]\n}", "func GetParams(regEx, url string) (paramsMap map[string]string) {\n\n\tvar compRegEx = regexp.MustCompile(regEx)\n\tmatch := compRegEx.FindStringSubmatch(url)\n\n\tparamsMap = make(map[string]string)\n\tfor i, name := range compRegEx.SubexpNames() {\n\t\tif i > 0 && i <= len(match) {\n\t\t\tparamsMap[name] = match[i]\n\t\t}\n\t}\n\treturn\n}", "func getParams(regEx, url string) (paramsMap map[string]string) {\n\n\tvar compRegEx = regexp.MustCompile(regEx)\n\tmatch := compRegEx.FindStringSubmatch(url)\n\n\tparamsMap = make(map[string]string)\n\tfor i, name := range compRegEx.SubexpNames() {\n\t\tif i > 0 && i <= len(match) {\n\t\t\tparamsMap[name] = match[i]\n\t\t}\n\t}\n\treturn\n}", "func Params(r *http.Request, allowOwner bool) (*SearchParams, error) {\n\tqs, err := url.ParseQuery(r.URL.RawQuery)\n\tif err != nil {\n\t\treturn nil, errors.New(\"invalid query parameters\")\n\t}\n\tps := SearchParams{\tDatabaseParams: db.DatabaseParams{Pagination: &chassis.Pagination{},}}\n\n\t// Pagination parameters.\n\tif err := chassis.PaginationParams(qs, &ps.Pagination.Page, &ps.Pagination.PerPage); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Format parameter.\n\tswitch qs.Get(\"format\") {\n\tcase \"full\":\n\t\tps.Format = FullResults\n\tcase \"summary\", \"\":\n\t\tps.Format = SummaryResults\n\tdefault:\n\t\treturn nil, errors.New(\"invalid format parameter\")\n\t}\n\n\t// Filtering parameters.\n\tif ps.PostTypes, err = PostTypeParam(qs); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err = chassis.SortingParam(qs,\"sort_by\", &ps.SortBy); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &ps, nil\n}", "func url_parser(url_string string) url.Values{\n\tparams, err := url.ParseQuery(url_string)\n\tif err != nil{\n\t\tfmt.Println(err)\n\t}\n\treturn params\n}", "func GetParams(q url.Values, schema *SchemaVersion) (*Params, error) {\n\tstartDate, err := cleanDateString(q.Get(\"startDate\"))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tendDate, err := cleanDateString(q.Get(\"endDate\"))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcarelink := false\n\tif values, ok := q[\"carelink\"]; ok {\n\t\tif len(values) < 1 {\n\t\t\treturn nil, errors.New(\"carelink parameter not valid\")\n\t\t}\n\t\tcarelink, err = strconv.ParseBool(values[len(values)-1])\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"carelink parameter not valid\")\n\t\t}\n\t}\n\n\tdexcom := false\n\tif values, ok := q[\"dexcom\"]; ok {\n\t\tif len(values) < 1 {\n\t\t\treturn nil, errors.New(\"dexcom parameter not valid\")\n\t\t}\n\t\tdexcom, err = strconv.ParseBool(values[len(values)-1])\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"dexcom parameter not valid\")\n\t\t}\n\t}\n\n\tlatest := false\n\tif values, ok := q[\"latest\"]; ok {\n\t\tif len(values) < 1 {\n\t\t\treturn nil, errors.New(\"latest parameter not valid\")\n\t\t}\n\t\tlatest, err = strconv.ParseBool(values[len(values)-1])\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"latest parameter not valid\")\n\t\t}\n\t}\n\n\tmedtronic := false\n\tif values, ok := q[\"medtronic\"]; ok {\n\t\tif len(values) < 1 {\n\t\t\treturn nil, errors.New(\"medtronic parameter not valid\")\n\t\t}\n\t\tmedtronic, err = 
strconv.ParseBool(values[len(values)-1])\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"medtronic parameter not valid\")\n\t\t}\n\t}\n\n\tp := &Params{\n\t\tUserID: q.Get(\":userID\"),\n\t\tDeviceID: q.Get(\"deviceId\"),\n\t\tUploadID: q.Get(\"uploadId\"),\n\t\t//the query params for type and subtype can contain multiple values seperated\n\t\t//by a comma e.g. \"type=smbg,cbg\" so split them out into an array of values\n\t\tTypes: strings.Split(q.Get(\"type\"), \",\"),\n\t\tSubTypes: strings.Split(q.Get(\"subType\"), \",\"),\n\t\tDate: Date{startDate, endDate},\n\t\tSchemaVersion: schema,\n\t\tCarelink: carelink,\n\t\tDexcom: dexcom,\n\t\tLatest: latest,\n\t\tMedtronic: medtronic,\n\t}\n\n\treturn p, nil\n\n}", "func (h *auth) Params(c echo.Context) error {\n\t// Fetch params from URL queries\n\temail := c.QueryParam(\"email\")\n\tif email == \"\" {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"No email provided.\"))\n\t}\n\n\treturn h.params(c, email)\n}", "func FetchParams(r *http.Request) httprouter.Params {\n\tctx := r.Context()\n\treturn ctx.Value(\"params\").(httprouter.Params)\n}", "func parseParams(params map[string][]string) url.Values {\n\tv := url.Values{}\n\tfor key, values := range params {\n\t\tfor _, value := range values {\n\t\t\tv.Add(key, value)\n\t\t}\n\t}\n\treturn v\n}", "func (s *Service) getParams(r *http.Request) map[string]string {\n\treturn mux.Vars(r)\n}", "func (u URIExpr) Params() []string {\n\tr := regexp.MustCompile(`\\{([^\\{\\}]+)\\}`)\n\tmatches := r.FindAllStringSubmatch(string(u), -1)\n\tif len(matches) == 0 {\n\t\treturn nil\n\t}\n\twcs := make([]string, len(matches))\n\tfor i, m := range matches {\n\t\twcs[i] = m[1]\n\t}\n\treturn wcs\n}", "func (r InboundRequest) QueryParams() map[string][]string {\n return r.URL.Query()\n}", "func (ctx *Context) URLParams() map[string]string {\n\treturn ctx.urlParams\n}", "func extractParamFromUrl(urlPattern string, urlRecieved string) map[string]string {\n\tsplitUrlPattern := strings.Split(strings.Trim(urlPattern, \"/\"), \"/\")\n\tsplitUrlRecieved := strings.Split(strings.Trim(urlRecieved, \"/\"), \"/\")\n\n\tparams := make(map[string]string)\n\n\tvar paramValue string\n\tvar paramKey string\n\tfor index, urlPatternElement := range splitUrlPattern {\n\t\tif urlPatternElement[:1] == \":\" {\n\t\t\tparamKey = urlPatternElement[1:len(urlPatternElement)]\n\t\t\tif len(splitUrlRecieved) > index {\n\t\t\t\tparamValue = splitUrlRecieved[index]\n\t\t\t} else {\n\t\t\t\tparamValue = \"\"\n\t\t\t}\n\t\t\tparams[paramKey] = paramValue\n\t\t}\n\t}\n\n\treturn params\n}", "func (k Querier) Params(c context.Context, _ *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tctx := sdk.UnwrapSDKContext(c)\n\tvar params types.Params\n\tk.paramSpace.GetParamSet(ctx, &params)\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (rp RoutePath) GetURLParams(path string) map[string]string {\n\tprintln(path)\n\tif path[0] != '/' {\n\t\tpath = \"/\" + path\n\t}\n\tparams := map[string]string{}\n\tpathTokens := strings.Split(path, \"/\")\n\trpTokens := strings.Split(string(rp), \"/\")\n\tfor idx, t := range rpTokens {\n\t\tif t == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif t[0] == ':' {\n\t\t\tparams[t[1:]] = pathTokens[idx]\n\t\t}\n\t}\n\treturn params\n}", "func collectParameters(r *http.Request, oauthParams map[string]string) map[string]string {\n\tparams := map[string]string{}\n\tfor key, value := range r.URL.Query() {\n\t\tparams[key] = value[0]\n\t}\n\tfor key, value := range oauthParams 
{\n\t\tparams[key] = value\n\t}\n\treturn params\n}", "func getParams(req *http.Request, key string) (string, bool) {\n\tif req.Method != http.MethodGet {\n\t\treturn \"\", false\n\t}\n\n\tvalues := req.URL.Query()\n\tvalue := values.Get(key)\n\n\tif value == \"\" {\n\t\treturn \"\", false\n\t}\n\n\treturn value, true\n}", "func (q *Query) Params() interface{} {\n\treturn q.Request.Params\n}", "func (r PatternedRoute) Params() map[string]interface{} {\n\tpv := make(map[string]interface{})\n\n\tparamNames := r.ParamNames()\n\tfor i, v := range r.ParamValues() {\n\t\tpv[paramNames[i]] = v\n\t\tif n, err := strconv.Atoi(v.(string)); err == nil {\n\t\t\tpv[paramNames[i]] = n\n\t\t}\n\t}\n\treturn pv\n}", "func getParams(r *http.Request) (string, appcurrency.Currency, error) {\n\tvars := mux.Vars(r)\n\tID, _ := vars[\"id\"]\n\n\tcursym := r.URL.Query().Get(\"currency\")\n\tif cursym == \"\" {\n\t\treturn \"\", nil, errors.New(errorParamQueryMissing)\n\t}\n\n\tcur, e := appcurrency.New(cursym)\n\tif e != nil {\n\t\treturn \"\", nil, fmt.Errorf(errorAmountCreation, e.Error())\n\t}\n\n\treturn ID, cur, nil\n}", "func (k Querier) Params(c context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tif req == nil {\n\t\treturn nil, status.Error(codes.InvalidArgument, \"empty request\")\n\t}\n\n\tctx := sdk.UnwrapSDKContext(c)\n\n\tparams := k.GetParams(ctx)\n\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (u *URL) QueryParams() map[string][]string {\n\tif u.query == nil {\n\t\tu.query = u.Query()\n\t}\n\treturn map[string][]string(u.query)\n}", "func (sc SearchClient) QueryParams() url.Values {\n\tparams := url.Values{}\n\n\tif sc.FilterID > 0 {\n\t\tparams.Add(\"filter_id\", strconv.Itoa(sc.FilterID))\n\t}\n\n\tif sc.PerPage > 1 && sc.PerPage != 25 {\n\t\tparams.Add(\"per_page\", strconv.Itoa(sc.PerPage))\n\t}\n\n\tif len(sc.Key) > 0 {\n\t\tparams.Add(\"key\", sc.Key)\n\t}\n\n\tif len(sc.SortDirection) > 0 {\n\t\tparams.Add(\"sd\", sc.SortDirection)\n\t}\n\n\tif len(sc.SortField) > 0 {\n\t\tparams.Add(\"sf\", sc.SortField)\n\t}\n\n\treturn params\n}", "func (req *request) Params() *EntrySet {\n return req.params\n}", "func (config StickerConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\treturn params, nil\n}", "func ParseUrl(urlstring string) map[string][]string{\n values,err := url.ParseQuery(urlstring);\n if(err != nil){\n panic(\"boom\")\n }\n return values;\n}", "func (k Querier) Params(ctx context.Context, _ *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tparams, err := k.GetParams(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (k Keeper) Params(c context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tctx := sdk.UnwrapSDKContext(c)\n\tvar params types.Params\n\tk.paramSpace.GetParamSet(ctx, &params)\n\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (k Keeper) Params(c context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tctx := sdk.UnwrapSDKContext(c)\n\n\tparams := k.GetParamSet(ctx)\n\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (k Keeper) Params(c context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tdefer telemetry.MeasureSince(time.Now(), types.ModuleName, \"query\", \"Params\")\n\tctx := sdk.UnwrapSDKContext(c)\n\tvar params 
types.Params\n\tk.paramSpace.GetParamSet(ctx, &params)\n\n\treturn &types.QueryParamsResponse{Params: params, Request: req}, nil\n}", "func (_this *URL) SearchParams() *URLSearchParams {\n\tvar ret *URLSearchParams\n\tvalue := _this.Value_JS.Get(\"searchParams\")\n\tret = URLSearchParamsFromJS(value)\n\treturn ret\n}", "func (ctx *Context) QueryParams(key string) []string {\r\n\tif ctx.queryParams == nil {\r\n\t\tctx.queryParams = ctx.R.URL.Query()\r\n\t}\r\n\treturn ctx.queryParams[key]\r\n}", "func (ctx *SimpleContext) QueryParams(typ interface{}) error {\n\tq := ctx.request.URL.Query()\n\terr := query.Unmarshal(q, typ)\n\tif nil != err {\n\t\treturn err\n\t}\n\treturn ctx.validate(typ)\n}", "func (j *Jsonnet) Params(envName string) ([]ModuleParameter, error) {\n\tj.log().WithField(\"env-name\", envName).Debug(\"getting component params\")\n\n\tparamsData, err := j.readParams(envName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tprops, err := params.ToMap(j.Name(false), paramsData, paramsComponentRoot)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"could not find components\")\n\t}\n\n\tvar params []ModuleParameter\n\tfor k, v := range props {\n\t\tvStr, err := j.paramValue(v)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tnp := ModuleParameter{\n\t\t\tComponent: j.Name(false),\n\t\t\tKey: k,\n\t\t\tValue: vStr,\n\t\t}\n\n\t\tparams = append(params, np)\n\t}\n\n\tsort.Slice(params, func(i, j int) bool {\n\t\treturn params[i].Key < params[j].Key\n\t})\n\n\treturn params, nil\n}", "func ParseParams(urlString string) (Params, error) {\n\tp := NewParams()\n\tif !strings.HasPrefix(urlString, \"http\") {\n\t\turlString = \"https://\" + urlString\n\t}\n\tu, err := url.Parse(urlString)\n\tif err != nil {\n\t\treturn p, err\n\t}\n\tp.Query = u.Query()\n\tif u.Scheme == \"\" {\n\t\tu.Scheme = \"https\"\n\t}\n\tif u.Path != \"\" {\n\t\tp.Prefix = strings.Trim(u.Path, \"/\")\n\t}\n\tu.RawQuery = \"\"\n\tu.Fragment = \"\"\n\tu.Path = \"\"\n\tu.RawPath = \"\"\n\tp.Server = u.String()\n\treturn p, nil\n}", "func PathParams(r *http.Request) map[string]string {\n\treturn mux.Vars(r)\n}", "func GetParams(name string, r *http.Request) []string {\n\tif params := r.Context().Value(ParamsKey); params != nil {\n\t\tparams := params.(Params)\n\t\tif name != \"*\" {\n\t\t\tname = \":\" + name\n\t\t}\n\t\tif param := params[name]; param != nil {\n\t\t\tswitch param := param.(type) {\n\t\t\tcase []string:\n\t\t\t\treturn param\n\t\t\tdefault:\n\t\t\t\treturn []string{param.(string)}\n\t\t\t}\n\t\t}\n\t}\n\treturn []string{}\n}", "func (r *route) parsePatternParams(path string) string {\n pathParts := strings.Split(path, \"/\")\n patternParts := strings.Split(r.pattern, \"/\")\n params := \"\"\n for i, s := range patternParts {\n if strings.HasPrefix(s, \":\") {\n if params != \"\" {\n params += \"&\"\n }\n params += s[1:] + \"=\" + pathParts[i]\n }\n }\n return params\n}", "func (*Handler) paginationParams(r *http.Request) (int, int) {\n\tq := r.URL.Query()\n\tpage, err := strconv.Atoi(q.Get(\"page\"))\n\tif err != nil {\n\t\tpage = 1\n\t}\n\tlimit, err := strconv.Atoi(q.Get(\"limit\"))\n\tif err != nil {\n\t\tlimit = 20\n\t}\n\treturn page*limit - limit, limit\n}", "func (input *BeegoInput) Params() map[string]string {\n\tm := make(map[string]string)\n\tfor i, v := range input.pnames {\n\t\tif i <= len(input.pvalues) {\n\t\t\tm[v] = input.pvalues[i]\n\t\t}\n\t}\n\treturn m\n}", "func (q querier) Params(c context.Context, req *types.QueryParamsRequest) (*types.QueryParamsResponse, error) 
{\n\tctx := sdk.UnwrapSDKContext(c)\n\treturn &types.QueryParamsResponse{Params: q.GetParams(ctx)}, nil\n}", "func extractPathParameters(url string, pageUrl string, eventName string) []string {\n\t// validate\n\tif !strings.HasPrefix(url, pageUrl) {\n\t\tpanic(fmt.Sprintf(\"%v should has prefix %v\", url, pageUrl))\n\t}\n\n\t// parepare parameters\n\tparamsString := url[len(pageUrl):]\n\tif eventName != \"\" {\n\t\tindex := strings.Index(paramsString, \"/\")\n\t\tif index > 0 {\n\t\t\tparamsString = paramsString[index:]\n\t\t}\n\t}\n\tvar pathParams []string\n\tif len(paramsString) > 0 {\n\t\tif strings.HasPrefix(paramsString, \"/\") {\n\t\t\tparamsString = paramsString[1:]\n\t\t}\n\t\tpathParams = strings.Split(paramsString, \"/\")\n\t}\n\tdebug.Log(\"- - [injection] URL:%v, parameters:%v\", url, pathParams)\n\treturn pathParams\n}", "func GetParams() map[string]interface{} {\n\treturn map[string]interface{}{\n\t\t\"app_name\": Params.AppName,\n\t\t\"selfreg\": Params.Selfreg,\n\t\t\"use_captcha\": Params.UseCaptcha,\n\t\t\"use_pin\": Params.UsePin,\n\t\t\"login_not_confirmed_email\": Params.LoginNotConfirmedEmail,\n\t\t\"max_attempts\": Params.MaxAttempts,\n\t\t\"reset_time\": Params.ResetTime,\n\t\t\"no_schema\": Params.NoSchema,\n\t}\n}", "func GetParams()(retParams ParamStruct){\n\tvar userFlag = flag.String(\"user\",\"\",\"IPS Username\")\n\tvar passFlag = flag.String(\"password\",\"\",\"IPS Password\")\n\tvar cmdFlag = flag.String(\"cmd\",\"ls\",\"Command String\")\n\tvar testFlag = flag.Bool(\"test\",false,\"Testing Mode\")\n\tvar hostFlag = flag.String(\"hostname\",\"localhost\",\"Hostname or IP Address\")\n\tvar fileFlag = flag.String(\"hostfile\",\"\",\"HostsFile Name\")\n\tvar portFlag = flag.String(\"port\",\"22\",\"Host Port\")\n\tflag.Parse()\n\n\tretParams.UserName = *userFlag\n\tretParams.UserPass = *passFlag\n\tretParams.Cmd = *cmdFlag\n\tretParams.Test = *testFlag\n\tretParams.HostName = *hostFlag\n\tretParams.HostFile = *fileFlag\n\tretParams.HostPort = *portFlag\n\n\t// Test Params\n\tif retParams.UserName == \"\" {\n\t\tretParams.UserName = getUser()\n\t}\n\tif retParams.UserPass == \"\" {\n\t\tretParams.UserPass = getPasswd()\n\t}\n\n\treturn retParams\n}", "func parseDSNParams(cfg *DSN, params string) (err error) {\n\tfor _, v := range strings.Split(params, \"&\") {\n\t\tparam := strings.SplitN(v, \"=\", 2)\n\t\tif len(param) != 2 {\n\t\t\tcontinue\n\t\t}\n\t\t// lazy init\n\t\tif cfg.Params == nil {\n\t\t\tcfg.Params = make(map[string]string)\n\t\t}\n\t\tvalue := param[1]\n\t\tif cfg.Params[param[0]], err = url.QueryUnescape(value); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func (config VideoConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Caption != \"\" {\n\t\tparams[\"caption\"] = config.Caption\n\t\tif config.ParseMode != \"\" {\n\t\t\tparams[\"parse_mode\"] = config.ParseMode\n\t\t}\n\t}\n\n\treturn params, nil\n}", "func URL(r *http.Request, data interface{}) error {\n\tq := r.URL.Query()\n\tv := reflect.ValueOf(data)\n\tt := v.Elem().Type()\n\tfor i := 0; i < v.Elem().NumField(); i++ {\n\t\tft := t.Field(i)\n\t\tfv := v.Elem().Field(i)\n\t\tif tv, exist := ft.Tag.Lookup(paramTag); exist {\n\t\t\tval := fmt.Sprintf(\"%v\", fv)\n\t\t\tif !(len(val) == 0 && strings.Contains(tv, omitEmpty)) {\n\t\t\t\tq.Add(strings.SplitN(tv, \",\", 2)[0], val)\n\t\t\t}\n\t\t}\n\t}\n\tr.URL.RawQuery = q.Encode()\n\treturn nil\n}", "func PathParams(req *http.Request) (params map[string]string) {\n\tparams = 
req.Context().Value(paramsKey).(map[string]string)\n\n\treturn\n}", "func Params(r *http.Request) httprouter.Params {\n\treturn context.Get(r, params).(httprouter.Params)\n}", "func (config PhotoConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Caption != \"\" {\n\t\tparams[\"caption\"] = config.Caption\n\t\tif config.ParseMode != \"\" {\n\t\t\tparams[\"parse_mode\"] = config.ParseMode\n\t\t}\n\t}\n\n\treturn params, nil\n}", "func Params(r *http.Request) map[string]string {\n\treturn httptreemux.ContextParams(r.Context())\n}", "func Params(ctx context.Context) httprouter.Params {\n\tif ctx == nil {\n\t\treturn emptyParams\n\t}\n\tif p, ok := ctx.Value(paramsKey).(httprouter.Params); ok {\n\t\treturn p\n\t}\n\treturn emptyParams\n}", "func (k Keeper) Params(c context.Context, _ *types.QueryParamsRequest) (*types.QueryParamsResponse, error) {\n\tctx := sdk.UnwrapSDKContext(c)\n\tparams := k.GetParams(ctx)\n\n\treturn &types.QueryParamsResponse{Params: params}, nil\n}", "func (options *Options) Params() []interface{} {\n\treturn options.params\n}", "func (t *Task) GetParams(p interface{}) error {\n\tparams := t.Params\n\tif params == nil {\n\t\treturn nil\n\t}\n\treturn json.Unmarshal(params, p)\n}", "func ParseURL(url string) Request {\n\tr := Request{\"\", \"\", make([]string, 0), make(map[string]int, 0)}\n\tfmt.Println(url)\n\turl = strings.ToLower(url)\n\n\t//replace less than/greater than symbols in url encode\n\turl = strings.Replace(url, \"%3c\", \"<\", -1)\n\turl = strings.Replace(url, \"%3e\", \">\", -1)\n\n\t//remove the last character if we end in \"/\"\n\tif url[len(url)-1:len(url)] == \"/\" {\n\t\turl = url[:len(url)-1]\n\t}\n\n\turlSections := strings.Split(url, \"/\")\n\n\tr.Type = urlSections[0]\n\tif r.Type == \"api\" {\n\t\t//title exists\n\t\tif len(urlSections) > 1 {\n\t\t\ttitleParamStr := urlSections[1]\n\n\t\t\t// splits table name and parameters by \"?\"\n\t\t\tqMarkSplit := strings.Split(titleParamStr, \"?\")\n\t\t\tr.TableName = qMarkSplit[0]\n\n\t\t\t// if parameters exist, separate by \"&\"\n\t\t\tif len(qMarkSplit) > 1 {\n\t\t\t\tparamSplit := strings.Split(qMarkSplit[1], \"&\")\n\t\t\t\tfor _, param := range paramSplit {\n\t\t\t\t\t//fmt.Println(\"Param: \" + param)\n\t\t\t\t\t//if space, we make exception\n\t\t\t\t\tif strings.Contains(param, \"_\") {\n\t\t\t\t\t\t//fmt.Println(\"Contains \" + param)\n\t\t\t\t\t\tparam = strings.Replace(param, \"_\", \" \", -1)\n\t\t\t\t\t\tindex := strings.Index(param, \"=\")\n\t\t\t\t\t\tparam = param[0:index+1] + \"'\" + param[index+1:] + \"'\"\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println(param)\n\t\t\t\t\tparams := strings.Split(param, \"=\")\n\t\t\t\t\tequivSign := \"=\"\n\t\t\t\t\tif params[0] == param {\n\t\t\t\t\t\tparams = strings.Split(param, \"<\")\n\t\t\t\t\t\tequivSign = \"<\"\n\t\t\t\t\t}\n\t\t\t\t\tif params[0] == param {\n\t\t\t\t\t\tparams = strings.Split(param, \">\")\n\t\t\t\t\t\tequivSign = \">\"\n\t\t\t\t\t}\n\t\t\t\t\tfmt.Println(params)\n\t\t\t\t\tparamKey := params[0]\n\t\t\t\t\tparamVal := params[1]\n\n\t\t\t\t\tif paramKey == \"page\" || paramKey == \"size\" {\n\t\t\t\t\t\tr.SpecialParameters[paramKey], _ = strconv.Atoi(strings.Split(param, \"=\")[1])\n\t\t\t\t\t} else {\n\t\t\t\t\t\tif paramKey == \"objectid\" {\n\t\t\t\t\t\t\tparamKey = \"apiobjects.objectid\"\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tr.Parameters = append(r.Parameters, paramKey+equivSign+paramVal)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t//second potential urlSection (after tableName & 
parameters) is specified id\n\t\t//by nature of SQLParser, this is considered as a parameter\n\t\tif len(urlSections) > 2 && urlSections[2] != \"\" {\n\t\t\t//this only works for objectID!\n\t\t\tr.Parameters = append(r.Parameters, r.TableName+\".\"+tableNameToId[r.TableName]+\"=\"+urlSections[2])\n\t\t}\n\n\t\t//set special paraemters\n\t\tif _, ok := r.SpecialParameters[\"size\"]; ok {\n\t\t} else {\n\t\t\tr.SpecialParameters[\"size\"] = 10\n\t\t\t//r.SpecialParameters[\"size\"] = 1000000\n\t\t}\n\n\t\tif _, ok := r.SpecialParameters[\"page\"]; ok {\n\t\t} else {\n\t\t\tr.SpecialParameters[\"page\"] = 1\n\t\t}\n\t} else if r.Type == \"info\" {\n\t\t// if length = 2, wants column data\n\t\tif len(urlSections) >= 2 {\n\t\t\tr.TableName = urlSections[1]\n\t\t}\n\n\t\t// if length is 3, wants column data info\n\t\tif len(urlSections) >= 3 {\n\t\t\tparamSplit := strings.Split(urlSections[2], \"&\")\n\t\t\tfor _, param := range paramSplit {\n\t\t\t\tif strings.Contains(param, \"_\") {\n\t\t\t\t\tparam = strings.Replace(param, \"_\", \" \", -1)\n\t\t\t\t\tparam = \"'\" + param + \"'\"\n\t\t\t\t}\n\t\t\t\tr.Parameters = append(r.Parameters, param)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn r\n}", "func (ctx *Context) QueryParams() url.Values {\n\tif ctx.queryParams == nil {\n\t\tctx.queryParams = ctx.Request.URL.Query()\n\t}\n\treturn ctx.queryParams\n}", "func (r *Request) Params() types.M {\n\treturn r.params\n}", "func (q queryServer) Params(ctx context.Context, req *v1.QueryParamsRequest) (*v1.QueryParamsResponse, error) {\n\tif req == nil {\n\t\treturn nil, status.Error(codes.InvalidArgument, \"invalid request\")\n\t}\n\n\tparams, err := q.k.Params.Get(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresponse := &v1.QueryParamsResponse{}\n\n\t//nolint:staticcheck // needed for legacy parameters\n\tswitch req.ParamsType {\n\tcase v1.ParamDeposit:\n\t\tdepositParams := v1.NewDepositParams(params.MinDeposit, params.MaxDepositPeriod)\n\t\tresponse.DepositParams = &depositParams\n\n\tcase v1.ParamVoting:\n\t\tvotingParams := v1.NewVotingParams(params.VotingPeriod)\n\t\tresponse.VotingParams = &votingParams\n\n\tcase v1.ParamTallying:\n\t\ttallyParams := v1.NewTallyParams(params.Quorum, params.Threshold, params.VetoThreshold)\n\t\tresponse.TallyParams = &tallyParams\n\tdefault:\n\t\tif len(req.ParamsType) > 0 {\n\t\t\treturn nil, status.Errorf(codes.InvalidArgument, \"unknown params type: %s\", req.ParamsType)\n\t\t}\n\t}\n\tresponse.Params = &params\n\n\treturn response, nil\n}", "func (af *filtBase) GetParams() (int, float64, []float64) {\n\treturn af.n, af.mu, af.w.RawRowView(0)\n}", "func (c Context) GetParams(p interface{}) error {\n\tparams := c.Current().Params\n\tif params == nil {\n\t\treturn nil\n\t}\n\treturn json.Unmarshal(params, p)\n}", "func (r *Route) ReadParameters(route string) map[string]string {\n params := make(map[string]string);\n submatches := r.expr.FindStringSubmatch(route);\n for i := 1; i < len(submatches); i++ {\n params[ r.params[i-1] ] = submatches[i];\n }\n return params;\n}", "func TestGetParams(t *testing.T) {\n\tts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprint(w, r.FormValue(\"p\"))\n\t}))\n\tdefer ts.Close()\n\n\tj := jaguar.New()\n\tj.Params.Add(\"p\", \"hello\")\n\tresp, err := j.Url(ts.URL).Send()\n\tif err != nil {\n\t\tt.Errorf(\"Error: %v\", err)\n\t}\n\n\tif resp.String() != \"hello\" {\n\t\tt.Errorf(\"Unexpected result: %v\", resp.String())\n\t}\n}", "func AppendParams(u *url.URL, nameValues ...string) 
*url.URL {\n\tlength := len(nameValues)\n\tif length%2 != 0 {\n\t\tpanic(\"nameValues must have even length.\")\n\t}\n\tresult := *u\n\tvalues := result.Query()\n\tfor i := 0; i < length; i += 2 {\n\t\tvalues.Add(nameValues[i], nameValues[i+1])\n\t}\n\tresult.RawQuery = values.Encode()\n\treturn &result\n}", "func GetParamsViewHandler(w http.ResponseWriter, r *http.Request) {\n\ttitle, err := r.URL.Query()[\"title\"]\n\tif !err || len(title[0]) < 1 {\n\t\ttemplates.BasicHTTPRender(w, \"Error: Title is not supplied\")\n\t\treturn\n\t}\n\n\tbody, err := r.URL.Query()[\"body\"]\n\tif !err || len(body[0]) < 1 {\n\t\ttemplates.BasicHTTPRender(w, \"Error: Body is not supplied\")\n\t\treturn\n\t}\n\n\tp := &templates.Page{Title: strings.Join(title, \" \"), Body: strings.Join(body, \" \")}\n\ttemplates.RenderTemplate(w, \"templates/dynamic\", p)\n\n}", "func ParseGetParams(r *http.Request, dst interface{}) error {\n\terr := r.ParseForm()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn schema.NewDecoder().Decode(dst, r.Form)\n}", "func (r *Request) MatchParams(params map[string]string) *Request {\n\tquery := r.URLStruct.Query()\n\tfor key, value := range params {\n\t\tquery.Set(key, value)\n\t}\n\tr.URLStruct.RawQuery = query.Encode()\n\treturn r\n}", "func (config AudioConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Duration != 0 {\n\t\tparams[\"duration\"] = strconv.Itoa(config.Duration)\n\t}\n\n\tif config.Performer != \"\" {\n\t\tparams[\"performer\"] = config.Performer\n\t}\n\tif config.Title != \"\" {\n\t\tparams[\"title\"] = config.Title\n\t}\n\tif config.Caption != \"\" {\n\t\tparams[\"caption\"] = config.Caption\n\t\tif config.ParseMode != \"\" {\n\t\t\tparams[\"parse_mode\"] = config.ParseMode\n\t\t}\n\t}\n\n\treturn params, nil\n}", "func (config VideoNoteConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Length != 0 {\n\t\tparams[\"length\"] = strconv.Itoa(config.Length)\n\t}\n\tif config.Duration != 0 {\n\t\tparams[\"duration\"] = strconv.Itoa(config.Duration)\n\t}\n\n\treturn params, nil\n}", "func (config VoiceConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Duration != 0 {\n\t\tparams[\"duration\"] = strconv.Itoa(config.Duration)\n\t}\n\tif config.Caption != \"\" {\n\t\tparams[\"caption\"] = config.Caption\n\t\tif config.ParseMode != \"\" {\n\t\t\tparams[\"parse_mode\"] = config.ParseMode\n\t\t}\n\t}\n\n\treturn params, nil\n}", "func (ctx *Context) GetParams() map[string]string {\n\treturn ctx.Params\n}", "func GetParams(c context.Context, r *http.Request) RouteParams {\n\tif val, ok := c.Get(r, context.BaseCtxKey(\"params\")); ok {\n\t\treturn val.(RouteParams)\n\t}\n\n\treturn RouteParams{}\n}", "func (p *Params) GetParams() *Params {\n\treturn p\n}", "func (p *Params) GetParams() *Params {\n\treturn p\n}", "func (req *Request) ParamsBySource() (map[string]url.Values, error) {\n\tparams := map[string]url.Values{\n\t\t\"url\": req.MuxVariables(),\n\t\t\"query\": req.Request.URL.Query(),\n\t\t\"form\": url.Values{},\n\t}\n\n\tform, err := req.JSONBody()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tparams[\"form\"] = form\n\n\treturn params, nil\n}", "func SearchForParams(lines []string) map[string]string {\n\tre := `<([\\S].+?[\\S])>`\n\tif len(lines) == 1 {\n\t\tr, _ := regexp.Compile(re)\n\n\t\tparams := r.FindAllStringSubmatch(lines[0], -1)\n\t\tif len(params) == 0 {\n\t\t\treturn nil\n\t\t}\n\n\t\textracted := 
map[string]string{}\n\t\tfor _, p := range params {\n\t\t\tsplitted := strings.Split(p[1], \"=\")\n\t\t\tif len(splitted) == 1 {\n\t\t\t\textracted[p[0]] = \"\"\n\t\t\t} else {\n\t\t\t\textracted[p[0]] = splitted[1]\n\t\t\t}\n\t\t}\n\t\treturn extracted\n\t}\n\treturn nil\n}", "func (f *PushFilter) Parameters() url.Values {\n\n\tif f.Params == nil {\n\t\treturn nil\n\t}\n\n\tout := url.Values{}\n\tfor k, v := range f.Params {\n\t\tout[k] = v\n\t}\n\n\treturn out\n}", "func GetQueryes(r *http.Request) url.Values {\n\treturn r.URL.Query()\n}", "func (k Keeper) GetParams(ctx sdk.Context) (params types.Params) {\n\tk.paramSubspace.GetParamSet(ctx, &params)\n\treturn params\n}", "func (db *DB) GetParams() (params core.Params, err error) {\n\tif err := db.Instance.First(&params).Error; err != nil {\n\t\treturn params, err\n\t}\n\treturn params, nil\n}", "func NewFetchParams(timeout time.Duration, url string) *Params {\n\tparams := &Params{\n\t\tTimeout: timeout,\n\t\tURL: url,\n\t}\n\treturn params\n}", "func (k Keeper) GetParams(ctx sdk.Context) (params types.Params) {\n\tk.paramspace.GetParamSet(ctx, &params)\n\treturn params\n}", "func (config DocumentConfig) params() (map[string]string, error) {\n\tparams, _ := config.BaseFile.params()\n\n\tif config.Caption != \"\" {\n\t\tparams[\"caption\"] = config.Caption\n\t\tif config.ParseMode != \"\" {\n\t\t\tparams[\"parse_mode\"] = config.ParseMode\n\t\t}\n\t}\n\n\treturn params, nil\n}", "func parsePlanParams(params *PlanParams, values *url.Values) {\n\n\t// Use parseMetaData from metadata.go to setup the metadata param\n\tif params.Metadata != nil {\n\t\tparseMetadata(params.Metadata, values)\n\t}\n\n\taddParamsToValues(params, values)\n}", "func GetPostParams(r *http.Request) url.Values {\n\tswitch {\n\tcase r.Header.Get(\"Content-Type\") == \"application/json\":\n\t\tparams := map[string]interface{}{}\n\t\tresult := url.Values{}\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\tdecoder.Decode(&params)\n\t\tfor k, v := range params {\n\t\t\tif reflect.ValueOf(v).Kind().String() == \"string\" {\n\t\t\t\tresult.Set(k, v.(string))\n\t\t\t}\n\t\t}\n\t\treturn result\n\tcase r.Header.Get(\"Content-Type\") == \"application/x-www-form-urlencoded\":\n\t\tr.ParseForm()\n\t\treturn r.Form\n\tcase strings.Contains(r.Header.Get(\"Content-Type\"), \"multipart/form-data\"):\n\t\tr.ParseMultipartForm(int64(10 * 1000))\n\t\treturn r.Form\n\t}\n\treturn url.Values{}\n}", "func (a Action) Params() map[string]interface{} {\n\tvar kk string\n\tout := make(map[string]interface{})\n\tfor k, v := range a.params {\n\t\tswitch k.(string) {\n\t\tcase \"context\":\n\t\t\tkk = \"ScriptContext\"\n\t\tcase \"q\":\n\t\t\tkk = \"Query\"\n\t\tcase \"ignore\":\n\t\t\t// TODO: Properly handle ignoring status codes\n\t\t\tcontinue\n\t\tdefault:\n\t\t\tkk = utils.NameToGo(k.(string), utils.APIToGo(a.method))\n\t\t}\n\t\tswitch v.(type) {\n\t\tcase bool:\n\t\t\tout[kk] = v.(bool)\n\t\tcase string:\n\t\t\tout[kk] = v.(string)\n\t\tcase int:\n\t\t\tout[kk] = v.(int)\n\t\tcase float64:\n\t\t\tout[kk] = v.(float64)\n\t\tdefault:\n\t\t\tout[kk] = v\n\t\t}\n\t}\n\n\treturn out\n}", "func WithParams(u *url.URL, nameValues ...string) *url.URL {\n\tlength := len(nameValues)\n\tif length%2 != 0 {\n\t\tpanic(\"nameValues must have even length.\")\n\t}\n\tresult := *u\n\tvalues := result.Query()\n\tfor i := 0; i < length; i += 2 {\n\t\tvalues.Set(nameValues[i], nameValues[i+1])\n\t}\n\tresult.RawQuery = values.Encode()\n\treturn &result\n}", "func (k Keeper) GetParams(ctx sdk.Context) types.Params 
{\n\tvar p types.Params\n\tk.paramSubspace.GetParamSet(ctx, &p)\n\treturn p\n}", "func read_params() *Params {\n\tvar params *Params = new(Params)\n\tflag.IntVar(&params.Customers, \"customers\", 10, \"Number of customers to come to barber shop\")\n\tflag.IntVar(&params.Seats, \"seats\", 3, \"Number of seats in barber shop\")\n\tflag.IntVar(&params.Customer_delay, \"customers_delay\", 5, \"Time between customers incoming to shop [ms]\")\n\tflag.IntVar(&params.Customer_return_delay, \"customers_return_delay\", 20, \"Time between customers' retry if the barber shop is full [ms]\")\n\tflag.IntVar(&params.Barber_delay, \"barber_delay\", 15, \"Time of barber working on customer [ms]\")\n\n\tflag.Parse()\n\treturn params\n}", "func (r *Request) Params(names ...string) map[string]*validation.Value {\n\tif len(names) == 0 {\n\t\treturn r.Input\n\t}\n\tparams := map[string]*validation.Value{}\n\tfor _, n := range names {\n\t\tp, ok := r.Input[n]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tparams[n] = p\n\t}\n\treturn params\n}", "func (q Query) GetParams() (params Parameters) {\n\tif q != nil {\n\t\tparams, _ = q[paramsKey].(Parameters)\n\t}\n\treturn\n}", "func parseParams(params []string) (map[string]interface{}, error) {\n\tparamsMap := make(map[string]interface{})\n\tregex := regexp.MustCompile(`^([A-z_]+[A-z0-9_]*[\\.{1}[A-z0-9_]+]*)=([\\s\\S]*)$`)\n\tfor _, param := range params {\n\t\tif regex.MatchString(param) {\n\t\t\tcaptures := regex.FindStringSubmatch(param)\n\t\t\tparamsMap[captures[1]] = captures[2]\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"'%s' does not match the pattern 'key=var', ex: MODE=test\", param)\n\t\t}\n\t}\n\n\treturn paramsMap, nil\n}", "func (s ServerAPIPaths) GetQueryParams() url.Values {\n\tvals := url.Values{}\n\tt := reflect.TypeOf(s)\n\tfor i := 0; i < t.NumField(); i++ {\n\t\tfield := t.Field(i)\n\t\ttag := field.Tag.Get(\"yaml\")\n\t\tsplitTag := strings.Split(tag, \",\")\n\t\tif len(splitTag) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tname := splitTag[0]\n\t\tif name == \"-\" {\n\t\t\tcontinue\n\t\t}\n\t\tv := reflect.ValueOf(s).Field(i)\n\t\tvals.Add(name, v.String())\n\t}\n\treturn vals\n}", "func getQueryParams(v interface{}, vals url.Values) error {\n\t// normalize all query string key/values\n\targs := make(map[string]string)\n\n\tfor k, v := range vals {\n\t\tif len(v) > 0 {\n\t\t\targs[k] = v[0]\n\t\t}\n\t}\n\n\tb, err := json.Marshal(args)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn json.Unmarshal(b, v)\n}", "func (ctx *Context) QueryParamAll() url.Values {\r\n\tif ctx.queryParams == nil {\r\n\t\tctx.queryParams = ctx.R.URL.Query()\r\n\t}\r\n\treturn ctx.queryParams\r\n}", "func (c Etcd) makeParams(opts KeyOptions) url.Values {\n\tv := url.Values{}\n\t// TODO(ashcrow): This is a hack to avoid colliding with int:0. Fix it.\n\tif opts.CASet != \"\" {\n\t\tv.Set(\"prevIndex\", opts.CASet)\n\t}\n\tif opts.TTL != 0 {\n\t\tv.Set(\"ttl\", strconv.Itoa(opts.TTL))\n\t}\n\treturn v\n}", "func parseQuery(url string) ([]string, error) {\n\turlList := strings.Split(url, \"?\")\n\tif len(urlList) < 2 {\n\t\treturn make([]string, 0), nil\n\t}\n\tquery := make([]string, 0)\n\tfor _, val := range strings.Split(urlList[1], \"&\") {\n\t\tv := strings.Split(val, \"=\")\n\t\tif len(v) < 2 {\n\t\t\treturn make([]string, 0), errors.New(\"query parameter error\")\n\t\t}\n\t\tquery = append(query, fmt.Sprintf(\"%s=%s\", v[0], v[1]))\n\t}\n\treturn query, nil\n}", "func GetQueryParams(r *http.Request) url.Values {\n\tr.ParseForm()\n\treturn r.Form\n}" ]
[ "0.6775707", "0.66441417", "0.66315275", "0.6494454", "0.6422985", "0.6382329", "0.63328004", "0.6284008", "0.62720656", "0.623012", "0.61907434", "0.615531", "0.61098415", "0.6048737", "0.6005734", "0.5959736", "0.59511", "0.59452254", "0.593594", "0.5934275", "0.5930599", "0.5882338", "0.5873272", "0.5850315", "0.58437455", "0.5835657", "0.58307564", "0.58288926", "0.58266294", "0.5815213", "0.5805082", "0.57877904", "0.5786092", "0.5747623", "0.57354003", "0.57296103", "0.5711556", "0.570847", "0.570339", "0.5695172", "0.5685155", "0.563987", "0.56376857", "0.5630618", "0.562673", "0.56067604", "0.5604453", "0.560168", "0.5589175", "0.5585217", "0.55597115", "0.55482453", "0.55402917", "0.5519063", "0.55138886", "0.55022407", "0.5501024", "0.5483862", "0.5481036", "0.54795176", "0.5474551", "0.54663163", "0.54595643", "0.5456791", "0.5446662", "0.54446703", "0.54385936", "0.542584", "0.5414944", "0.54120386", "0.5405974", "0.54042673", "0.53947645", "0.5387332", "0.5386085", "0.5386085", "0.5372915", "0.5372121", "0.5371752", "0.535218", "0.53507286", "0.5335383", "0.53342634", "0.53307503", "0.5329545", "0.5325981", "0.5323078", "0.5322817", "0.5315066", "0.53084993", "0.5299424", "0.529181", "0.5290863", "0.5284909", "0.52695656", "0.5269049", "0.5265373", "0.5253878", "0.5251199", "0.52489465" ]
0.58292097
27
getPrivateKeyName returns the name of the private key of user `name` in the key store.
func (fs *FabricSetup) getPrivateKeyName(name string) (string, error) { ctxProvider2 := fs.sdk.Context(fabsdk.WithOrg(fs.OrgName)) mspClient2, err := msp.New(ctxProvider2) if err != nil { Logr.Fatalf("getPrivateKeyName: Failed to init client: %v", err) return "", ErrInitUser } si, err := mspClient2.GetSigningIdentity(name) if err != nil { Logr.Fatalf("getPrivateKeyName: could not get signing identity of %s: %v", name, err) return "", ErrInitUser } a := si.PrivateKey().SKI() s := hex.EncodeToString(a) + "_sk" return s, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (o *PipelineSshKeyPairAllOf) GetPrivateKey() string {\n\tif o == nil || o.PrivateKey == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.PrivateKey\n}", "func (u user) GetPrivateKey() crypto.PrivateKey {\n\treturn u.key\n}", "func (a *Account) GetPrivateKey() crypto.PrivateKey { return a.key }", "func (km *Keystore) Get(name string) (ci.PrivKey, error) {\n\tif err := validateName(name); err != nil {\n\t\treturn nil, err\n\t}\n\tif has, err := km.Has(name); err != nil {\n\t\treturn nil, err\n\t} else if !has {\n\t\treturn nil, errors.New(ErrNoSuchKey)\n\t}\n\tencryptedPKBytes, err := km.ds.Get(ds.NewKey(name))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treader := bytes.NewReader(encryptedPKBytes)\n\tpkBytes, err := km.em.Decrypt(reader)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ci.UnmarshalPrivateKey(pkBytes)\n}", "func GetPrivateKey(passphrase string, address string) (string, error) {\n\tkeys, err := GetPublicPrivateKey(passphrase, address)\n\n\treturn keys.PrivateKey, err\n}", "func NewPrivateKey(name ndn.Name, key *rsa.PrivateKey) (keychain.PrivateKeyKeyLocatorChanger, error) {\n\tif !keychain.IsKeyName(name) {\n\t\treturn nil, keychain.ErrKeyName\n\t}\n\tvar pvt privateKey\n\tpvt.name = name\n\tpvt.key = key\n\treturn &pvt, nil\n}", "func (w *Wallet) GetPrivateKey() string {\n\treturn hex.EncodeToString(w.PrivateKey.D.Bytes())\n}", "func getPrivateKey(data []byte) ([]byte, error) {\n\tvar der []byte\n\tvar derKey []byte\n\tfor {\n\t\tpemBlock, rest := pem.Decode(data)\n\t\tif pemBlock == nil {\n\t\t\tbreak\n\t\t}\n\t\tif pemBlock.Type != certType {\n\t\t\tder = pemBlock.Bytes\n\t\t}\n\t\tdata = rest\n\t}\n\n\tif key, err := x509.ParsePKCS1PrivateKey(der); err == nil {\n\t\tderKey = x509.MarshalPKCS1PrivateKey(key)\n\t}\n\n\tif key, err := x509.ParsePKCS8PrivateKey(der); err == nil {\n\t\tswitch key := key.(type) {\n\t\tcase *rsa.PrivateKey:\n\t\t\tderKey = x509.MarshalPKCS1PrivateKey(key)\n\t\tcase *ecdsa.PrivateKey:\n\t\t\tderKey, err = x509.MarshalECPrivateKey(key)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"unknown private key type found while getting key. 
Only rsa and ecdsa are supported\")\n\t\t}\n\t}\n\tif key, err := x509.ParseECPrivateKey(der); err == nil {\n\t\tderKey, err = x509.MarshalECPrivateKey(key)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tblock := &pem.Block{\n\t\tType: privateKeyType,\n\t\tBytes: derKey,\n\t}\n\n\treturn pem.EncodeToMemory(block), nil\n}", "func (w *Whisper) GetPrivateKey(id string) (*ecdsa.PrivateKey, error) {\n\tw.keyMu.RLock()\n\tdefer w.keyMu.RUnlock()\n\tkey := w.privateKeys[id]\n\tif key == nil {\n\t\treturn nil, fmt.Errorf(\"invalid id\")\n\t}\n\treturn key, nil\n}", "func GetPrivateKey() ed25519.PrivateKey {\n\tkey, _ := DecodePrivateKey(privateKey)\n\treturn key\n}", "func (o *TppCertificateParams) GetPrivateKey() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.PrivateKey\n}", "func (c *ConfigurationData) GetUserAccountPrivateKey() ([]byte, string) {\n\treturn []byte(c.v.GetString(varUserAccountPrivateKey)), c.v.GetString(varUserAccountPrivateKeyID)\n}", "func (m *AgedAccountsPayable) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (grc *GitRemoteConfiguration) GetPrivateKey() *string {\n\treturn grc.PrivateKey\n}", "func (decryptor *PgDecryptor) GetPrivateKey() (*keys.PrivateKey, error) {\n\tif decryptor.IsWithZone() {\n\t\treturn decryptor.keyStore.GetZonePrivateKey(decryptor.GetMatchedZoneID())\n\t}\n\treturn decryptor.keyStore.GetServerDecryptionPrivateKey(decryptor.clientID)\n}", "func (m *CalendarGroup) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (kt KeyType) PrivateKey() string {\n\treturn kt.KeyBaseName\n}", "func (k *key) getPrivateKey() (*ecdsa.PrivateKey, error) {\n\tby, err := base64.StdEncoding.DecodeString(k.PrivateKeyB64)\n\tif err != nil {\n\t\treturn (*ecdsa.PrivateKey)(nil), err\n\t}\n\n\tblock, _ := pem.Decode([]byte(by))\n\ttempKey, err := x509.ParseECPrivateKey(block.Bytes)\n\tif err != nil {\n\t\treturn (*ecdsa.PrivateKey)(nil), err\n\t}\n\n\treturn tempKey, nil\n}", "func (k *KeyPair) GetPrivateKey() p2pCrypto.PrivKey {\n\treturn k.privKey\n}", "func (a *Account) GetPrivateKey() crypto.PrivateKey {\n\treturn a.key\n}", "func GetWalletPrivKey(privKey string) (*btcutil.WIF, error) {\n\tkey, err := btcutil.DecodeWIF(privKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn key, nil\n}", "func (o *Gojwt) GetPrivKeyPath()(string){\n return o.privKeyPath\n}", "func (sc Credential) getPrivateKey(privateKeyPath string, privateKeyPassphrase string) (ssh.AuthMethod, error) {\n\tif !fileExist(privateKeyPath) {\n\t\tprivateKeyPath = filepath.Join(os.Getenv(\"HOME\"), \".ssh/id_rsa\")\n\t}\n\n\tkey, err := ioutil.ReadFile(privateKeyPath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to parse private key: %v\", err)\n\t}\n\n\tvar signer ssh.Signer\n\tif privateKeyPassphrase != \"\" {\n\t\tsigner, err = ssh.ParsePrivateKeyWithPassphrase(key, []byte(privateKeyPassphrase))\n\t} else {\n\t\tsigner, err = ssh.ParsePrivateKey(key)\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"parse private key failed: %v\", err)\n\t}\n\n\treturn ssh.PublicKeys(signer), nil\n}", "func (o SslCertificateSelfManagedSslCertificateOutput) PrivateKey() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SslCertificateSelfManagedSslCertificate) *string { return v.PrivateKey 
}).(pulumi.StringPtrOutput)\n}", "func (o ProviderOutput) PrivateKey() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *Provider) pulumi.StringPtrOutput { return v.PrivateKey }).(pulumi.StringPtrOutput)\n}", "func LoadPemEncodedPrivateRSAKey(name string) (key *rsa.PrivateKey, err error) {\n\tif k, e := ReadPrivateKey(name); e != nil {\n\t\tpanic(e)\n\t} else {\n\t\tkey = k.(*rsa.PrivateKey)\n\t}\n\tif app.Debug {\n\t\tfmt.Println(Jsonify(key))\n\t}\n\treturn\n}", "func BitcoinPrivateKey() string {\n\treturn \"5\" + Password(true, true, true, false, false, 50)\n}", "func (d *identityManager) PrivateKey() []byte {\n\treturn d.key.PrivateKey\n}", "func (o SslCertificateSelfManagedSslCertificatePtrOutput) PrivateKey() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *SslCertificateSelfManagedSslCertificate) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.PrivateKey\n\t}).(pulumi.StringPtrOutput)\n}", "func GetPrivateKey(bitSize int) *rsa.PrivateKey {\n\tfilePath := \"private.pem\"\n\n\tif _, err := os.Stat(filePath); os.IsNotExist(err) {\n\t\tprivateKey, err := GeneratePrivateKey(bitSize)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"could not generate private key, %v\", err)\n\t\t}\n\n\t\tEncodePrivateKeyToPEM(filePath, privateKey)\n\t\treturn privateKey\n\t}\n\treturn DecodePrivateKeyFromFile(filePath)\n\n}", "func (w *Wallet) PrivateKeyString() string {\n\treturn fmt.Sprintf(\"%x\", w.privateKey.D.Bytes())\n}", "func (s *Service) GetPrivateKey(keyID string) (crypto.PrivateKey, error) {\n\tprivateKey, ok := s.keys[keyID]\n\tif !ok {\n\t\ts.log.Error(\"The specified key was not found\", \"keyID\", keyID)\n\t\treturn nil, signingkeys.ErrSigningKeyNotFound.Errorf(\"The specified key was not found: %s\", keyID)\n\t}\n\n\treturn privateKey, nil\n}", "func (o SslCertificateSelfManagedSslCertificateResponseOutput) PrivateKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v SslCertificateSelfManagedSslCertificateResponse) string { return v.PrivateKey }).(pulumi.StringOutput)\n}", "func (m *MessageFileTypePrivate) GetName() (value string) {\n\tif m == nil {\n\t\treturn\n\t}\n\treturn m.Name\n}", "func GeneratePrivateKey() (string, error) {\n\t// Private Key generation\n\tprivateKey, err := rsa.GenerateKey(rand.Reader, 4096)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn fmt.Sprintf(\"%x\", privateKey.D.Bytes()), nil\n}", "func (o SslCertOutput) PrivateKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *SslCert) pulumi.StringOutput { return v.PrivateKey }).(pulumi.StringOutput)\n}", "func (um *UserManager) GetKeyIDByName(username, keyName string) (string, error) {\n\tu, err := um.FindByUserName(username)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tfor k, v := range u.IPFSKeyNames {\n\t\tif v == keyName {\n\t\t\treturn u.IPFSKeyIDs[k], nil\n\t\t}\n\t}\n\treturn \"\", errors.New(\"key not found\")\n}", "func (o SslCertificateSelfManagedSslCertificateResponsePtrOutput) PrivateKey() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *SslCertificateSelfManagedSslCertificateResponse) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.PrivateKey\n\t}).(pulumi.StringPtrOutput)\n}", "func GetAccountKeyPairFor(name string) (string, string) {\n\n\tar := AccountsRepository()\n\tpk1, ok := ar.publicKey[name]\n\tvar puk, prk string\n\tif ok {\n\t\tpuk = pk1\n\t} else {\n\t\tpuk = \"\"\n\t}\n\tpk2, ok := ar.privateKey[name]\n\tif ok {\n\t\tprk = pk2\n\t} else {\n\t\tprk = \"\"\n\t}\n\treturn puk, prk\n}", "func (m *PolicyRule) GetName()(*string) {\n val, err := 
m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (o *PipelineSshKeyPairAllOf) GetPrivateKeyOk() (*string, bool) {\n\tif o == nil || o.PrivateKey == nil {\n\t\treturn nil, false\n\t}\n\treturn o.PrivateKey, true\n}", "func (m *ChatMessageAttachment) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (o *TppCredentialsParams) GetTppName() string {\n\tif o == nil || o.TppName == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.TppName\n}", "func (cfg X509Config) GetTLSPrivateKeyFileName() string {\n\treturn cfg.TLSServer.TLSHost + \".\" + skFileExt\n}", "func (mc *MoacChain) exportWalletPrivateKey(addr, addrPasswd string) (privateKey string, err error) {\n\n\tdefer func() {\n\t\tif re := recover(); re != nil {\n\t\t\terr = re.(error)\n\t\t}\n\t}()\n\n\tvar acc accounts.Account\n\tacc, err = mc.fetchKeystore().Find(accounts.Account{Address: common.HexToAddress(addr)})\n\tif err == nil {\n\t\tvar jsonBytes []byte\n\t\tjsonBytes, err = ioutil.ReadFile(acc.URL.Path)\n\t\tif err == nil {\n\t\t\tvar storeKey *keystore.Key\n\t\t\tstoreKey, err = keystore.DecryptKey(jsonBytes, addrPasswd)\n\t\t\tif err == nil {\n\t\t\t\tprivateKey = hex.EncodeToString(ethMath.PaddedBigBytes(storeKey.PrivateKey.D, storeKey.PrivateKey.Params().BitSize/8))\n\t\t\t}\n\t\t}\n\t}\n\n\treturn privateKey, err\n}", "func GetShadowPasswordName(name string) (*ShadowPassword, error) {\n\tnameC := C.CString(name)\n\tdefer C.free(unsafe.Pointer(nameC))\n\tspasswordC, err := C.getspnam(nameC)\n\n\t// If matching shadow password record cannot be found, `getspnam()` should\n\t// return NULL and leave `errno` unchanged.\n\tif spasswordC == nil {\n\t\tif err == nil {\n\t\t\treturn nil, errors.New(\"Shadow password record not found.\")\n\t\t} else {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn convertShadowPassword(spasswordC), nil\n}", "func GetKeyValueExpireViaName(iName string) (*KeyValueExpire, error) {\n\tvar _KeyValueExpire = &KeyValueExpire{Name: iName}\n\thas, err := Engine.Get(_KeyValueExpire)\n\tif has {\n\t\treturn _KeyValueExpire, err\n\t} else {\n\t\treturn nil, err\n\t}\n}", "func readPrivateKey(path string) (*rsa.PrivateKey, error) {\n\t// https://stackoverflow.com/a/44231740/559350\n\tdata, err := readFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tblock, _ := pem.Decode(data)\n\tif block == nil {\n\t\treturn nil, errors.New(\"cannot parse PEM-encoded private key\")\n\t}\n\treturn x509.ParsePKCS1PrivateKey(block.Bytes)\n}", "func (c *Crypto) PrivateKeyPath() string {\n\treturn c.privateKeyPath\n}", "func privateKeyToPem(key *rsa.PrivateKey) string {\n\tkeyInBytes := x509.MarshalPKCS1PrivateKey(key)\n\tkeyinPem := pem.EncodeToMemory(\n\t\t&pem.Block{\n\t\t\tType: \"RSA PRIVATE KEY\",\n\t\t\tBytes: keyInBytes,\n\t\t},\n\t)\n\treturn string(keyinPem)\n}", "func (o SslCertificateOutput) PrivateKey() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *SslCertificate) pulumi.StringOutput { return v.PrivateKey }).(pulumi.StringOutput)\n}", "func readPrivateKey(path string) (*rsa.PrivateKey, error) {\n\tdata, err := readFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tkeyBlock, _ := pem.Decode(data)\n\tif key, err := x509.ParsePKCS1PrivateKey(keyBlock.Bytes); err == nil {\n\t\treturn key, nil\n\t} else if key, err := x509.ParsePKCS8PrivateKey(keyBlock.Bytes); err == nil {\n\t\treturn 
key.(*rsa.PrivateKey), nil\n\t}\n\treturn nil, err\n}", "func (e *EtcdCert) PrivateKeyPath() string { return path.Join(e.BaseDir, etcdKeyFileName) }", "func generateKeyPairName() string {\n\tid := fmt.Sprintf(\"%x\", rand.Int())\n\treturn securityGroupNamePrefix + id\n}", "func (serv *ExchangeServer) GetAddrPrivKey(cp, addr string) (string, error) {\n\t_, key, err := serv.wallets.GetKeypair(cp, addr)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn key, nil\n}", "func (p *ec2ProviderImpl) getPrivateDNSNameCache(id string) (string, error) {\n\tp.privateDNSCache.lock.RLock()\n\tdefer p.privateDNSCache.lock.RUnlock()\n\tname, ok := p.privateDNSCache.cache[id]\n\tif ok {\n\t\treturn name, nil\n\t}\n\treturn \"\", errors.New(\"instance id not found\")\n}", "func GetPrivateKey(address, password, folderPath string) (*ecdsa.PrivateKey, error) {\n\n\t// Get the file that contains the private key\n\tfile, err := getUTCFile(address[2:], folderPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Read the file\n\tjsonBytes, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get the private key\n\tkeyWrapper, err := keystore.DecryptKey(jsonBytes, password)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn keyWrapper.PrivateKey, nil\n}", "func LoadPrivateKey(basepath, name string) (*PrivateKey, error) {\n\tkeysdir := filepath.Join(basepath, \"keys\")\n\tprivateKeyPath := fmt.Sprintf(DefaultKeystoreFile, keysdir, name, PrivateType)\n\n\tprivateExists, err := checkFileStat(privateKeyPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif !privateExists {\n\t\treturn nil, ErrKeyNotFound\n\t}\n\n\tprivateKeyData, err := ioutil.ReadFile(privateKeyPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpemBlock, _ := pem.Decode(privateKeyData)\n\tecdsaPrivateKey, err := x509.ParseECPrivateKey(pemBlock.Bytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &PrivateKey{\n\t\tecdsaPrivateKey,\n\t}, nil\n}", "func (P2PKeyResource) GetName() string {\n\treturn \"encryptedP2PKeys\"\n}", "func (c Certificate) GetPrivateKey() []byte {\n\treturn c.privateKey\n}", "func GetSecretName(saName string) string {\n\treturn secretNamePrefix + saName\n}", "func GeneratePrivateKey(algorithm string) (k *PrivateKey, err error) {\n\tswitch algorithm {\n\tcase \"(ecdsa-sha2 (curve p256))\":\n\t\treturn GenerateP256Key()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unknown algorithm '%s'\", algorithm)\n\t}\n}", "func GetTokenPrivateKey() []byte {\n\treturn []byte(viper.GetString(varTokenPrivateKey))\n}", "func readPrivateKey() ([]byte, error) {\n\tkeyData, err := ioutil.ReadFile(authConfigFile)\n\tif err != nil {\n\t\tlog.Errorf(\"Returning error, authConfigFile %s not found\", authConfigFile)\n\t\treturn []byte{}, err\n\t}\n\n\tlog.Debug(\"Key: %s\", string(keyData))\n\treturn keyData, nil\n}", "func GetPrivateKey() (*ecdsa.PrivateKey, error) {\n\t// Opens the wallet\n\tfile, err := os.Open(\"aurum_wallet.json\")\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to open wallet\")\n\t}\n\tdefer file.Close()\n\n\t// Reads the file and stores the data into a byte slice\n\tdata, err := ioutil.ReadAll(file)\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to read wallet\")\n\t}\n\n\t// Json struct for storing the private key from the json file\n\ttype jsonStruct struct {\n\t\tPrivateKey string\n\t}\n\n\t// Parse the data from the json file into a jsonStruct\n\tvar j jsonStruct\n\terr = json.Unmarshal(data, &j)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// 
Decodes the private key from the jsonStruct\n\tpemEncoded, _ := hex.DecodeString(j.PrivateKey)\n\tprivateKey, err := privatekey.Decode(pemEncoded)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn privateKey, nil\n}", "func readPrivateKey() ([]byte, error) {\n\tprivateKey, e := ioutil.ReadFile(\"keys/sample-key\")\n\treturn privateKey, e\n}", "func (kp *MockKeyProvider) GetPrivateKey() (crypto.PrivateKey, error) {\n\treturn kp.PrivateKey, kp.Err\n}", "func (s *ClusterScope) GetPrivateDNSZoneName() string {\n\tif len(s.AzureCluster.Spec.NetworkSpec.PrivateDNSZoneName) > 0 {\n\t\treturn s.AzureCluster.Spec.NetworkSpec.PrivateDNSZoneName\n\t}\n\treturn azure.GeneratePrivateDNSZoneName(s.ClusterName())\n}", "func (Functions) GoPrivateName(obj interface{}) string {\n\treturn nameOptions{\n\t\tUntitleFirst: true,\n\t\tUnderscoreToTitle: true,\n\t\tPreserveSpecial: true,\n\t\tRemap: goKeywords,\n\t}.convert(nameOf(obj))\n}", "func decryptByName(ctx context.Context, decoderName string) (*secrets.Keeper, string, error) {\n\tif !strings.HasPrefix(decoderName, \"decrypt\") {\n\t\treturn nil, decoderName, nil\n\t}\n\tkeeperURL := os.Getenv(\"RUNTIMEVAR_KEEPER_URL\")\n\tif keeperURL == \"\" {\n\t\treturn nil, \"\", errors.New(\"environment variable RUNTIMEVAR_KEEPER_URL needed to open a *secrets.Keeper for decryption\")\n\t}\n\tk, err := secrets.OpenKeeper(ctx, keeperURL)\n\tif err != nil {\n\t\treturn nil, \"\", err\n\t}\n\tdecoderName = strings.TrimPrefix(decoderName, \"decrypt\")\n\tif decoderName != \"\" {\n\t\tdecoderName = strings.TrimLeftFunc(decoderName, func(r rune) bool {\n\t\t\treturn r == ' ' || r == '+'\n\t\t})\n\t}\n\t// The parsed value is \"decrypt <decoderName>\".\n\treturn k, decoderName, nil\n}", "func (u *walletIdentity) PrivateKey() core.Key {\n\treturn u.privateKey\n}", "func GetAnotherPrivateKey() (*ecdsa.PrivateKey, error) {\n\treturn crypto.HexToECDSA(\"a05b7b4580376959940f3bbdb84dab4780c49e97f47c1e8792c12963552931b3\")\n}", "func (cfg X509Config) GetCAPrivateKeyFileName() string {\n\treturn cfg.RootCA.CAName + \".\" + skFileExt\n}", "func (o *RequestSepaMoneyTransferParams) GetRecipientName() string {\n\tif o == nil || o.RecipientName == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.RecipientName\n}", "func GetModKeyByName(name string) ModKey {\n\treturn modKeyMap[name]\n}", "func PrivateKeyFile(usage Usage, version scrypto.KeyVersion) string {\n\treturn fmt.Sprintf(\"%s-v%d.key\", usage, version)\n}", "func aptKeyGetName(key string) (name string, err error) {\n\tel, err := openpgp.ReadArmoredKeyRing(bytes.NewBufferString(key))\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif len(el) == 0 {\n\t\terr = fmt.Errorf(\"Error determining userid of key\")\n\t\treturn\n\t}\n\n\tidentities := el[0].Identities\n\tfor k, _ := range identities {\n\t\tif name == \"\" {\n\t\t\tname = k\n\t\t}\n\t}\n\n\treturn\n}", "func (ms *MemStore) GetPrivateKeyEntry(pubKeyHash string) (*uid.KeyEntry, error) {\n\tke, ok := ms.privateKeyEntryMap[pubKeyHash]\n\tif !ok {\n\t\treturn nil, log.Error(session.ErrNoKeyEntry)\n\t}\n\treturn ke, nil\n}", "func (m *Win32LobAppRegistryDetection) GetValueName()(*string) {\n val, err := m.GetBackingStore().Get(\"valueName\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (factory *Factory) GetSecretName() string {\n\treturn factory.singleton.certificate.Name\n}", "func GetPrivateKey(loader KeyLoader) (*rsa.PrivateKey, error) {\n\tif loader == nil {\n\t\treturn nil, errors.New(\"no 
loader\")\n\t}\n\n\tdata, err := loader.GetBytes()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tprivPem, _ := pem.Decode(data)\n\tif privPem.Type != \"RSA PRIVATE KEY\" {\n\t\treturn nil, errors.New(\"incorrect pem type: \" + privPem.Type)\n\t}\n\n\tvar parsedKey interface{}\n\tif parsedKey, err = x509.ParsePKCS1PrivateKey(privPem.Bytes); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif privateKey, ok := parsedKey.(*rsa.PrivateKey); !ok {\n\t\treturn nil, errors.New(\"failed convert parsed key to private key\")\n\t} else {\n\t\treturn privateKey, nil\n\t}\n}", "func (ks *KeyStore) Name() string {\n\treturn name\n}", "func (s SSHAuthKeys) Name() string {\n\treturn authKeysFilename\n}", "func genKey() (peerid string, privatekey string, err error) {\n\t// generate private key\n\tpriv, _, err := crypto.GenerateKeyPairWithReader(crypto.Ed25519, -1, crand.Reader)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// convert to bytes\n\tkBytes, err := crypto.MarshalPrivateKey(priv)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\t// Obtain Peer ID from public key\n\tpid, err := libp2p_peer.IDFromPublicKey(priv.GetPublic())\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\treturn pid.String(), base64.StdEncoding.EncodeToString(kBytes), nil\n}", "func (k *EdX25519Key) PrivateKey() *[ed25519.PrivateKeySize]byte {\n\treturn k.privateKey\n}", "func PrivateKey(filename string) (*rsa.PrivateKey, error) {\n\trsaPrivateKey, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn jwt.ParseRSAPrivateKeyFromPEM(rsaPrivateKey)\n}", "func (ks *KeyStore) PrivateKey(kid string) (*rsa.PrivateKey, error) {\n\tks.mu.RLock()\n\tdefer ks.mu.RUnlock()\n\n\tprivateKey, found := ks.store[kid]\n\tif !found {\n\t\treturn nil, errors.New(\"kid lookup failed\")\n\t}\n\treturn privateKey, nil\n}", "func (c *ConfigurationData) GetServiceAccountPrivateKey() ([]byte, string) {\n\treturn []byte(c.v.GetString(varServiceAccountPrivateKey)), c.v.GetString(varServiceAccountPrivateKeyID)\n}", "func (c *HTTPClient) GetPrivKey() crypto.PrivateKey {\n\treturn c.privKey\n}", "func (p *ProvisionTokenV2) GetName() string {\n\treturn p.Metadata.Name\n}", "func (m *DeviceManagementIntentDeviceState) GetUserPrincipalName()(*string) {\n val, err := m.GetBackingStore().Get(\"userPrincipalName\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func getKeyPath(name string) string {\n\treturn configDir + \"/hil-vpn-\" + name + \".key\"\n}", "func (m *DeviceManagementConfigurationPolicy) GetName()(*string) {\n val, err := m.GetBackingStore().Get(\"name\")\n if err != nil {\n panic(err)\n }\n if val != nil {\n return val.(*string)\n }\n return nil\n}", "func (p *PrivateKey) PrivateKey() *ecdsa.PrivateKey {\n\treturn p.privateKey\n}", "func (e *EtcdClientCert) PrivateKeyPath() string { return path.Join(e.BaseDir, etcdClientKeyFileName) }", "func ImportFromPrivateKey(privateKey, name, passphrase string) (string, error) {\n\tprivateKey = strings.TrimPrefix(privateKey, \"0x\")\n\n\tif name == \"\" {\n\t\tname = generateName() + \"-imported\"\n\t\tfor store.DoesNamedAccountExist(name) {\n\t\t\tname = generateName() + \"-imported\"\n\t\t}\n\t} else if store.DoesNamedAccountExist(name) {\n\t\treturn \"\", fmt.Errorf(\"account %s already exists\", name)\n\t}\n\n\tprivateKeyBytes, err := hex.DecodeString(privateKey)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif len(privateKeyBytes) != common.Secp256k1PrivateKeyBytesLength {\n\t\treturn \"\", 
common.ErrBadKeyLength\n\t}\n\n\t// btcec.PrivKeyFromBytes only returns a secret key and public key\n\tsk, _ := btcec.PrivKeyFromBytes(btcec.S256(), privateKeyBytes)\n\toneAddress := address.ToBech32(crypto.PubkeyToAddress(sk.PublicKey))\n\n\tif store.FromAddress(oneAddress) != nil {\n\t\treturn \"\", fmt.Errorf(\"address %s already exists\", oneAddress)\n\t}\n\n\tks := store.FromAccountName(name)\n\t_, err = ks.ImportECDSA(sk.ToECDSA(), passphrase)\n\treturn name, err\n}", "func (x *Ed25519Credentials) PrivateKey() PrivateKey {\n\n\treturn PrivateKey{\n\t\tAlgorithm: AlgorithmEd25519,\n\t\tPrivate: base64.URLEncoding.EncodeToString(x.Private[:]),\n\t}\n\n}", "func getSecretName(name, suffix string) *string {\n\t// 2 chars for '%s' in suffix\n\tif len(name) > 251-len(suffix) {\n\t\tname = name[0 : 251-len(suffix)]\n\t}\n\ts := fmt.Sprintf(suffix, name)\n\n\treturn &s\n}", "func parsePrivateKey(der []byte) (crypto.PrivateKey, error) {\n\tif key, err := gmx509.ParsePKCS8SM2PrivateKey(der); err == nil {\n\t\treturn key, nil\n\t}\n\n\tif key, err := gmx509.ParseSM2PrivateKey(der); err == nil {\n\t\treturn key, nil\n\t}\n\n\treturn nil, errors.New(\"tls: failed to parse private key\")\n}", "func (a *Client) GetPrivateChangeSubaccountName(params *GetPrivateChangeSubaccountNameParams) (*GetPrivateChangeSubaccountNameOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetPrivateChangeSubaccountNameParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"GetPrivateChangeSubaccountName\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/private/change_subaccount_name\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &GetPrivateChangeSubaccountNameReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetPrivateChangeSubaccountNameOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for GetPrivateChangeSubaccountName: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}" ]
[ "0.60367626", "0.59223175", "0.57508206", "0.5750226", "0.57476753", "0.57200986", "0.5614334", "0.5601044", "0.5586634", "0.5534874", "0.5503815", "0.5434949", "0.5353804", "0.5348842", "0.53186756", "0.5272389", "0.5270665", "0.52706563", "0.5259415", "0.525318", "0.52186894", "0.52186084", "0.5213778", "0.52108794", "0.521049", "0.52054095", "0.52014667", "0.51958746", "0.5187452", "0.51822656", "0.517855", "0.5176877", "0.51544285", "0.51537395", "0.51536494", "0.5143515", "0.51416624", "0.51322776", "0.51188654", "0.5088678", "0.50878495", "0.5072398", "0.5045832", "0.50282985", "0.5026788", "0.50203663", "0.50183815", "0.50148666", "0.50119275", "0.5011544", "0.501085", "0.5010579", "0.5000158", "0.49970832", "0.49898243", "0.49812415", "0.49772725", "0.49751666", "0.49600422", "0.49558356", "0.4953401", "0.49522", "0.495209", "0.4950377", "0.49438202", "0.4943731", "0.49391085", "0.4925646", "0.49234793", "0.49195233", "0.49191043", "0.49176976", "0.48983586", "0.4894114", "0.48906627", "0.4888949", "0.4884596", "0.48782533", "0.4876637", "0.48445004", "0.48336095", "0.48252743", "0.48042807", "0.4801549", "0.4800909", "0.47899905", "0.47856057", "0.47719237", "0.47685277", "0.4762815", "0.4762453", "0.4755761", "0.47537953", "0.47532442", "0.4741941", "0.4740327", "0.47337928", "0.47175628", "0.47163725", "0.4708984" ]
0.7462058
0
using strings.Index to find substring
func main() { //NOTE there are two blanks after comma(,) tracer := "死神來了, 死神bye bye" comma := strings.Index(tracer, ",") fmt.Printf("comma is %d\n", comma) pos := strings.Index(tracer[comma:], "死神") fmt.Printf("pos is %d\n", pos) fmt.Println(comma, pos, tracer[comma+pos:]) fmt.Println(comma, pos, tracer[comma:]) //sol. //死 神 來 了 , 死 神 b y e b y e //0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 fmt.Printf("len of tracer is %d\n", utf8.RuneCountInString(tracer)) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func IndexString(a, b string) int", "func Index(substr, operand string) int { return strings.Index(operand, substr) }", "func TestFindStringIndex(t *testing.T) {\n\tregex := regexp.MustCompile(\"Brian\")\n\tsubject := \"Hello Brian\"\n\tindex := regex.FindStringIndex(subject)\n\tAssert(6, index[0], t)\n\tAssert(11, index[1], t)\n}", "func IndexOf(str string, sub string, start int) int {\n\n\tif start < 0 {\n\t\tstart = 0\n\t}\n\n\tif len(str) < start {\n\t\treturn INDEX_NOT_FOUND\n\t}\n\n\tif IsEmpty(str) || IsEmpty(sub) {\n\t\treturn INDEX_NOT_FOUND\n\t}\n\n\tpartialIndex := strings.Index(str[start:len(str)], sub)\n\tif partialIndex == -1 {\n\t\treturn INDEX_NOT_FOUND\n\t}\n\treturn partialIndex + start\n}", "func getIndexPosition(str, substr string) int {\n\treturn strings.Index(str, substr)\n}", "func TestStringsIndex(t *testing.T) {\n\ttcs := []struct {\n\t\tword string\n\t\tsubstr string\n\t\texp int\n\t}{\n\t\t{word: \"Gophers are amazing!\", substr: \"are\", exp: 8},\n\t\t{word: \"Testing in Go is fun.\", substr: \"fun\", exp: 17},\n\t\t{word: \"The answer is 42.\", substr: \"is\", exp: 11},\n\t}\n\n\tfor i := range tcs {\n\t\ttc := tcs[i]\n\t\tt.Run(tc.word, func(t *testing.T) {\n\t\t\tt.Parallel()\n\t\t\tgot := strings.Index(tc.word, tc.substr)\n\t\t\tt.Logf(\"testing %q\", tc.word)\n\t\t\tif got != tc.exp {\n\t\t\t\tt.Errorf(\"unexpected value '%s' index of '%s' got: %d, exp: %d\", tc.substr, tc.word, got, tc.exp)\n\t\t\t}\n\t\t})\n\t}\n}", "func (re *RegexpStd) FindStringIndex(s string) (loc []int) {\n\t// a := re.doExecute(nil, nil, s, 0, 2, nil)\n\t// if a == nil {\n\t// \treturn nil\n\t// }\n\t// return a[0:2]\n\tpanic(\"\")\n}", "func (s *Stringish) Index(str string) int {\n\treturn strings.Index(s.str, str)\n}", "func IndexOf(str1, str2 string, off int) int {\n\tindex := strings.Index(str1[off:], str2)\n\tif index == -1 {\n\t\treturn -1\n\t}\n\treturn index + off\n}", "func main() {\n\t//fmt.Println(findSubstring(\"barfoothefoobarman\", []string{\"foo\", \"bar\"}))\n\t//fmt.Println(findSubstring(\"wordgoodgoodgoodbestword\", []string{\"word\", \"good\", \"best\", \"good\"}))\n\tfmt.Println(findSubstring(\"foobarfoobar\", []string{\"foo\", \"bar\"}))\n}", "func StrAt(slice []string, val string) int {\n\tfor i, v := range slice {\n\t\tif v == val {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func Test_Index(t *testing.T) {\n\ttt := []struct {\n\t\tValue string\n\t\tSubstring string\n\t\tAnswer int\n\t}{\n\t\t{\n\t\t\t\"Gophers are amazing!\",\n\t\t\t\"are\",\n\t\t\t8,\n\t\t},\n\t\t{\n\t\t\t\"Testing in Go is fun.\",\n\t\t\t\"fun\",\n\t\t\t17,\n\t\t},\n\t\t{\n\t\t\t\"The answer is 42.\",\n\t\t\t\"is\",\n\t\t\t11,\n\t\t},\n\t}\n\n\tfor _, test := range tt {\n\t\tif actual := strings.Index(test.Value, test.Substring); actual != test.Answer {\n\t\t\tt.Fatalf(\"expected index of substring '%s' in string '%s' to be %v\", test.Substring, test.Value, test.Answer)\n\t\t}\n\t}\n}", "func strStr(haystack string, needle string) int {\n if needle == \"\" {\n return 0\n }\n if haystack == \"\" {\n return -1\n }\n lh := len(haystack)\n ln := len(needle)\n check := needle[0]\n start := 0\n \n for start + ln <= lh {\n if string(haystack[start:start+ln]) == needle {\n return start\n }\n start++\n for start < lh && haystack[start] != check {\n start++\n }\n }\n return -1\n}", "func indexOfString(h []string, n string) int {\n\tfor i, v := range h {\n\t\tif v == n {\n\t\t\treturn i\n\t\t}\n\t}\n\n\treturn -1\n}", "func IndexString(vs []string, t string) int {\n for i, v := range vs {\n if v 
== t {\n return i\n }\n }\n return -1\n}", "func (re *RegexpStd) FindStringSubmatchIndex(s string) []int {\n\t//return re.pad(re.doExecute(nil, nil, s, 0, re.prog.NumCap, nil))\n\tpanic(\"\")\n}", "func TestFindAllStringIndex(t *testing.T) {\n\tregex := regexp.MustCompile(\"Brian\")\n\tsubject := \"Brian. Meet Brian\"\n\tindexes := regex.FindAllStringIndex(subject, 2)\n\tAssert(0, indexes[0][0], t)\n\tAssert(5, indexes[0][1], t)\n\tAssert(12, indexes[1][0], t)\n\tAssert(17, indexes[1][1], t)\n}", "func IndexStr(haystack []string, needle string) int {\n\tfor idx, s := range haystack {\n\t\tif s == needle {\n\t\t\treturn idx\n\t\t}\n\t}\n\treturn -1\n}", "func (self *kmp) FindStringIndex(s string) int {\n\t// sanity check\n\tif len(s) < self.size {\n\t\treturn -1\n\t}\n\tm, i := 0, 0\n\tfor m+i < len(s) {\n\t\tif self.pattern[i] == s[m+i] {\n\t\t\tif i == self.size-1 {\n\t\t\t\treturn m\n\t\t\t}\n\t\t\ti++\n\t\t} else {\n\t\t\tm += i - self.next[i]\n\t\t\tif self.next[i] > -1 {\n\t\t\t\ti = self.next[i]\n\t\t\t} else {\n\t\t\t\ti = 0\n\t\t\t}\n\t\t}\n\t}\n\treturn -1\n}", "func TestFindIndex(t *testing.T) {\n\tregex := regexp.MustCompile(\"Brian\")\n\tsubject := []byte(\"My name is Brian, pleased to meet you\")\n\tindex := regex.FindIndex(subject)\n\tAssert(index[0], 11, t)\n\tAssert(index[1], 16, t)\n}", "func indexInSlice(slice []string, substr string) int {\n\tfor i := range slice {\n\t\tif strings.Contains(slice[i], substr) {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func (slice StringSlice) IndexOf(str string) int {\n\tfor p, v := range slice {\n\t\tif v == str {\n\t\t\treturn p\n\t\t}\n\t}\n\treturn -1\n}", "func getStrIndex(strings []string, s string) int {\n\tfor i, col := range strings {\n\t\tif col == s {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func Index(s, t string) int {\n\tx := 0\n\ty := 0\n\tfor _, c := range s {\n\t\tif c == c {\n\t\t\tx++\n\t\t}\n\t}\n\tfor _, c := range t {\n\t\tif c == c {\n\t\t\ty++\n\t\t}\n\t}\n\tfor i := 0; i < x; i++ {\n\t\tif y != 0 && s[i] == t[0] {\n\t\t\tok := true\n\t\t\tmok := 0\n\t\t\tfor j := 0; j < y; j++ {\n\t\t\t\tif i+mok >= x || t[j] != s[i+mok] {\n\t\t\t\t\tok = false\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tmok++\n\t\t\t}\n\t\t\tif ok == true {\n\t\t\t\treturn i\n\t\t\t}\n\t\t}\n\t}\n\treturn -1\n}", "func TestStringIndex(t *testing.T) {\n\tt.Parallel()\n\tvar tests = []struct {\n\t\thaystack []string\n\t\tneedle string\n\t\texpected int\n\t}{\n\t\t{[]string{\"foo\", \"bar\"}, \"foo\", 0},\n\t\t{[]string{\"foo\", \"bar\"}, \"bar\", 1},\n\t\t{[]string{\"foo\", \"bar\"}, \"\\u0062\\u0061\\u0072\", 1},\n\t\t{[]string{\"foo\", \"bar\"}, \"\", -1},\n\t\t{[]string{\"foo\", \"bar\"}, \"blah\", -1},\n\t}\n\tfor _, test := range tests {\n\t\tactual := primitives.Index(&test.haystack, test.needle)\n\t\tassert.Equal(t, test.expected, actual, \"expected value '%v' | actual : '%v'\", test.expected, actual)\n\t}\n}", "func (k *Kmp) Index(s string) int {\n\tlen1, len2 := len(s), len(k.pattern)\n\ti, j := 0, 0\n\tfor i < len1 && j < len2 {\n\t\tif j == -1 || s[i] == k.pattern[j] {\n\t\t\ti++\n\t\t\tj++\n\t\t} else {\n\t\t\tj = k.next[j]\n\t\t}\n\t}\n\tif j == len2 {\n\t\treturn i - j\n\t} else {\n\t\treturn -1\n\t}\n}", "func stringPositionInSlice(a string, list []string) (int, error) {\n\tfor i, v := range list {\n\t\tmatch, _ := regexp.MatchString(a, v)\n\t\tif match {\n\t\t\treturn i, nil\n\t\t}\n\t}\n\treturn 0, fmt.Errorf(\"No matching headers\")\n}", "func strStr(haystack string, needle string) int {\n\tif len(haystack) == 0{\n\t\treturn 
0\n\t}\n\tindex := strings.Index(haystack , needle)\n\treturn index\n}", "func indexOfStringSlice(strings []string, s string) int {\n\tfor i, b := range strings {\n\t\tif b == s {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func (t *StringSlice) Index(s string) int {\n\tret := -1\n\tfor i, item := range t.items {\n\t\tif s == item {\n\t\t\tret = i\n\t\t\tbreak\n\t\t}\n\t}\n\treturn ret\n}", "func Index(ss []string, s string) int {\n\tfor i, b := range ss {\n\t\tif b == s {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func strStr1(str string, pat string) int {\n\t// go over the entire string\n\ti, j := 0, 0\n\tfor ; i < len(str) && j < len(pat); i++ {\n\n\t\tif str[i] == pat[j] { // if not found break and continue\n\t\t\tj++\n\t\t} else {\n\t\t\ti -= j // explicit backup\n\t\t\tj = 0 // reset j\n\t\t}\n\n\t}\n\tif j == len(pat) {\n\t\treturn i - len(pat)\n\t}\n\treturn -1 // string not found\n}", "func GetStringIndex(slice []string, target string) int {\n\tfor i := range slice {\n\t\tif slice[i] == target {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func IndexOfString(value string, list []string) int {\n\tfor i, match := range list {\n\t\tif match == value {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func IndexOf(ss []string, e string) int {\n\tfor i, s := range ss {\n\t\tif s == e {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func indexOfSingleDiff(s1 string, s2 string) int {\r\n\tfor i, val := range s1 {\r\n\t\tif val != rune(s2[i]) {\r\n\t\t\treturn i\r\n\t\t}\r\n\t}\r\n\treturn -1\r\n}", "func StringsIndex(s []string, want string) int {\n\tfor i, str := range s {\n\t\tif str == want {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func IndexWithTable(d *[256]int, s string, substr string) int {\n\tlsub := len(substr)\n\tls := len(s)\n\t// fmt.Println(ls, lsub)\n\tswitch {\n\tcase lsub == 0:\n\t\treturn 0\n\tcase lsub > ls:\n\t\treturn -1\n\tcase lsub == ls:\n\t\tif s == substr {\n\t\t\treturn 0\n\t\t}\n\t\treturn -1\n\t}\n\n\ti := 0\n\tfor i+lsub-1 < ls {\n\t\tj := lsub - 1\n\t\tfor ; j >= 0 && s[i+j] == substr[j]; j-- {\n\t\t}\n\t\tif j < 0 {\n\t\t\treturn i\n\t\t}\n\n\t\tslid := j - d[s[i+j]]\n\t\tif slid < 1 {\n\t\t\tslid = 1\n\t\t}\n\t\ti += slid\n\t}\n\treturn -1\n}", "func StringSliceIndexOf(slice []string, s string) int {\n\treturn strings.IndexOf(slice, s)\n}", "func DetectSubStr(str, substr string) int {\n\tidx, j := 0, 0\n\tflag := false\n\n\tfor i := range str {\n\t\tif str[i] == substr[j] && !flag {\n\t\t\tidx = i\n\t\t\tj++\n\t\t\tflag = true\n\t\t} else if str[i] == substr[j] && flag {\n\t\t\tj++\n\t\t} else if str[i] != substr[j] && flag {\n\t\t\tj = 0\n\t\t\tflag = false\n\t\t}\n\n\t\tif j == len(substr)-1 {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif flag {\n\t\treturn idx\n\t}\n\n\treturn -1\n}", "func Test_Index_WithSubTest(t *testing.T) {\n\ttt := []struct {\n\t\tValue string\n\t\tSubstring string\n\t\tAnswer int\n\t}{\n\t\t{\n\t\t\t\"Gophers are amazing!\",\n\t\t\t\"are\",\n\t\t\t8,\n\t\t},\n\t\t{\n\t\t\t\"Testing in Go is fun.\",\n\t\t\t\"fun\",\n\t\t\t17,\n\t\t},\n\t\t{\n\t\t\t\"The answer is 42.\",\n\t\t\t\"is\",\n\t\t\t11,\n\t\t},\n\t}\n\n\tfor i, test := range tt {\n\n\t\tt.Run(fmt.Sprintf(\"sub test (%d)\", i), func(st *testing.T) {\n\t\t\tif actual := strings.Index(test.Value, test.Substring); actual != test.Answer {\n\t\t\t\tst.Fatalf(\"expected index of substring '%s' in string '%s' to be %v\", test.Substring, test.Value, test.Answer)\n\t\t\t}\n\t\t})\n\t}\n}", "func posString(slice []string, element string) int {\n\tfor index, elem := range slice 
{\n\t\tif elem == element {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func posString(slice []string, element string) int {\n\tfor index, elem := range slice {\n\t\tif elem == element {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func posString(slice []string, element string) int {\n\tfor index, elem := range slice {\n\t\tif elem == element {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func posString(slice []string, element string) int {\n\tfor index, elem := range slice {\n\t\tif elem == element {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func posString(slice []string, element string) int {\n\tfor index, elem := range slice {\n\t\tif elem == element {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func findnextstring(file []byte, index int) (int, []byte, int) {\n\ti := 0\n\n\tfor i = index; i < len(file); i++ {\n\t\tlen := gblen(file, i)\n\t\t//fmt.Printf(\"Length is %d\\n\",len);\n\t\tif len >= *minlength && validgb(file[i:i+len]) {\n\t\t\treturn i, file[i : i+len], i + len\n\t\t}\n\t}\n\n\t//Returns the string address, the string, and the next index\n\t//to search.\n\treturn -1, []byte{}, index + 1\n}", "func IndexStringInSlice(a string, l []string) (int, bool) {\n\tfor i, b := range l {\n\t\tif b == a {\n\t\t\treturn i, true\n\t\t}\n\t}\n\treturn -1, false\n}", "func IndexFrom(s, substr []byte, from int) int {\n\tif from >= len(s) {\n\t\treturn -1\n\t}\n\tif from <= 0 {\n\t\treturn bytes.Index(s, substr)\n\t}\n\ti := bytes.Index(s[from:], substr)\n\tif i == -1 {\n\t\treturn -1\n\t}\n\treturn from + i\n}", "func TestFindSubstring(t *testing.T) {\n\tres := FindSubstring(\"helloWorld\", []string{\"hello\", \"World\"})\n\tt.Log(res)\n\tres1 := FindSubstring(\"wordgoodgoodgoodbestword\", []string{\"word\", \"good\", \"best\", \"word\"})\n\tt.Log(res1)\n\tres2 := FindSubstring(\"barfoofoobarthefoobarman\", []string{\"bar\", \"foo\", \"the\"})\n\tt.Log(res2)\n\tres3 := FindSubstring(\"a\", []string{})\n\tt.Log(res3)\n\n}", "func GetStringIndexInSlice(strToFind string, sliceOfStrings []string) int {\n\tfor strInd, str := range sliceOfStrings {\n\t\tif str == strToFind {\n\t\t\treturn strInd\n\t\t}\n\t}\n\treturn -1\n}", "func Strpos(haystack, needle string) int {\n\tpos := strings.Index(haystack, needle)\n\tif pos < 0 {\n\t\treturn pos\n\t}\n\n\trs := []rune(haystack[0:pos])\n\n\treturn len(rs)\n}", "func StrStr(haystack string, needle string) int {\n\tif len(needle) == 0 {\n\t\treturn 0\n\t}\n\n\thead, tail := 0, len(needle)-1\n\tfor tail < len(haystack) {\n\t\tif haystack[head:tail+1] == needle {\n\t\t\treturn head\n\t\t}\n\n\t\thead++\n\t\ttail++\n\t}\n\n\treturn -1\n}", "func Index(a []string, s string) int {\n\tif len(a) == 0 {\n\t\treturn -1\n\t}\n\tfor i, v := range a {\n\t\tif v == s {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func stringSuffixIndexFunc(s string, f func(c rune) bool) (i int) {\n var hasSuffix bool\n i = strings.LastIndexFunc(s, func(c rune) (done bool) {\n if done = !f(c); !hasSuffix {\n hasSuffix = !done\n }\n return\n })\n if i++; !hasSuffix {\n i = -1\n }\n return\n}", "func strStr(haystack string, needle string) int {\n\tif needle == \"\" {\n\t\treturn 0\n\t}\n\n\tif len(haystack) < len(needle) {\n\t\treturn -1\n\t}\n\n\tfor i := 0; i <= len(haystack)-len(needle); i++ {\n\t\tfor j := 0; j < len(needle) && needle[j] == haystack[j+i]; j++ {\n\t\t\tif j == len(needle)-1 {\n\t\t\t\treturn i\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn -1\n}", "func IndexAny(chars, operand string) int { return strings.IndexAny(operand, chars) }", "func 
inStrSlice(a []string, x string, caseSensitive bool) int {\n\tfor idx, n := range a {\n\t\tif !caseSensitive && strings.EqualFold(x, n) {\n\t\t\treturn idx\n\t\t}\n\t\tif x == n {\n\t\t\treturn idx\n\t\t}\n\t}\n\treturn -1\n}", "func findIndex(v []string, s string, i int) int {\n\tif s == \"\" {\n\t\tif i < len(v) {\n\t\t\treturn i\n\t\t}\n\t\treturn -1\n\t}\n\treturn indexOf(v, s)\n}", "func indexOf(v []string, s string) int {\n\ts = strings.TrimSpace(s)\n\tif i, err := strconv.Atoi(s); err == nil {\n\t\ti--\n\t\tif i >= 0 && i < len(v) {\n\t\t\treturn i\n\t\t}\n\t\treturn -1\n\t}\n\tfor i, vv := range v {\n\t\tif strings.EqualFold(s, strings.TrimSpace(vv)) {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func IndexOfString(array []string, val string) int {\n\tfor index, arrayVal := range array {\n\t\tif arrayVal == val {\n\t\t\treturn index\n\t\t}\n\t}\n\treturn -1\n}", "func strStr(haystack string, needle string) int {\n\tif needle == \"\" {\n\t\treturn 0\n\t}\n\n\tnlen := len(needle)\n\thlen := len(haystack)\n\tif nlen == hlen {\n\t\tif haystack == needle {\n\t\t\treturn 0\n\t\t}\n\t}\n\n\tfirstN := needle[0]\n\n\tfor i := 0; i <= hlen-nlen; i++ {\n\t\tif haystack[i] == firstN {\n\t\t\tif haystack[i:i+nlen] == needle {\n\t\t\t\treturn i\n\t\t\t}\n\t\t}\n\t}\n\n\treturn -1\n}", "func (re *RegexpStd) FindIndex(b []byte) (loc []int) {\n\t// a := re.doExecute(nil, b, \"\", 0, 2, nil)\n\t// if a == nil {\n\t// \treturn nil\n\t// }\n\t// return a[0:2]\n\tpanic(\"\")\n}", "func StringSliceIndex(slice []string, str string) int {\n\tfor i := range slice {\n\t\tif slice[i] == str {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func indexOf(list []string, word string) int {\n\tfor i := range list {\n\t\tif strings.EqualFold(word, list[i]) {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func (arr StringArray) IndexOf(v string) int {\n\tfor i, s := range arr {\n\t\tif v == s {\n\t\t\treturn i\n\t\t}\n\t}\n\n\treturn -1\n}", "func Strripos(haystack, needle string) int {\n\treturn Strrpos(strings.ToLower(haystack), strings.ToLower(needle))\n}", "func (slice stringSlice) pos(value string) int {\n\tfor p, v := range slice {\n\t\tif v == value {\n\t\t\treturn p\n\t\t}\n\t}\n\n\treturn -1\n}", "func strStr(haystack string, needle string) int {\n\tif 0 == len(needle) {\n\t\treturn 0\n\t}\n\n\tfor i, j := 0, 0; i <= len(haystack)-len(needle); i++ {\n\t\tfor j = 0; j < len(needle); j++ {\n\t\t\tif haystack[i+j] != needle[j] {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif len(needle) == j {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func (c *compiler) stringIndex(s, i *LLVMValue) *LLVMValue {\n\tptr := c.builder.CreateExtractValue(s.LLVMValue(), 0, \"\")\n\tptr = c.builder.CreateGEP(ptr, []llvm.Value{i.LLVMValue()}, \"\")\n\treturn c.NewValue(c.builder.CreateLoad(ptr, \"\"), types.Typ[types.Byte])\n}", "func lookupNameIndex(ss []string, s string) int {\n\tq := -1\n\t// apples to apples\n\ts = strings.ToLower(s)\n\tfor i := range ss {\n\t\t// go through all the names looking for a prefix match\n\t\tif s == ss[i] {\n\t\t\t// exact matches always result in an index\n\t\t\treturn i\n\t\t} else if strings.HasPrefix(ss[i], s) {\n\t\t\t// unambiguous prefix matches result in an index\n\t\t\tif q >= 0 {\n\t\t\t\treturn -1\n\t\t\t}\n\t\t\tq = i\n\t\t}\n\t}\n\treturn q\n}", "func FindIndex(text, pat string) int {\n\tlsp := LspTable(pat)\n\ti, j := 0, 0\n\n\tfor ; i < len(text) && j < len(pat); {\n\t\tif text[i] == pat[j] {\n\t\t\tif j == len(pat)-1 {\n\t\t\t\treturn i - (len(pat) - 
1)\n\t\t\t}\n\t\t\ti++\n\t\t\tj++\n\t\t} else {\n\t\t\tj--\n\t\t\tif j < 0 {\n\t\t\t\tj = 0\n\t\t\t\ti++\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tj = lsp[j]\n\t\t}\n\t}\n\treturn -1\n}", "func IndexOfDifference(str1 string, str2 string) int {\n\tif str1 == str2 {\n\t\treturn INDEX_NOT_FOUND\n\t}\n\tif IsEmpty(str1) || IsEmpty(str2) {\n\t\treturn 0\n\t}\n\tvar i int\n\tfor i = 0; i < len(str1) && i < len(str2); i++ {\n\t\tif rune(str1[i]) != rune(str2[i]) {\n\t\t\tbreak\n\t\t}\n\t}\n\tif i < len(str2) || i < len(str1) {\n\t\treturn i\n\t}\n\treturn INDEX_NOT_FOUND\n}", "func FindString(s []string, val string) int {\n\tfor i, v := range s {\n\t\tif v == val {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func stringBinarySearch(slice []string, elem string) int {\n\tidx := sort.SearchStrings(slice, elem)\n\n\tif idx != len(slice) && slice[idx] == elem {\n\t\treturn idx\n\t} else {\n\t\treturn -1\n\t}\n}", "func (c *compiler) strIndex(s string) int {\n\tif index, ok := c.indexes.strs[s]; ok {\n\t\treturn index // reuse existing constant\n\t}\n\tindex := len(c.program.Strs)\n\tc.program.Strs = append(c.program.Strs, s)\n\tc.indexes.strs[s] = index\n\treturn index\n}", "func (g *Game) indexOf(name string) (int, int) {\n\tt := strings.ToLower(name)\n\treturn int(rune(t[0]) - 'a'), int(rune(t[1]) - '1')\n}", "func (re *RegexpStd) FindAllStringIndex(s string, n int) [][]int {\n\t// if n < 0 {\n\t// \tn = len(s) + 1\n\t// }\n\t// var result [][]int\n\t// re.allMatches(s, nil, n, func(match []int) {\n\t// \tif result == nil {\n\t// \t\tresult = make([][]int, 0, startSize)\n\t// \t}\n\t// \tresult = append(result, match[0:2])\n\t// })\n\t// return result\n\tpanic(\"\")\n}", "func StrStr(haystack string, needle string) int {\n\tif len(needle) == 0 {\n\t\treturn 0\n\t}\n\n\tvar i, j int\n\t//outer loop is for iterating on haystack\n\tfor i = 0; i < len(haystack)-len(needle); i++ {\n\t\t//inner loop is for checking every haystack char with needle char\n\t\tfor j = 0; j < len(needle); j++ {\n\t\t\tif haystack[i+j] != needle[j] {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif j == len(needle) {\n\t\t\treturn i\n\t\t}\n\t}\n\n\treturn -1\n\n}", "func StringContains(arr []string, val string) (index int) {\n\tindex = -1\n\tfor i := 0; i < len(arr); i++ {\n\t\tif arr[i] == val {\n\t\t\tindex = i\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func Index(s string, substr string) int {\n\td := CalculateSlideTable(substr)\n\treturn IndexWithTable(&d, s, substr)\n}", "func (b *profileBuilder) stringIndex(s string) int64 {\n\tid, ok := b.stringMap[s]\n\tif !ok {\n\t\tid = len(b.strings)\n\t\tb.strings = append(b.strings, s)\n\t\tb.stringMap[s] = id\n\t}\n\treturn int64(id)\n}", "func StringLastIndex(a, b string) int { return strings.LastIndex(a, b) }", "func indexOf(index string) (int, error) {\n\tif index[0] != '#' {\n\t\treturn 0, errors.New(\"no index\")\n\t}\n\treturn strconv.Atoi(index[1:])\n}", "func FindString(slice []string, val string) (int, bool) {\n\tfor i, item := range slice {\n\t\tif item == val {\n\t\t\treturn i, true\n\t\t}\n\t}\n\treturn -1, false\n}", "func findString(list []string, p string) (bool, int){\n\tfor ind, l := range list{\n\t\tif l == p {\n\t\t\treturn true, ind\n\t\t}\n\t}\n\treturn false, -1\n}", "func (re *RegexpStd) FindSubmatchIndex(b []byte) []int {\n\t//return re.pad(re.doExecute(nil, b, \"\", 0, re.prog.NumCap, nil))\n\tpanic(\"\")\n}", "func (pn *offsetNode) searchString(s string, inLeft, inRight int64) (offset, left, right int64, ok bool) {\n\tf := func(node *offsetNode) int {\n\t\tif s < node.s 
{\n\t\t\treturn -1\n\t\t} else if s > node.s {\n\t\t\treturn 1\n\t\t}\n\t\treturn 0\n\t}\n\n\tvar node *offsetNode\n\tnode, left, right = pn.search(f, inLeft, inRight)\n\tif node != nil {\n\t\toffset = node.offset\n\t\tok = true\n\t}\n\treturn\n}", "func LastIndex(substr, operand string) int { return strings.LastIndex(operand, substr) }", "func Strrpos(haystack, needle string) int {\n\n\tpos := strings.LastIndex(haystack, needle)\n\n\tif pos < 0 {\n\t\treturn pos\n\t}\n\n\trs := []rune(haystack[0:pos])\n\n\treturn len(rs)\n}", "func TestFindStringSubmatch(t *testing.T) {\n\tregex := regexp.MustCompile(\"Hello.*(world)\")\n\tsubject := \"Hello brave new world\"\n\tmatches := regex.FindStringSubmatch(subject)\n\tAssert(\"world\", matches[1], t)\n}", "func main() {\n\trv := substr(\"abc\", \"abc\")\n\tfmt.Println(rv)\n}", "func findSubstring(response http.ResponseWriter, request *http.Request) {\n\tlog.Println(\"main.findSubstring, enter\")\n\turl := request.URL\n\tqueryString := url.Query()\n\tquery := queryString[\"query\"]\n\tq := \"\"\n\tif len(query) > 0 {\n\t\tq = query[0]\n\t}\n\ttopic := queryString[\"topic\"]\n\tt := \"\"\n\tif len(topic) > 0 {\n\t\tt = topic[0]\n\t}\n\tsubtopic := queryString[\"subtopic\"]\n\tst := \"placeholder\"\n\tif len(subtopic) > 0 {\n\t\tst = subtopic[0]\n\t}\n\tif b.substrIndex == nil {\n\t\tlog.Println(\"main.findSubstring index not configured\")\n\t\thttp.Error(response, \"Error, index not configured\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tctx := context.Background()\n\tresults, err := b.substrIndex.LookupSubstr(ctx, q, t, st)\n\tif err != nil {\n\t\tlog.Printf(\"main.findSubstring Error looking up term, %v\", err)\n\t\thttp.Error(response, \"Error looking up term\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\tresultsJson, err := json.Marshal(results)\n\tif err != nil {\n\t\tlog.Printf(\"main.findSubstring error marshalling JSON, %v\", err)\n\t\thttp.Error(response, \"Error marshalling results\",\n\t\t\thttp.StatusInternalServerError)\n\t} else {\n\t\tresponse.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\t\tfmt.Fprint(response, string(resultsJson))\n\t}\n}", "func instr(ch byte, st string) bool {\n for i := 0; i < len(st); i++ {\n if st[i] == ch { return true }\n }\n return false\n}", "func contains(s string, substr string) bool {\n if len(substr) == 0 {\n return true\n }\n s = strings.ToLower(s)\n split := strings.Split(s, \"-\")\n s = strings.Join(split, \"\") + \" \" + strings.Join(split, \" \")\n\n substr = strings.ToLower(substr)\n substr = strings.Join(strings.Split(substr, \"-\"), \"\")\n\n index := strings.Index(s, substr)\n if index == -1 {\n return false\n }\n if index + len(substr) < len(s) {\n char := s[index + len(substr)]\n if char >= 'a' && char <= 'z' || char >= '0' && char <= '9' {\n return false\n }\n }\n if index > 0 {\n char := s[index - 1]\n if char >= 'a' && char <= 'z' || char >= '0' && char <= '9' {\n return false\n }\n }\n return true\n}", "func index(slice []string, item string) int {\n\tfor i := range slice {\n\t\tif slice[i] == item {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "func StringIdxOff(list []string, indices []int, element string, offset int) (int, int, bool) {\n\tleft := 0\n\tright := len(indices) - 1\n\tif len(element) > 0 {\n\t\tfor left <= right {\n\t\t\tmiddle := (left + right) / 2\n\t\t\tvalueIndex := indices[middle]\n\t\t\tvalue := list[valueIndex]\n\t\t\tif len(value) >= offset {\n\t\t\t\tvalue = value[offset:]\n\t\t\t\tif element > value {\n\t\t\t\t\tleft = 
middle + 1\n\t\t\t\t} else if strings.HasPrefix(value, element) {\n\t\t\t\t\tfrom := stringIdxOffL(list, indices, element, left, middle-1, offset)\n\t\t\t\t\tto := stringIdxOffR(list, indices, element, middle+1, right, offset)\n\t\t\t\t\treturn from, to, true\n\t\t\t\t} else {\n\t\t\t\t\tright = middle - 1\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tleft = middle + 1\n\t\t\t}\n\t\t}\n\t} else if len(list) > 0 {\n\t\treturn 0, len(list), true\n\t}\n\treturn left, left + 1, false\n}", "func ContainsString(array []string, val string) (index int) {\n index = -1\n for i := 0; i < len(array); i++ {\n if array[i] == val {\n index = i\n return\n }\n }\n return\n}", "func FindSubstring(s string, words []string) []int {\n\t// 排除特殊情况\n\tif len(words) == 0 {\n\t\treturn nil\n\t}\n\twordLen := len(words[0])\n\tcombLen := len(words) * wordLen\n\tif len(s) < combLen {\n\t\treturn nil\n\t}\n\n\tvar indices []int\n\tm := make(map[string]int)\n\tfor _, word := range words {\n\t\tm[word]++\n\t}\n\t// 分wordLen批次比较子串\n\tfor i := 0; i < wordLen; i++ {\n\t\tcounter := make(map[string]int)\n\t\tl, r := i, i\n\t\tnum := 0\n\t\tfor r+wordLen <= len(s) {\n\t\t\tword := s[r : r+wordLen]\n\t\t\tr += wordLen\n\t\t\t// 若word在words中不存在\n\t\t\tif m[word] == 0 {\n\t\t\t\tl = r\n\t\t\t\tnum = 0\n\t\t\t\tfor word := range counter {\n\t\t\t\t\tcounter[word] = 0\n\t\t\t\t}\n\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tcounter[word]++\n\t\t\tnum++\n\t\t\t// 若word重复了\n\t\t\tfor counter[word] > m[word] {\n\t\t\t\tcounter[s[l:l+wordLen]]--\n\t\t\t\tl += wordLen\n\t\t\t\tnum--\n\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// 若子串中的单词与words中的单词完全匹配\n\t\t\tif num == len(words) {\n\t\t\t\tindices = append(indices, l)\n\t\t\t\tcounter[s[l:l+wordLen]]--\n\t\t\t\tnum--\n\t\t\t\tl += wordLen\n\t\t\t}\n\t\t}\n\t}\n\n\treturn indices\n}", "func IndexOf(slice []string, needle string) int {\n\tfor idx, s := range slice {\n\t\tif s == needle {\n\t\t\treturn idx\n\t\t}\n\t}\n\n\treturn -1\n}", "func SubStr(str string, start, end int) string {\n\tcounter, startIdx := 0, 0\n\n\tfor idx := range str {\n\t\tif counter == start {\n\t\t\tstartIdx = idx\n\t\t}\n\n\t\tif counter == end {\n\t\t\treturn str[startIdx:idx]\n\t\t}\n\t\tcounter++\n\t}\n\n\treturn str[startIdx:]\n}" ]
[ "0.78677785", "0.75737554", "0.7267148", "0.7187149", "0.71856123", "0.71032894", "0.70547855", "0.7043875", "0.7039487", "0.7014951", "0.69575894", "0.69506973", "0.6919924", "0.6904088", "0.68523514", "0.6800168", "0.6799114", "0.6764676", "0.6712692", "0.66777605", "0.6647385", "0.6625434", "0.6616353", "0.6612849", "0.65800273", "0.65701026", "0.6567383", "0.65509444", "0.65312207", "0.6494183", "0.6492659", "0.64792895", "0.6479209", "0.64763033", "0.6469485", "0.6452042", "0.6440895", "0.6433098", "0.6410922", "0.6407093", "0.64019775", "0.6399631", "0.6399631", "0.6399631", "0.6399631", "0.6399631", "0.63791376", "0.63255024", "0.6320736", "0.6318191", "0.62653667", "0.62407297", "0.6229458", "0.62030685", "0.6201611", "0.61900425", "0.61720103", "0.6170052", "0.61584413", "0.6153998", "0.6152679", "0.6151408", "0.6130642", "0.61198735", "0.610811", "0.60859126", "0.6084933", "0.60837865", "0.60816836", "0.60614294", "0.60410094", "0.60406184", "0.6033827", "0.6030402", "0.60245854", "0.5961602", "0.5944805", "0.59435683", "0.5936047", "0.59086376", "0.5906895", "0.59039503", "0.58798456", "0.5875357", "0.58590007", "0.5848876", "0.58430123", "0.5836829", "0.5817923", "0.5803955", "0.5802242", "0.57915866", "0.57728255", "0.5771814", "0.57705766", "0.57671314", "0.57564753", "0.5735945", "0.5722444", "0.5722404", "0.57127047" ]
0.0
-1
GetJsonString provides a mock function with given fields:
func (_m *MockWriteBufferJsonBased) GetJsonString() (string, error) { ret := _m.Called() var r0 string var r1 error if rf, ok := ret.Get(0).(func() (string, error)); ok { return rf() } if rf, ok := ret.Get(0).(func() string); ok { r0 = rf() } else { r0 = ret.Get(0).(string) } if rf, ok := ret.Get(1).(func() error); ok { r1 = rf() } else { r1 = ret.Error(1) } return r0, r1 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferJsonBased_Expecter) GetJsonString() *MockWriteBufferJsonBased_GetJsonString_Call {\n\treturn &MockWriteBufferJsonBased_GetJsonString_Call{Call: _e.mock.On(\"GetJsonString\")}\n}", "func (m *MockHumanJSONStringer) JSONString() (string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"JSONString\")\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Environment) GetString(_a0 string) string {\n\tret := _m.Called(_a0)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *ContextHandler) JSON(_a0 int, _a1 interface{}) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int, interface{}) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockArguments) GetString(arg0 string) string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetString\", arg0)\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockInterface) ReadString(prompt string) (string, error) {\n\tret := _m.Called(prompt)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(prompt)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(prompt)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockCALIdentifyTag) GetAddressString() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *RequesterArgSameAsNamedImport) Get(_a0 string) *json.RawMessage {\n\tret := _m.Called(_a0)\n\n\tvar r0 *json.RawMessage\n\tif rf, ok := ret.Get(0).(func(string) *json.RawMessage); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*json.RawMessage)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) GetXmlString() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockPlcWriteResponse) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *Manager) Get(ctx context.Context, projectID int64, meta ...string) (map[string]string, error) {\n\t_va := make([]interface{}, len(meta))\n\tfor _i := range meta {\n\t\t_va[_i] = meta[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, projectID)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 map[string]string\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) (map[string]string, error)); ok {\n\t\treturn rf(ctx, projectID, meta...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) map[string]string); ok {\n\t\tr0 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]string)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, int64, ...string) error); ok {\n\t\tr1 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockTestTransportInstance) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := 
ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *Requester) Get(path string) (string, error) {\n\tret := _m.Called(path)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(path)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(path)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func dummyJSONStr() string {\n\treturn `{\n \"number\": 1234.56,\n \"string\": \"foo bar\",\n \"arrayOfString\": [\n \"one\",\n \"two\",\n \"three\",\n \"four\"\n ],\n \"object\": {\n \"foo\": \"bar\",\n \"hello\": \"world\",\n \"answer\": 42\n },\n \"true\": true,\n \"false\": false,\n \"null\": null\n }`\n}", "func (m *MockCityRecord) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func JSONGetString(ctx context.Context, subject map[string]interface{}, keys ...string) string {\n\tdata := MapInterfaceToJSONBytes(subject)\n\n\tvalue, err := jsonparser.GetString(data, keys...)\n\tif err != nil {\n\t\tlogger.Errorf(ctx, \"JSON util\", \"failed to retrieve key, %#v error:%s\", keys, err.Error())\n\t\treturn \"\"\n\t}\n\n\treturn value\n}", "func (m *MockMessageBody) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockPlcQuery) GetQueryString() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *SecretClient) Get(name string) (string, error) {\n\tret := _m.Called(name)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(name)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(name)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *ORM) StoreString(chainID *big.Int, key string, val string) error {\n\tret := _m.Called(chainID, key, val)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*big.Int, string, string) error); ok {\n\t\tr0 = rf(chainID, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockMappedResource) String() string {\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *ManagerMock) GetByUserJSON(ctx context.Context) ([]byte, error) {\n\targs := m.Called(ctx)\n\tdata, _ := args.Get(0).([]byte)\n\treturn data, args.Error(1)\n}", "func (m *MockCountryRecord) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *MockLoader) MustGetString(key string) string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MustGetString\", key)\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *RedsyncConn) Get(name string) (string, error) {\n\tret := _m.Called(name)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(name)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(name)\n\t} else 
{\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockBufferInterface) String() string {\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *FileReader) ReadFileAsString(path string) (string, error) {\n\tret := _m.Called(path)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(path)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(path)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func mockTest0104(w http.ResponseWriter, r *http.Request) {\n\twait, err := common.GetIntArgFromQuery(r, \"wait\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\tunit, err := common.GetStringArgFromQuery(r, \"unit\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\n\tretIP := `\"42.48.232.7\", \"10.200.20.21\"`\n\tretJSON := fmt.Sprintf(`{\"errno\":0, \"iplist\":[%s]}`, retIP)\n\t// retContent := `{\"errno\":-1, \"iplist\":[]}`\n\tw.Header().Set(common.TextContentLength, strconv.Itoa(len(retJSON)))\n\tw.Header().Set(common.TextContentType, common.ContentTypeJSON)\n\tw.WriteHeader(http.StatusOK)\n\n\tif wait > 0 {\n\t\tif unit == \"milli\" {\n\t\t\ttime.Sleep(time.Duration(wait) * time.Millisecond)\n\t\t} else {\n\t\t\ttime.Sleep(time.Duration(wait) * time.Second)\n\t\t}\n\t}\n\tif _, err := io.Copy(w, bufio.NewReader(strings.NewReader(retJSON))); err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t}\n}", "func (vd ViewData) GetJsonString() (j []byte, err error) {\n\tj, err = json.Marshal(vd.JsonData)\n\treturn\n}", "func (m *MockValue) String(def string) string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\", def)\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func mockTokenAsString(t *testing.T) string {\n\ttoken := mockToken(t)\n\treturn token.Encode()\n}", "func (_m *esClientInterface) Get(_a0 string, _a1 string, _a2 string) (*elastic.GetResult, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 *elastic.GetResult\n\tif rf, ok := ret.Get(0).(func(string, string, string) *elastic.GetResult); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*elastic.GetResult)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string, string) error); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (client PrimitiveClient) GetString() (result StringWrapper, err error) {\n req, err := client.GetStringPreparer()\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.GetStringSender(req)\n if err != nil {\n result.Response = autorest.Response{Response: resp}\n return result, 
autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", resp, \"Failure sending request\")\n }\n\n result, err = client.GetStringResponder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (_m *ResponseHeaderMapReadOnly) Get(name string) volatile.String {\n\tret := _m.Called(name)\n\n\tvar r0 volatile.String\n\tif rf, ok := ret.Get(0).(func(string) volatile.String); ok {\n\t\tr0 = rf(name)\n\t} else {\n\t\tr0 = ret.Get(0).(volatile.String)\n\t}\n\n\treturn r0\n}", "func (_m *MockInterface) Get(ctx context.Context, key string) (string, error) {\n\tret := _m.Called(ctx, key)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, string) string); ok {\n\t\tr0 = rf(ctx, key)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(ctx, key)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockCALIdentifyTag) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *RequestContext) BindJSON(_a0 interface{}) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(interface{}) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (fps *CreateRegionRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func GetString(json []byte, path ...string) (string, error) {\n\tval, err := Get(json, path...)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn string(val), err\n}", "func (m *MockRecord) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockPlcMessage) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockDefaultPlcConnectionCloseResult) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (fps *CreateMetricDescriptorRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (_m *RequestHeaderMapReadOnly) Get(name string) volatile.String {\n\tret := _m.Called(name)\n\n\tvar r0 volatile.String\n\tif rf, ok := ret.Get(0).(func(string) volatile.String); ok {\n\t\tr0 = rf(name)\n\t} else {\n\t\tr0 = ret.Get(0).(volatile.String)\n\t}\n\n\treturn r0\n}", "func _getMock(url string) (content []byte, err error) {\n\tvar idnum = crc32.ChecksumIEEE([]byte(url))%uint32(5) + 1\n\tvar response = fmt.Sprintf(mockResponseTemplate, idnum, idnum, \"no message\", 200)\n\treturn []byte(response), nil\n}", "func Get_Json_String(db *sql.DB) string {\n sqlStmt := `\n select value from metadata where name = \"json\";\n `\n var jsonstring string\n err := db.QueryRow(sqlStmt).Scan(&jsonstring)\n\n if err != nil {\n log.Printf(\"%q: %s\\n\", err, sqlStmt)\n }\n return jsonstring\n}", "func (_m *MapFunc) Get(m map[string]func(string) string) error {\n\tret := _m.Called(m)\n\n\tvar r0 
error\n\tif rf, ok := ret.Get(0).(func(map[string]func(string) string) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *StoreService) Get(_a0 context.Context, _a1 *models.Snapshot, _a2 []string) ([]byte, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.Snapshot, []string) []byte); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *models.Snapshot, []string) error); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func TestJSON(t *testing.T) {\n\ta := assert.New(t)\n\t// r := require.New(t)\n\trender := NewRender()\n\n\tstr := \"foobar\"\n\n\tc := &stubController{r: render, status: http.StatusOK, obj: str}\n\ta.HTTPBodyContains(c.Handler, \"GET\", \"/\", url.Values{}, \"foobar\")\n\n\tc.obj = &str\n\ta.HTTPBodyContains(c.Handler, \"GET\", \"/\", url.Values{}, \"foobar\")\n\n\tc.obj = nil\n\ta.HTTPBodyNotContains(c.Handler, \"GET\", \"/\", url.Values{}, \"foobar\")\n}", "func (m *MockBackend) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *mockHTTPGetter) get(r *http.Request, url string) (*http.Response, error) {\n\tret := _m.Called(r, url)\n\n\tvar r0 *http.Response\n\tif rf, ok := ret.Get(0).(func(*http.Request, string) *http.Response); ok {\n\t\tr0 = rf(r, url)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*http.Response)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*http.Request, string) error); ok {\n\t\tr1 = rf(r, url)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockRepo) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockEStringToStringMapEntry) GetKey() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (mock *KeyValueMock) GetStringCalls() []struct {\n\tKey string\n} {\n\tvar calls []struct {\n\t\tKey string\n\t}\n\tlockKeyValueMockGetString.RLock()\n\tcalls = mock.calls.GetString\n\tlockKeyValueMockGetString.RUnlock()\n\treturn calls\n}", "func (_m *mockJwksGetter) get(r *http.Request, url string) (jose.JSONWebKeySet, error) {\n\tret := _m.Called(r, url)\n\n\tvar r0 jose.JSONWebKeySet\n\tif rf, ok := ret.Get(0).(func(*http.Request, string) jose.JSONWebKeySet); ok {\n\t\tr0 = rf(r, url)\n\t} else {\n\t\tr0 = ret.Get(0).(jose.JSONWebKeySet)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*http.Request, string) error); ok {\n\t\tr1 = rf(r, url)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *ContainerIface) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (m *MockTimeSource) DateString() (string, string, string, string, string) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"DateString\")\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(string)\n\tret2, _ := ret[2].(string)\n\tret3, _ := ret[3].(string)\n\tret4, _ := ret[4].(string)\n\treturn ret0, ret1, ret2, ret3, ret4\n}", "func (_m *Consumer) String() string {\n\tret := 
_m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockEStringToStringMapEntry) GetValue() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func lookupJSONString(genericData map[string]interface{}, path ...string) (string, error) {\n\tresult, err := lookupJSONValue(genericData, path, \"\")\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tstringResult, isString := result.(string)\n\tif !isString {\n\t\treturn \"\", fmt.Errorf(\"type error for key %v\", path)\n\t}\n\treturn stringResult, nil\n}", "func (_m *Cache) Get(ctx context.Context, hash string) (string, bool) {\n\tret := _m.Called(ctx, hash)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, string) string); ok {\n\t\tr0 = rf(ctx, hash)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 bool\n\tif rf, ok := ret.Get(1).(func(context.Context, string) bool); ok {\n\t\tr1 = rf(ctx, hash)\n\t} else {\n\t\tr1 = ret.Get(1).(bool)\n\t}\n\n\treturn r0, r1\n}", "func (m *ManagerMock) GetByPackageJSON(ctx context.Context, packageID string) ([]byte, error) {\n\targs := m.Called(ctx, packageID)\n\tdata, _ := args.Get(0).([]byte)\n\treturn data, args.Error(1)\n}", "func (_m *Prompter) StringRequired(pr string) string {\n\tret := _m.Called(pr)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(pr)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *CacheManager) Get(key string) (*objects.Object, error) {\n\tret := _m.Called(key)\n\n\tvar r0 *objects.Object\n\tif rf, ok := ret.Get(0).(func(string) *objects.Object); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*objects.Object)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(key)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (fps *UpdateRegionRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func JsonString(data string) (*Typed, error) {\n\treturn Json([]byte(data))\n}", "func (_m *TaskStatuser) Str(keyvals ...string) virtualmachine.TaskStatuser {\n\t_va := make([]interface{}, len(keyvals))\n\tfor _i := range keyvals {\n\t\t_va[_i] = keyvals[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 virtualmachine.TaskStatuser\n\tif rf, ok := ret.Get(0).(func(...string) virtualmachine.TaskStatuser); ok {\n\t\tr0 = rf(keyvals...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(virtualmachine.TaskStatuser)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockReader) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *MockManager) String() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (r *HashJsonCodecRedisController) GetString(key string, field string) (value string, err error) {\n\t// redis conn\n\tconn := r.pool.Get()\n\tdefer conn.Close()\n\n\t// get field\n\treturn github_com_gomodule_redigo_redis.String(conn.Do(\"HGET\", key, field))\n}", "func (fps *BatchGetRegionsResponse_FieldSubPath) JSONString() string {\n\treturn 
strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *ListRegionsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *ListMetricDescriptorsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (mock *KeyValueMock) PutStringCalls() []struct {\n\tKey string\n\tValue string\n} {\n\tvar calls []struct {\n\t\tKey string\n\t\tValue string\n\t}\n\tlockKeyValueMockPutString.RLock()\n\tcalls = mock.calls.PutString\n\tlockKeyValueMockPutString.RUnlock()\n\treturn calls\n}", "func (fps *WatchRegionsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fpm *RegionalDistribution_FieldPathMap) JSONString() string {\n\treturn strcase.ToLowerCamel(fpm.selector.String()) + \".\" + fpm.key\n}", "func TestGetObject(t *testing.T) {\n\tmock := mockS3Impl{\n\t\tInMemoryStore: make(map[string]string),\n\t}\n\n\tmock.InMemoryStore[\"foo\"] = \"bar\"\n\n\tmyservice := mockS3.Myservice{\n\t\tS3Client: mock,\n\t}\n\n\tstr, err := myservice.GetObjectAsString(\"foo\")\n\tif err != nil {\n\t\tt.Fail()\n\t}\n\n\tif str != \"bar\" {\n\t\tt.Fail()\n\t}\n}", "func (p *Parser) GetString(pattern string) string {\n return p.json.GetString(pattern)\n}", "func (m *MockInfra) String() string {\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func MockPayload(content string) string {\n\tdata := make(map[string]interface{})\n\tdata[\"content\"] = content\n\n\tres, err := json.Marshal(data)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn string(res)\n}", "func Test_RetMsgGenJson_Case1(t *testing.T) {\n // invoke requestGen()\n msgBuf := RetMsgGenJson(\"OK\", \"nothing happens\")\n\n\t// decode the msg\n\tvar msg interface{}\n\terr := json.Unmarshal(msgBuf, &msg)\n\tif err != nil {\n\t\tt.Errorf(\"json.Unmarshal():%s\", err.Error())\n\t\treturn\n\t}\n\n\tstrMap, ok := msg.(map[string]interface{})\n\tif !ok {\n\t\tt.Errorf(\"fail to convert to map\")\n\t\treturn\n\t}\n\tvalue, ok := strMap[\"retCode\"]\n\tif !ok {\n\t\tt.Errorf(\"no retCode\")\n\t\treturn\n\t}\n\tif value != \"OK\" {\n\t\tt.Errorf(\"retCode(%s) != OK\", value)\n\t\treturn\n\t}\n\n\tvalue, ok = strMap[\"msg\"]\n\tif !ok {\n\t\tt.Errorf(\"no msg\")\n\t\treturn\n\t}\n\tif value != \"nothing happens\" {\n\t\tt.Errorf(\"msg(%s) != nothing happens\", value)\n\t\treturn\n\t}\n}", "func (_m *MockCache) Get(key string) (interface{}, bool) {\n\tret := _m.Called(key)\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func(string) interface{}); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\tvar r1 bool\n\tif rf, ok := ret.Get(1).(func(string) bool); ok {\n\t\tr1 = rf(key)\n\t} else {\n\t\tr1 = ret.Get(1).(bool)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockConn) ReadJSON(v interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadJSON\", v)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockReader) String() (string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestGetJSON(t *testing.T) {\n\tassert := audit.NewTestingAssertion(t, true)\n\t// Setup the test server.\n\tmux := newMultiplexer(assert)\n\tts := 
restaudit.StartServer(mux, assert)\n\tdefer ts.Close()\n\terr := mux.Register(\"test\", \"json\", NewTestHandler(\"json\", assert))\n\tassert.Nil(err)\n\t// Perform test requests.\n\treq := restaudit.NewRequest(\"GET\", \"/base/test/json/4711?foo=0815\")\n\treq.AddHeader(restaudit.HeaderAccept, restaudit.ApplicationJSON)\n\tresp := ts.DoRequest(req)\n\tresp.AssertStatusEquals(200)\n\tresp.AssertBodyContains(`\"ResourceID\":\"4711\"`)\n\tresp.AssertBodyContains(`\"Query\":\"0815\"`)\n\tresp.AssertBodyContains(`\"Context\":\"foo\"`)\n}", "func RunJSONSerializationTestForJsonField(subject JsonField) string {\n\t// Serialize to JSON\n\tbin, err := json.Marshal(subject)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Deserialize back into memory\n\tvar actual JsonField\n\terr = json.Unmarshal(bin, &actual)\n\tif err != nil {\n\t\treturn err.Error()\n\t}\n\n\t// Check for outcome\n\tmatch := cmp.Equal(subject, actual, cmpopts.EquateEmpty())\n\tif !match {\n\t\tactualFmt := pretty.Sprint(actual)\n\t\tsubjectFmt := pretty.Sprint(subject)\n\t\tresult := diff.Diff(subjectFmt, actualFmt)\n\t\treturn result\n\t}\n\n\treturn \"\"\n}", "func (_m *TaskStatuser) Get() map[string]interface{} {\n\tret := _m.Called()\n\n\tvar r0 map[string]interface{}\n\tif rf, ok := ret.Get(0).(func() map[string]interface{}); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]interface{})\n\t\t}\n\t}\n\n\treturn r0\n}", "func (o *FakeObject) String() string { return o.Value.(string) }", "func (fps *ListMyProjectInvitationsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (c *EzClient) JSONStr(j string) *EzClient {\n\tc.body = strings.NewReader(j)\n\treturn c\n}", "func (_m *ICacheInteractor) Get(key string) (cache.CachedResponse, error) {\n\tret := _m.Called(key)\n\n\tvar r0 cache.CachedResponse\n\tif rf, ok := ret.Get(0).(func(string) cache.CachedResponse); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tr0 = ret.Get(0).(cache.CachedResponse)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(key)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (fps *Limit_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *UpdateRegionRequestCAS_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *Resource_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (m *MockValue) StringMap(def map[string]string) map[string]string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"StringMap\", def)\n\tret0, _ := ret[0].(map[string]string)\n\treturn ret0\n}", "func (c *Config) GetString(pattern string, def ...interface{}) string {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetString(pattern, def...)\n\t}\n\treturn \"\"\n}", "func (fps *UpdateLimitPoolRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *Plan_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}" ]
[ "0.68565124", "0.6683317", "0.5848859", "0.575837", "0.55876327", "0.5565529", "0.54658836", "0.5456677", "0.54443014", "0.5422334", "0.538292", "0.5372283", "0.53590333", "0.535695", "0.5347867", "0.5335149", "0.5322711", "0.53080773", "0.5300438", "0.5270826", "0.5217578", "0.52082217", "0.5204585", "0.51843715", "0.5181398", "0.5176887", "0.5168872", "0.5161372", "0.51582", "0.51581186", "0.51555765", "0.514642", "0.5143306", "0.51427656", "0.5140779", "0.5136194", "0.5109734", "0.5107495", "0.51012886", "0.5098607", "0.5097614", "0.50907224", "0.50860703", "0.5075626", "0.5073772", "0.5064471", "0.5056891", "0.50496197", "0.50409377", "0.5040001", "0.50288105", "0.5024262", "0.502101", "0.5020164", "0.5014619", "0.5012819", "0.50077975", "0.50011206", "0.50008035", "0.49972248", "0.49967223", "0.49935383", "0.49934742", "0.4989186", "0.49836403", "0.4978662", "0.49764726", "0.49718812", "0.4970008", "0.49658504", "0.4964895", "0.49577996", "0.49506453", "0.4949867", "0.49427396", "0.49367243", "0.49351478", "0.49217176", "0.49209902", "0.49208933", "0.49178866", "0.4905004", "0.4903696", "0.48971832", "0.4891803", "0.4890312", "0.4884723", "0.48841324", "0.4877207", "0.48747873", "0.4871107", "0.48558503", "0.48505768", "0.48504105", "0.48500153", "0.48491877", "0.48490214", "0.4846253", "0.4842417", "0.48382944" ]
0.7805901
0
GetJsonString is a helper method to define mock.On call
func (_e *MockWriteBufferJsonBased_Expecter) GetJsonString() *MockWriteBufferJsonBased_GetJsonString_Call { return &MockWriteBufferJsonBased_GetJsonString_Call{Call: _e.mock.On("GetJsonString")} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferJsonBased) GetJsonString() (string, error) {\n\tret := _m.Called()\n\n\tvar r0 string\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func() (string, error)); ok {\n\t\treturn rf()\n\t}\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockHumanJSONStringer) JSONString() (string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"JSONString\")\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func JsonToString() (string, error) {\n\treturn DefaultWorker.JsonToString()\n}", "func (vd ViewData) GetJsonString() (j []byte, err error) {\n\tj, err = json.Marshal(vd.JsonData)\n\treturn\n}", "func JSONString(e interface{}) string {\n\treturn string(JSON(e))\n}", "func JSONGetString(ctx context.Context, subject map[string]interface{}, keys ...string) string {\n\tdata := MapInterfaceToJSONBytes(subject)\n\n\tvalue, err := jsonparser.GetString(data, keys...)\n\tif err != nil {\n\t\tlogger.Errorf(ctx, \"JSON util\", \"failed to retrieve key, %#v error:%s\", keys, err.Error())\n\t\treturn \"\"\n\t}\n\n\treturn value\n}", "func (client PrimitiveClient) GetStringResponder(resp *http.Response) (result StringWrapper, err error) { \n err = autorest.Respond(\n resp,\n client.ByInspecting(),\n azure.WithErrorUnlessStatusCode(http.StatusOK),\n autorest.ByUnmarshallingJSON(&result),\n autorest.ByClosing())\n result.Response = autorest.Response{Response: resp}\n return\n}", "func (ep *EndpointConfig) GetJSONString(path string) string {\n\tif ep.json.Exists(\"data\") {\n\t\treturn ep.json.Path(\"data\").String()\n\t}\n\treturn \"\"\n}", "func (m *ManagerMock) GetByUserJSON(ctx context.Context) ([]byte, error) {\n\targs := m.Called(ctx)\n\tdata, _ := args.Get(0).([]byte)\n\treturn data, args.Error(1)\n}", "func getJson(url string, target interface{}) error {\n\tr, err := sendClient.Get(url)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer r.Body.Close()\n\treturn json.NewDecoder(r.Body).Decode(target)\n}", "func (_e *MockTestTransportInstance_Expecter) String() *MockTestTransportInstance_String_Call {\n\treturn &MockTestTransportInstance_String_Call{Call: _e.mock.On(\"String\")}\n}", "func TestGetJSON(t *testing.T) {\n\tassert := audit.NewTestingAssertion(t, true)\n\t// Setup the test server.\n\tmux := newMultiplexer(assert)\n\tts := restaudit.StartServer(mux, assert)\n\tdefer ts.Close()\n\terr := mux.Register(\"test\", \"json\", NewTestHandler(\"json\", assert))\n\tassert.Nil(err)\n\t// Perform test requests.\n\treq := restaudit.NewRequest(\"GET\", \"/base/test/json/4711?foo=0815\")\n\treq.AddHeader(restaudit.HeaderAccept, restaudit.ApplicationJSON)\n\tresp := ts.DoRequest(req)\n\tresp.AssertStatusEquals(200)\n\tresp.AssertBodyContains(`\"ResourceID\":\"4711\"`)\n\tresp.AssertBodyContains(`\"Query\":\"0815\"`)\n\tresp.AssertBodyContains(`\"Context\":\"foo\"`)\n}", "func Get_Json_String(db *sql.DB) string {\n sqlStmt := `\n select value from metadata where name = \"json\";\n `\n var jsonstring string\n err := db.QueryRow(sqlStmt).Scan(&jsonstring)\n\n if err != nil {\n log.Printf(\"%q: %s\\n\", err, sqlStmt)\n }\n return jsonstring\n}", "func (_m *ContextHandler) JSON(_a0 int, _a1 interface{}) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int, interface{}) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 
= ret.Error(0)\n\t}\n\n\treturn r0\n}", "func _getMock(url string) (content []byte, err error) {\n\tvar idnum = crc32.ChecksumIEEE([]byte(url))%uint32(5) + 1\n\tvar response = fmt.Sprintf(mockResponseTemplate, idnum, idnum, \"no message\", 200)\n\treturn []byte(response), nil\n}", "func GetString(json []byte, path ...string) (string, error) {\n\tval, err := Get(json, path...)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn string(val), err\n}", "func jsonToString(jsonData []byte, err error) string {\n\tif err != nil {\n\t\tlogErr(\"Error on serialization %s\", err.Error())\n\t\treturn \"{}\"\n\t}\n\n\tbuf := bytes.NewBuffer(jsonData)\n\n\treturn buf.String()\n}", "func (fpm *RegionalDistribution_FieldPathMap) JSONString() string {\n\treturn strcase.ToLowerCamel(fpm.selector.String()) + \".\" + fpm.key\n}", "func GetAsJson(pIFC interface{}) string {\n\tvar labJSON []byte\n\tvar lsText string\n\t// var lErr error\n\n\tlabJSON, _ = json.Marshal(pIFC)\n\n\tlsText = bytes.NewBuffer(labJSON).String()\n\t// log.Println(lsText)\n\treturn lsText\n}", "func (_e *MockPlcDriver_Expecter) String() *MockPlcDriver_String_Call {\n\treturn &MockPlcDriver_String_Call{Call: _e.mock.On(\"String\")}\n}", "func (_e *MockCALIdentifyTag_Expecter) String() *MockCALIdentifyTag_String_Call {\n\treturn &MockCALIdentifyTag_String_Call{Call: _e.mock.On(\"String\")}\n}", "func (fps *AlertingCondition_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (p *Parser) GetString(pattern string) string {\n return p.json.GetString(pattern)\n}", "func (m *ManagerMock) GetByPackageJSON(ctx context.Context, packageID string) ([]byte, error) {\n\targs := m.Called(ctx, packageID)\n\tdata, _ := args.Get(0).([]byte)\n\treturn data, args.Error(1)\n}", "func ToJsonString(v interface{}) string {\n\tb, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\treturn string(b)\n}", "func (t Transaction) JSONString() string {\n\ts, _ := toJSONString(t)\n\treturn s\n}", "func (l *LogFormat) GetString() string {\n\tb, _ := json.Marshal(l)\n\treturn string(b)\n}", "func ToJsonStr(v interface{}) string {\n\tbuffer := &bytes.Buffer{}\n\tencoder := json.NewEncoder(buffer)\n\tencoder.SetEscapeHTML(false)\n\terr := encoder.Encode(v)\n\tif err != nil {\n\t\treturn \"\"\n\t}\n\ts := string(buffer.Bytes())\n\treturn strings.TrimRight(s, \"\\n\")\n}", "func mockTokenAsString(t *testing.T) string {\n\ttoken := mockToken(t)\n\treturn token.Encode()\n}", "func (j Json) String() string {\n\treturn string(j)\n}", "func (fps *WatchRegionsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func dummyJSONStr() string {\n\treturn `{\n \"number\": 1234.56,\n \"string\": \"foo bar\",\n \"arrayOfString\": [\n \"one\",\n \"two\",\n \"three\",\n \"four\"\n ],\n \"object\": {\n \"foo\": \"bar\",\n \"hello\": \"world\",\n \"answer\": 42\n },\n \"true\": true,\n \"false\": false,\n \"null\": null\n }`\n}", "func (_m *Environment) GetString(_a0 string) string {\n\tret := _m.Called(_a0)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string) string); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_e *MockPlcWriteResponse_Expecter) String() *MockPlcWriteResponse_String_Call {\n\treturn &MockPlcWriteResponse_String_Call{Call: _e.mock.On(\"String\")}\n}", "func (fps *BatchGetRegionsResponse_FieldSubPath) JSONString() string {\n\treturn 
strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func lookupJSONString(genericData map[string]interface{}, path ...string) (string, error) {\n\tresult, err := lookupJSONValue(genericData, path, \"\")\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tstringResult, isString := result.(string)\n\tif !isString {\n\t\treturn \"\", fmt.Errorf(\"type error for key %v\", path)\n\t}\n\treturn stringResult, nil\n}", "func SendJsonRequest(service string, endpoint string, jsonString string, timeout time.Duration, retries int) (out string, err error) {\n\treq, opts, err := CreateRequest(service, endpoint, jsonString, timeout, retries)\n\tif err != nil {\n\t\treturn\n\t}\n\tlatestTrace = req.TraceID()\n\trsp, dur, err := SendRequest(req, opts)\n\tif err != nil {\n\t\tout = fmt.Sprintf(\"Duration: %v\\nTraceId: %s\\n\", dur.String(), latestTrace)\n\t\treturn\n\t}\n\n\tvar buf bytes.Buffer\n\terr = json.Indent(&buf, rsp.Body(), \"\", \" \")\n\tif err != nil {\n\t\tout = fmt.Sprintf(\"Duration: %v\\nTraceId: %s\\n\", dur.String(), latestTrace)\n\t\treturn\n\t}\n\n\tout = fmt.Sprintf(\"%s\\n\\nDuration: %v\\nTraceId: %s\\n\", string(buf.Bytes()), dur.String(), latestTrace)\n\treturn\n}", "func (e cfxManager) GetTestJson() []byte {\n\tif e.p == subscriber.RPC {\n\t\tmsg := JsonrpcMessage{\n\t\t\tVersion: \"2.0\",\n\t\t\tID: json.RawMessage(`1`),\n\t\t\tMethod: \"cfx_epochNumber\",\n\t\t}\n\n\t\tbytes, err := json.Marshal(msg)\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\n\t\treturn bytes\n\t}\n\n\treturn nil\n}", "func (_m *RequestContext) JSON(_a0 int, _a1 interface{}) {\n\t_m.Called(_a0, _a1)\n}", "func RespondWithJSON(payload interface{}) string {\n\tjsonObject, err := json.Marshal(payload)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn RespondWithError(500, \"Cannot convert object to JSON string\")\n\t}\n\n\treturn string(jsonObject)\n}", "func (fps *ListRegionsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *Plan_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (c *Config) GetString(pattern string, def ...interface{}) string {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetString(pattern, def...)\n\t}\n\treturn \"\"\n}", "func (c *EzClient) JSONStr(j string) *EzClient {\n\tc.body = strings.NewReader(j)\n\treturn c\n}", "func (t Test1) String() string {\n\tjt, _ := json.Marshal(t)\n\treturn string(jt)\n}", "func (client PrimitiveClient) GetString() (result StringWrapper, err error) {\n req, err := client.GetStringPreparer()\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.GetStringSender(req)\n if err != nil {\n result.Response = autorest.Response{Response: resp}\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", resp, \"Failure sending request\")\n }\n\n result, err = client.GetStringResponder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"GetString\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (fpm *AlertingConditionSpecTimeSeriesCombineThreshold_FieldPathMap) JSONString() string {\n\treturn strcase.ToLowerCamel(fpm.selector.String()) + \".\" + fpm.key\n}", "func JsonString(data string) (*Typed, error) {\n\treturn 
Json([]byte(data))\n}", "func JSONHandler() string {\n\treturn \"hello\"\n}", "func (fps *CreateRegionRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func JSONToString(v interface{}) string {\n\tdata, _ := json.MarshalIndent(v, \"\", \" \")\n\tSTR := string(data)\n\tSTR = strings.ReplaceAll(STR, string(10), ``)\n\treturn STR\n}", "func (fps *Resource_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (m *MockHistogram) String() string {\n\targs := m.Called()\n\treturn args.String(0)\n}", "func generatedString(generator jsonElementGenerator) string {\n\tvar buffer bytes.Buffer\n\twritten, _ := generator.generate(&buffer)\n\treturn string(buffer.Bytes()[0:written])\n}", "func (fps *WatchLimitPoolsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (_e *MockPlcMessage_Expecter) String() *MockPlcMessage_String_Call {\n\treturn &MockPlcMessage_String_Call{Call: _e.mock.On(\"String\")}\n}", "func GetJson(w http.ResponseWriter, Status string, Msg string, httpStatus int) string {\n\tmsgJsonStruct := &JsonMsg{Status, Msg}\n\tmsgJson, errj := json.Marshal(msgJsonStruct)\n\tif errj != nil {\n\t\tmsg := `{\"status\":\"error\",\"message\":\"We could not generate the json error!\"}`\n\t\treturn msg\n\t}\n\treturn string(msgJson)\n}", "func (fps *LimitPool_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (m *Manager) GetJson(name string, target interface{}) (err error) {\n\tfile, ok := m.json[name]\n\tif !ok {\n\t\tfile, err = os.Open(name + \".json\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tm.json[name] = file\n\t}\n\tdecoder := json.NewDecoder(file)\n\terr = decoder.Decode(target)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (mock *KeyValueMock) GetStringCalls() []struct {\n\tKey string\n} {\n\tvar calls []struct {\n\t\tKey string\n\t}\n\tlockKeyValueMockGetString.RLock()\n\tcalls = mock.calls.GetString\n\tlockKeyValueMockGetString.RUnlock()\n\treturn calls\n}", "func (m *MockArguments) GetString(arg0 string) string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetString\", arg0)\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (fps *BatchGetLimitPoolsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *CreateMetricDescriptorRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *UpdateRegionRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *AlertingConditionSpec_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *Limit_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (fps *ListLimitPoolsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func mockTest0104(w http.ResponseWriter, r *http.Request) {\n\twait, err := common.GetIntArgFromQuery(r, \"wait\")\n\tif err != 
nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\tunit, err := common.GetStringArgFromQuery(r, \"unit\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\n\tretIP := `\"42.48.232.7\", \"10.200.20.21\"`\n\tretJSON := fmt.Sprintf(`{\"errno\":0, \"iplist\":[%s]}`, retIP)\n\t// retContent := `{\"errno\":-1, \"iplist\":[]}`\n\tw.Header().Set(common.TextContentLength, strconv.Itoa(len(retJSON)))\n\tw.Header().Set(common.TextContentType, common.ContentTypeJSON)\n\tw.WriteHeader(http.StatusOK)\n\n\tif wait > 0 {\n\t\tif unit == \"milli\" {\n\t\t\ttime.Sleep(time.Duration(wait) * time.Millisecond)\n\t\t} else {\n\t\t\ttime.Sleep(time.Duration(wait) * time.Second)\n\t\t}\n\t}\n\tif _, err := io.Copy(w, bufio.NewReader(strings.NewReader(retJSON))); err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t}\n}", "func GetScriptFailJSONMocked(t *testing.T, script *types.Script) *types.Script {\n\n\tassert := assert.New(t)\n\n\t// wire up\n\tcs := &utils.MockConcertoService{}\n\tds, err := NewScriptService(cs)\n\tassert.Nil(err, \"Couldn't load script service\")\n\tassert.NotNil(ds, \"Script service not instanced\")\n\n\t// wrong json\n\tdIn := []byte{10, 20, 30}\n\n\t// call service\n\tcs.On(\"Get\", fmt.Sprintf(APIPathBlueprintScript, script.ID)).Return(dIn, 200, nil)\n\tscriptOut, err := ds.GetScript(script.ID)\n\n\tassert.NotNil(err, \"We are expecting a marshalling error\")\n\tassert.Nil(scriptOut, \"Expecting nil output\")\n\tassert.Contains(err.Error(), \"invalid character\", \"Error message should include the string 'invalid character'\")\n\n\treturn scriptOut\n}", "func (_m *MockCALIdentifyTag) GetAddressString() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_SimpleStorage *SimpleStorageCaller) GetString(opts *bind.CallOpts) (string, error) {\n\tvar (\n\t\tret0 = new(string)\n\t)\n\tout := ret0\n\terr := _SimpleStorage.contract.Call(opts, out, \"getString\")\n\treturn *ret0, err\n}", "func GetAsJsonF(pIFC interface{}) string {\n\tvar labJSON []byte\n\tvar lsText string\n\t// var lErr error\n\n\tlabJSON, _ = json.MarshalIndent(pIFC, \"\", \" \")\n\n\tlsText = bytes.NewBuffer(labJSON).String()\n\t// log.Println(lsText)\n\treturn lsText\n}", "func (s TelemetryRecord) ToJSONStr() string {\n\treturn string(s.ToJSON())\n}", "func (fps *UpdateLimitPoolRequest_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (t Target) String() string {\n\tjt, _ := json.Marshal(t)\n\treturn string(jt)\n}", "func (c *Client) GetJSONString(path string, prettify bool) (string, error) {\n\tbody, err := c.Get(path)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed: %v\\n\", err)\n\t\treturn \"\", err\n\t}\n\tdefer body.Close()\n\n\tbuf := new(bytes.Buffer)\n\tvar s string\n\n\tif prettify {\n\t\tvar m json.RawMessage\n\n\t\tdec := json.NewDecoder(body)\n\t\tif err := dec.Decode(&m); err != nil {\n\t\t\tlog.Fatalf(\"Failed to decode: %v\\n\", err)\n\t\t\treturn \"\", err\n\t\t}\n\n\t\tb, err := json.MarshalIndent(&m, \"\", \"\\t\")\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error during indent: %v\\n\", err)\n\t\t\treturn \"\", err\n\t\t}\n\n\t\ts = string(b)\n\t} else {\n\t\tbuf.ReadFrom(body)\n\t\ts = buf.String()\n\t}\n\n\treturn s, nil\n}", "func (sr *statusResponse) getJson(s string) []byte {\n\tsr.Status = s\n\tjsr, _ := json.Marshal(sr)\n\treturn jsr\n}", "func GetString(filename, 
JSONpath string) (string, error) {\n\tjf, err := NewFile(filename)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn jf.GetString(JSONpath)\n}", "func (fps *ListMetricDescriptorsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func getJSONFunc(pretty bool) (func(map[string]string) string, string) {\n\t// The function that marshals the records into json\n\tvar jsonFunc func(map[string]string) string\n\t// The linebreak character to use\n\tvar breakLine string\n\n\t// If pretty is enabled, we should format the JSON with line breaks and indentation\n\tif pretty {\n\t\t// The linebreak char will be a newline\n\t\tbreakLine = \"\\n\"\n\t\tjsonFunc = func(record map[string]string) string {\n\t\t\tjsonData, _ := json.MarshalIndent(record, \" \", \" \")\n\t\t\treturn \" \" + string(jsonData)\n\t\t}\n\t} else {\n\t\tbreakLine = \"\"\n\t\tjsonFunc = func(record map[string]string) string {\n\t\t\tjsonData, _ := json.Marshal(record)\n\t\t\treturn string(jsonData)\n\t\t}\n\t}\n\treturn jsonFunc, breakLine\n}", "func (t Test1s) String() string {\n\tjt, _ := json.Marshal(t)\n\treturn string(jt)\n}", "func (m *OutboundMock) AsByteString() (r string) {\n\tcounter := atomic.AddUint64(&m.AsByteStringPreCounter, 1)\n\tdefer atomic.AddUint64(&m.AsByteStringCounter, 1)\n\n\tif len(m.AsByteStringMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.AsByteStringMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to OutboundMock.AsByteString.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.AsByteStringMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the OutboundMock.AsByteString\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringMock.mainExpectation != nil {\n\n\t\tresult := m.AsByteStringMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the OutboundMock.AsByteString\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to OutboundMock.AsByteString.\")\n\t\treturn\n\t}\n\n\treturn m.AsByteStringFunc()\n}", "func (fps *ResendProjectInvitationResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func (i *Info) JSONString(indent bool) string {\n\treturn marshal.SafeJSON(i.ctx, netPrinter{i}, indent)\n}", "func GetTemplateFailJSONMocked(t *testing.T, template *types.Template) *types.Template {\n\n\tassert := assert.New(t)\n\n\t// wire up\n\tcs := &utils.MockConcertoService{}\n\tds, err := NewTemplateService(cs)\n\tassert.Nil(err, \"Couldn't load template service\")\n\tassert.NotNil(ds, \"Template service not instanced\")\n\n\t// wrong json\n\tdIn := []byte{10, 20, 30}\n\n\t// call service\n\tcs.On(\"Get\", fmt.Sprintf(\"/blueprint/templates/%s\", template.ID)).Return(dIn, 200, nil)\n\ttemplateOut, err := ds.GetTemplate(template.ID)\n\tassert.NotNil(err, \"We are expecting a marshalling error\")\n\tassert.Nil(templateOut, \"Expecting nil output\")\n\tassert.Contains(err.Error(), \"invalid character\", \"Error message should include the string 'invalid character'\")\n\n\treturn templateOut\n}", "func getJson(url string, target interface{}) error {\n\tr, err := http.Get(url)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer r.Body.Close()\n\n\treturn json.NewDecoder(r.Body).Decode(target)\n}", "func ToJsonString(obj 
map[string]interface{}, pretty bool) (jsons string, e error) {\n var ba []byte\n\n if pretty {\n ba, e = json.MarshalIndent(obj, \"\", \"\\t\")\n } else {\n ba, e = json.Marshal(obj)\n }\n\n if e == nil {\n jsons = string(ba)\n }\n\n return\n}", "func (_e *MockWriteBufferXmlBased_Expecter) GetXmlString() *MockWriteBufferXmlBased_GetXmlString_Call {\n\treturn &MockWriteBufferXmlBased_GetXmlString_Call{Call: _e.mock.On(\"GetXmlString\")}\n}", "func JSONString(response string) {\n\t// pretty-print the json\n\tutils.PrintJSON([]byte(response))\n}", "func (fps *ListMyProjectInvitationsResponse_FieldSubPath) JSONString() string {\n\treturn strcase.ToLowerCamel(fps.selector.String()) + \".\" + fps.subPath.JSONString()\n}", "func testMarshalJSON(t *testing.T, cmd interface{}) {\n\tjsonCmd, err := json.Marshal(cmd)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\tfmt.Println(string(jsonCmd))\n}", "func jsonString(b []byte) string {\n\ts := string(b)\n\tif s == \"\" {\n\t\treturn \"{}\"\n\t}\n\treturn s\n}", "func (hit Hit) String() string {\n\tout, _ := json.Marshal(hit)\n\treturn string(out)\n}", "func ToJSONString(i interface{}) string {\n\tjsonStr, _ := json.Marshal(i)\n\treturn string(jsonStr)\n}", "func (_SimpleStorage *SimpleStorageCallerSession) GetString() (string, error) {\n\treturn _SimpleStorage.Contract.GetString(&_SimpleStorage.CallOpts)\n}", "func getCommandResponse(cmd_hash string, templatefile string, context CommandEnv) string {\n jsondata := renderTemplate(templatefile, context)\n var objmap map[string]CommandResponse\n // Get json object\n err := json.Unmarshal(jsondata, &objmap)\n if err != nil {\n // Invalid JSON\n fmt.Println(err)\n }\n //fmt.Printf(\"Looking at %s\", cmd_hash)\n // we need to marshal again so we can just get the specific command hash\n value, ok := objmap[cmd_hash]\n if !ok {\n return renderNotFoundError()\n } else {\n cmd_json, de_err := json.Marshal(value)\n if de_err != nil {\n return renderNotFoundError()\n }\n return string(cmd_json)\n }\n}", "func (uc *UseCase) String() string {\n\treturn string(uc.JSON())\n}", "func asJSONText(source interface{}) string {\n\tif source == nil {\n\t\treturn \"\"\n\t}\n\tresult, err := toolbox.AsJSONText(source)\n\tif err == nil {\n\t\treturn result\n\t}\n\treturn toolbox.AsString(source)\n}", "func GetFromJsonString(jsonString, key string) (result string, err error) {\n\tvar responseMap map[string]*json.RawMessage\n\terr = json.Unmarshal([]byte(jsonString), &responseMap)\n\tif err != nil {\n\t\treturn\n\t}\n\tfmt.Println(string(*responseMap[key]))\n\terr = json.Unmarshal(*responseMap[key], &result)\n\treturn\n}", "func (_m *RouterContext) JSON(code int, obj interface{}) {\n\t_m.Called(code, obj)\n}" ]
[ "0.6850838", "0.6505971", "0.5972557", "0.5932386", "0.59088945", "0.5619681", "0.55723035", "0.55525285", "0.5549147", "0.554586", "0.55030924", "0.5439396", "0.54330343", "0.5395226", "0.5366335", "0.53583777", "0.53555435", "0.5353157", "0.5316131", "0.5315162", "0.53015804", "0.5300237", "0.529913", "0.52976894", "0.5297173", "0.52804565", "0.527661", "0.52765733", "0.52744025", "0.5271694", "0.5267587", "0.52671915", "0.52539444", "0.52487934", "0.524818", "0.5247971", "0.52330154", "0.52284706", "0.52278924", "0.5223419", "0.52125174", "0.520785", "0.52047634", "0.5202342", "0.52002484", "0.51968455", "0.5195723", "0.5187475", "0.5178149", "0.5176569", "0.51742893", "0.5157391", "0.5146988", "0.514526", "0.5141324", "0.5139896", "0.51336664", "0.5132858", "0.512528", "0.51244724", "0.5114915", "0.5106254", "0.5101181", "0.5097795", "0.5095991", "0.5093482", "0.50914466", "0.5091138", "0.5083253", "0.50803804", "0.5071794", "0.50684386", "0.50624305", "0.5061673", "0.50587445", "0.5055546", "0.504804", "0.5047361", "0.50446314", "0.50331295", "0.50261253", "0.50221664", "0.50198823", "0.5017505", "0.50172514", "0.5011044", "0.50076425", "0.50039953", "0.49985343", "0.49956983", "0.49910316", "0.49824744", "0.49811384", "0.49805176", "0.49762994", "0.4976018", "0.4975601", "0.49751806", "0.49722537", "0.4969843" ]
0.76541966
0
GetPos provides a mock function with given fields:
func (_m *MockWriteBufferJsonBased) GetPos() uint16 {
	ret := _m.Called()

	var r0 uint16
	if rf, ok := ret.Get(0).(func() uint16); ok {
		r0 = rf()
	} else {
		r0 = ret.Get(0).(uint16)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) GetPos() uint16 {\n\tret := _m.Called()\n\n\tvar r0 uint16\n\tif rf, ok := ret.Get(0).(func() uint16); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(uint16)\n\t}\n\n\treturn r0\n}", "func (m *MockEngine) Position() error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Position\")\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) GetPos() *MockWriteBufferJsonBased_GetPos_Call {\n\treturn &MockWriteBufferJsonBased_GetPos_Call{Call: _e.mock.On(\"GetPos\")}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) GetPos() *MockWriteBufferXmlBased_GetPos_Call {\n\treturn &MockWriteBufferXmlBased_GetPos_Call{Call: _e.mock.On(\"GetPos\")}\n}", "func (m *StoreMock) MinimockGetPositionInspect() {\n\tfor _, e := range m.GetPositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.GetPosition with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetPositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\tif m.GetPositionMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StoreMock.GetPosition\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.GetPosition with params: %#v\", *m.GetPositionMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetPosition != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StoreMock.GetPosition\")\n\t}\n}", "func (m *MockBroker) GetPositions(arg0 ...model.Asset) ([]model.Position, error) {\n\tvarargs := []interface{}{}\n\tfor _, a := range arg0 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetPositions\", varargs...)\n\tret0, _ := ret[0].([]model.Position)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Consumer) Position(partitions []kafka.TopicPartition) ([]kafka.TopicPartition, error) {\n\tret := _m.Called(partitions)\n\n\tvar r0 []kafka.TopicPartition\n\tif rf, ok := ret.Get(0).(func([]kafka.TopicPartition) []kafka.TopicPartition); ok {\n\t\tr0 = rf(partitions)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]kafka.TopicPartition)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]kafka.TopicPartition) error); ok {\n\t\tr1 = rf(partitions)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (obj Player) GetPos() (xf, yf, zf float64, err error) {\n\ts := \"player.getPos(\" + obj.name + \")\"\n\txf = 0.0\n\tyf = 0.0\n\tzf = 0.0\n\tvar r string\n\tr, err = object(obj.obj).sendReceive(s)\n\tif err != nil {\n\t\treturn\n\t}\n\tarr := strings.Split(r, \",\")\n\tarr2 := make([]*float64, 3)\n\tarr2[0] = &xf\n\tarr2[1] = &yf\n\tarr2[2] = &zf\n\tfor index, rs := range arr {\n\t\t*(arr2[index]), err = strconv.ParseFloat(rs, 64)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func (m *MockClient) GetOffset(arg0 string, arg1 int32, arg2 int64) (int64, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetOffset\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockStorage) GetProductPositions(ctx context.Context, productID int64) ([]model.Position, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetProductPositions\", ctx, productID)\n\tret0, _ := 
ret[0].([]model.Position)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Seek(arg0 int64, arg1 int) (int64, error) {\n\tret := m.ctrl.Call(m, \"Seek\", arg0, arg1)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_Contracts *ContractsCaller) MPosition(opts *bind.CallOpts, arg0 *big.Int, arg1 *big.Int) (struct {\n\tProposalId *big.Int\n\tPositionId *big.Int\n\tMaxCandidate uint8\n\tTitle [32]byte\n}, error) {\n\tret := new(struct {\n\t\tProposalId *big.Int\n\t\tPositionId *big.Int\n\t\tMaxCandidate uint8\n\t\tTitle [32]byte\n\t})\n\tout := ret\n\terr := _Contracts.contract.Call(opts, out, \"mPosition\", arg0, arg1)\n\treturn *ret, err\n}", "func (m *MockNode) PositionsByParty(arg0 *api.PositionsByPartyRequest) (*api.PositionsByPartyResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PositionsByParty\", arg0)\n\tret0, _ := ret[0].(*api.PositionsByPartyResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockInterface) Location() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Location\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (mmGetPosition *mStoreMockGetPosition) Set(f func(account string, contractID string) (t1 pb.TotalPosition, err error)) *StoreMock {\n\tif mmGetPosition.defaultExpectation != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"Default expectation is already set for the Store.GetPosition method\")\n\t}\n\n\tif len(mmGetPosition.expectations) > 0 {\n\t\tmmGetPosition.mock.t.Fatalf(\"Some expectations are already set for the Store.GetPosition method\")\n\t}\n\n\tmmGetPosition.mock.funcGetPosition = f\n\treturn mmGetPosition.mock\n}", "func (m *StoreMock) MinimockSavePositionInspect() {\n\tfor _, e := range m.SavePositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.SavePosition with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.SavePositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterSavePositionCounter) < 1 {\n\t\tif m.SavePositionMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StoreMock.SavePosition\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.SavePosition with params: %#v\", *m.SavePositionMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcSavePosition != nil && mm_atomic.LoadUint64(&m.afterSavePositionCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StoreMock.SavePosition\")\n\t}\n}", "func (client *APIClient) GetPosition(commanderName string) (commanderPosition CommanderPosition, err error) {\n\tresponse, err := client.request(\"GET\", urlGetPosition(commanderName), nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = utilities.FromJSON(response, &commanderPosition)\n\treturn\n}", "func (m *MockFile) ReadAt(arg0 []byte, arg1 int64) (int, error) {\n\tret := m.ctrl.Call(m, \"ReadAt\", arg0, arg1)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *IntelligenceServiceInterface) GetLocation(satellites []dtos.Satellites) (utils.Point, error) {\n\tret := _m.Called(satellites)\n\n\tvar r0 utils.Point\n\tif rf, ok := ret.Get(0).(func([]dtos.Satellites) utils.Point); ok {\n\t\tr0 = rf(satellites)\n\t} else {\n\t\tr0 = ret.Get(0).(utils.Point)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]dtos.Satellites) error); 
ok {\n\t\tr1 = rf(satellites)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (o *InputEventScreenTouch) GetPosition() gdnative.Vector2 {\n\t//log.Println(\"Calling InputEventScreenTouch.GetPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"InputEventScreenTouch\", \"get_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (mmGetPosition *mStoreMockGetPosition) When(account string, contractID string) *StoreMockGetPositionExpectation {\n\tif mmGetPosition.mock.funcGetPosition != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"StoreMock.GetPosition mock is already set by Set\")\n\t}\n\n\texpectation := &StoreMockGetPositionExpectation{\n\t\tmock: mmGetPosition.mock,\n\t\tparams: &StoreMockGetPositionParams{account, contractID},\n\t}\n\tmmGetPosition.expectations = append(mmGetPosition.expectations, expectation)\n\treturn expectation\n}", "func (p *Player) getPosition() (int64, bool) {\n\tpos, err := p.Player.GetProperty(INTERFACE + \".Player.Position\")\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\tswitch position := pos.Value().(type) {\n\tcase int64:\n\t\treturn position, true\n\tcase uint64:\n\t\treturn int64(position), true\n\tdefault:\n\t\treturn 0, false\n\t}\n}", "func (m *MockStorage) GetStorePositions(ctx context.Context, storeID int64) ([]model.Position, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetStorePositions\", ctx, storeID)\n\tret0, _ := ret[0].([]model.Position)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (o *InputEventScreenDrag) GetPosition() gdnative.Vector2 {\n\t//log.Println(\"Calling InputEventScreenDrag.GetPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"InputEventScreenDrag\", \"get_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (m *MockCore) Location() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Location\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *MockMovableNode) UpdatePosition(arg0 zounds.Point) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"UpdatePosition\", arg0)\n}", "func (_m *MockENotifyingList) Move(_a0 int, _a1 int) interface{} {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func(int, int) interface{}); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *MockEStore) Move(object EObject, feature EStructuralFeature, targetIndex int, sourceIndex int) interface{} {\n\tret := _m.Called(object, feature, targetIndex, sourceIndex)\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func(EObject, EStructuralFeature, int, int) interface{}); ok {\n\t\tr0 = rf(object, feature, targetIndex, sourceIndex)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 
= ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockTextureMapping2D) Map(si *SurfaceInteraction) (*Point2f, *Vector2f, *Vector2f) {\n\tret := m.ctrl.Call(m, \"Map\", si)\n\tret0, _ := ret[0].(*Point2f)\n\tret1, _ := ret[1].(*Vector2f)\n\tret2, _ := ret[2].(*Vector2f)\n\treturn ret0, ret1, ret2\n}", "func (m *StorageMock) MinimockGetUserLocationInspect() {\n\tfor _, e := range m.GetUserLocationMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.GetUserLocation with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetUserLocationMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetUserLocationCounter) < 1 {\n\t\tif m.GetUserLocationMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StorageMock.GetUserLocation\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.GetUserLocation with params: %#v\", *m.GetUserLocationMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetUserLocation != nil && mm_atomic.LoadUint64(&m.afterGetUserLocationCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StorageMock.GetUserLocation\")\n\t}\n}", "func (self *TileSprite) CameraOffset() *Point{\n return &Point{self.Object.Get(\"cameraOffset\")}\n}", "func (o *VersionedControllerService) GetPositionOk() (*Position, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (m *Mob) GetPosition() context.Position {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\n\treturn *m.position\n}", "func (_m *Manager) Get(ctx context.Context, projectID int64, meta ...string) (map[string]string, error) {\n\t_va := make([]interface{}, len(meta))\n\tfor _i := range meta {\n\t\t_va[_i] = meta[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, projectID)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 map[string]string\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) (map[string]string, error)); ok {\n\t\treturn rf(ctx, projectID, meta...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) map[string]string); ok {\n\t\tr0 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]string)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, int64, ...string) error); ok {\n\t\tr1 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockClusterScoper) Location() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Location\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (mmGetPosition *mStoreMockGetPosition) Calls() []*StoreMockGetPositionParams {\n\tmmGetPosition.mutex.RLock()\n\n\targCopy := make([]*StoreMockGetPositionParams, len(mmGetPosition.callArgs))\n\tcopy(argCopy, mmGetPosition.callArgs)\n\n\tmmGetPosition.mutex.RUnlock()\n\n\treturn argCopy\n}", "func (mmGetPosition *mStoreMockGetPosition) Expect(account string, contractID string) *mStoreMockGetPosition {\n\tif mmGetPosition.mock.funcGetPosition != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"StoreMock.GetPosition mock is already set by Set\")\n\t}\n\n\tif mmGetPosition.defaultExpectation == nil {\n\t\tmmGetPosition.defaultExpectation = &StoreMockGetPositionExpectation{}\n\t}\n\n\tmmGetPosition.defaultExpectation.params = 
&StoreMockGetPositionParams{account, contractID}\n\tfor _, e := range mmGetPosition.expectations {\n\t\tif minimock.Equal(e.params, mmGetPosition.defaultExpectation.params) {\n\t\t\tmmGetPosition.mock.t.Fatalf(\"Expectation set by When has same params: %#v\", *mmGetPosition.defaultExpectation.params)\n\t\t}\n\t}\n\n\treturn mmGetPosition\n}", "func (_m *MockStorage) GetLastRequestTimeInWindow(user string, timeStamp int64) (int64, error) {\n\tret := _m.Called(user, timeStamp)\n\n\tvar r0 int64\n\tif rf, ok := ret.Get(0).(func(string, int64) int64); ok {\n\t\tr0 = rf(user, timeStamp)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, int64) error); ok {\n\t\tr1 = rf(user, timeStamp)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (o *ReportingTaskEntity) GetPositionOk() (*PositionDTO, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (_m *Consumer) Seek(partition kafka.TopicPartition, timeoutMs int) error {\n\tret := _m.Called(partition, timeoutMs)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(kafka.TopicPartition, int) error); ok {\n\t\tr0 = rf(partition, timeoutMs)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *ComponentReferenceDTO) GetPositionOk() (*PositionDTO, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (o *ReportingTaskEntity) GetPosition() PositionDTO {\n\tif o == nil || o.Position == nil {\n\t\tvar ret PositionDTO\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (a *Client) GetPrivateGetPosition(params *GetPrivateGetPositionParams) (*GetPrivateGetPositionOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetPrivateGetPositionParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"GetPrivateGetPosition\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/private/get_position\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &GetPrivateGetPositionReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetPrivateGetPositionOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for GetPrivateGetPosition: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (_m *MockQueryCoord) Start() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Client) Relocate(fromRef string, toRef string) (image.Name, error) {\n\tret := _m.Called(fromRef, toRef)\n\n\tvar r0 image.Name\n\tif rf, ok := ret.Get(0).(func(string, string) image.Name); ok {\n\t\tr0 = rf(fromRef, toRef)\n\t} else {\n\t\tr0 = ret.Get(0).(image.Name)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string) error); ok {\n\t\tr1 = rf(fromRef, toRef)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockMovableNode) Velocity() zounds.Point {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Velocity\")\n\tret0, _ := ret[0].(zounds.Point)\n\treturn ret0\n}", "func (_m *MockStorage) GetRequestCountInWindow(user string, timeStamp int64) (int64, error) {\n\tret := _m.Called(user, timeStamp)\n\n\tvar r0 int64\n\tif rf, ok := ret.Get(0).(func(string, int64) int64); ok {\n\t\tr0 = rf(user, timeStamp)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, int64) error); ok {\n\t\tr1 = rf(user, timeStamp)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (mmGetPosition *mStoreMockGetPosition) Return(t1 pb.TotalPosition, err error) *StoreMock {\n\tif mmGetPosition.mock.funcGetPosition != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"StoreMock.GetPosition mock is already set by Set\")\n\t}\n\n\tif mmGetPosition.defaultExpectation == nil {\n\t\tmmGetPosition.defaultExpectation = &StoreMockGetPositionExpectation{mock: mmGetPosition.mock}\n\t}\n\tmmGetPosition.defaultExpectation.results = &StoreMockGetPositionResults{t1, err}\n\treturn mmGetPosition.mock\n}", "func (m *MockManagedClusterScoper) Location() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Location\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *MockReaderAt) ReadAt(arg0 []byte, arg1 int64) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadAt\", arg0, arg1)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *ReadSeekerCloser) Seek(offset int64, whence int) (int64, error) {\n\tret := _m.Called(offset, whence)\n\n\tvar r0 int64\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(int64, int) (int64, error)); ok {\n\t\treturn rf(offset, whence)\n\t}\n\tif rf, ok := ret.Get(0).(func(int64, int) int64); ok {\n\t\tr0 = rf(offset, whence)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tif rf, ok := ret.Get(1).(func(int64, int) error); ok {\n\t\tr1 = rf(offset, whence)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (lm *LocalMeta) Pos() mysql.Position {\n\tlm.RLock()\n\tdefer lm.RUnlock()\n\n\treturn mysql.Position{Name: lm.BinLogName, Pos: lm.BinLogPos}\n}", "func (level *Level) GetRandomPosition() Position {\n\tx := rand.Intn(level.size.Width)\n\ty := rand.Intn(level.size.Height)\n\t// log.Println(\"Generated a random position at\", Position{x, y})\n\treturn Position{x, y}\n}", "func (v *Provider) Position() (float64, float64, error) {\n\tres, err := v.statusLG()\n\tcoord := res.ResMsg.VehicleStatusInfo.VehicleLocation.Coord\n\treturn coord.Lat, coord.Lon, err\n}", "func (this *AVTransport) GetPositionInfo(instanceId uint32) (*PositionInfo, error) {\n\ttype Response struct {\n\t\tXMLName 
xml.Name\n\t\tPositionInfo\n\t\tErrorResponse\n\t}\n\targs := []Arg{\n\t\t{\"InstanceID\", instanceId},\n\t}\n\tresponse := this.Svc.Call(\"GetPositionInfo\", args)\n\tdoc := Response{}\n\txml.Unmarshal([]byte(response), &doc)\n\treturn &doc.PositionInfo, doc.Error()\n}", "func (self *Rectangle) GetPoint() *Point{\n return &Point{self.Object.Call(\"getPoint\")}\n}", "func (m *MockPacketHandler) LocalAddr() net.Addr {\n\tret := m.ctrl.Call(m, \"LocalAddr\")\n\tret0, _ := ret[0].(net.Addr)\n\treturn ret0\n}", "func (mock *MockWorldMap) GetCellValue(x, y int) int {\n\targs := mock.Called(x, y)\n\treturn args.Int(0)\n}", "func (_m *API) GetTopManga(_type int, page int) ([]model.TopManga, int, error) {\n\tret := _m.Called(_type, page)\n\n\tvar r0 []model.TopManga\n\tif rf, ok := ret.Get(0).(func(int, int) []model.TopManga); ok {\n\t\tr0 = rf(_type, page)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]model.TopManga)\n\t\t}\n\t}\n\n\tvar r1 int\n\tif rf, ok := ret.Get(1).(func(int, int) int); ok {\n\t\tr1 = rf(_type, page)\n\t} else {\n\t\tr1 = ret.Get(1).(int)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(int, int) error); ok {\n\t\tr2 = rf(_type, page)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (t Table) GetPos(d Data) (int64, error) {\n\tdb, err := openDB(t.Config)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tdefer db.Close()\n\tdirection := \"\"\n\tif d.DescOrder {\n\t\tdirection = \"DESC\"\n\t}\n\tsqlQuery := fmt.Sprintf(`SELECT ROW_NUMBER FROM (SELECT ROW_NUMBER() OVER (ORDER BY %s %s), %s FROM %s) x WHERE %s=$1`, d.OrderBy, direction, d.Key, t.Name, d.Key)\n\trow := db.QueryRow(sqlQuery, d.KeyVal)\n\tvar position int64\n\terr = row.Scan(&position)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn position, nil\n}", "func processPosition(m map[string]interface{}) (float64, float64, bool) {\n\tl0, ok := m[\"results\"].([]interface{})\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\tl1, ok := l0[0].(map[string]interface{})\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\tl2, ok := l1[\"geometry\"].(map[string]interface{})\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\tl3, ok := l2[\"location\"].(map[string]interface{})\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\tlatitude, ok := l3[\"lat\"].(float64)\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\tlongitude, ok := l3[\"lng\"].(float64)\n\tif !ok {\n\t\treturn 0, 0, false\n\t}\n\treturn latitude, longitude, true\n}", "func (o *InlineResponse20051TodoItems) GetPosition() string {\n\tif o == nil || o.Position == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (_m *Manager) Update(ctx context.Context, projectID int64, meta map[string]string) error {\n\tret := _m.Called(ctx, projectID, meta)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, map[string]string) error); ok {\n\t\tr0 = rf(ctx, projectID, meta)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *PredictService) GetTime(ctx context.Context, point app.Point, cars []*app.Car) ([]int64, error) {\n\tret := _m.Called(ctx, point, cars)\n\n\tvar r0 []int64\n\tif rf, ok := ret.Get(0).(func(context.Context, app.Point, []*app.Car) []int64); ok {\n\t\tr0 = rf(ctx, point, cars)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]int64)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, app.Point, []*app.Car) error); ok {\n\t\tr1 = rf(ctx, point, cars)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func 
Test_Client_MapByCallingCode(t *testing.T) {\n\tret := mockClient.MapByCallingCode(\"65\")\n\tassert.Equal(t, ret[0].Name, \"Singapore\")\n}", "func (_m *TeamStore) GetNewTeamMembersSince(teamID string, since int64, offset int, limit int) (*model.NewTeamMembersList, int64, error) {\n\tret := _m.Called(teamID, since, offset, limit)\n\n\tvar r0 *model.NewTeamMembersList\n\tif rf, ok := ret.Get(0).(func(string, int64, int, int) *model.NewTeamMembersList); ok {\n\t\tr0 = rf(teamID, since, offset, limit)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.NewTeamMembersList)\n\t\t}\n\t}\n\n\tvar r1 int64\n\tif rf, ok := ret.Get(1).(func(string, int64, int, int) int64); ok {\n\t\tr1 = rf(teamID, since, offset, limit)\n\t} else {\n\t\tr1 = ret.Get(1).(int64)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(string, int64, int, int) error); ok {\n\t\tr2 = rf(teamID, since, offset, limit)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func newMockGetDataFunc(mockStore *mock.NodeStore) func(addr Address) (data []byte, err error) {\n\treturn func(addr Address) (data []byte, err error) {\n\t\tdata, err = mockStore.Get(addr)\n\t\tif err == mock.ErrNotFound {\n\t\t\t// preserve ErrChunkNotFound error\n\t\t\terr = ErrChunkNotFound\n\t\t}\n\t\treturn data, err\n\t}\n}", "func (_m *DiffStore) Get(priority int64, mainDigest types.Digest, rightDigests types.DigestSlice) (map[types.Digest]interface{}, error) {\n\tret := _m.Called(priority, mainDigest, rightDigests)\n\n\tvar r0 map[types.Digest]interface{}\n\tif rf, ok := ret.Get(0).(func(int64, types.Digest, types.DigestSlice) map[types.Digest]interface{}); ok {\n\t\tr0 = rf(priority, mainDigest, rightDigests)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[types.Digest]interface{})\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(int64, types.Digest, types.DigestSlice) error); ok {\n\t\tr1 = rf(priority, mainDigest, rightDigests)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *OrderRepository) FindRange(limit, offset int) (*[]order.Order, error) {\n\tret := _m.Called(limit, offset)\n\n\tvar r0 *[]order.Order\n\tif rf, ok := ret.Get(0).(func(int, int) *[]order.Order); ok {\n\t\tr0 = rf(limit, offset)\n\t} else {\n\t\tif _, ok := ret.Get(0).(*[]order.Order); ok {\n\t\t\tr0 = ret.Get(0).(*[]order.Order)\n\t\t} else {\n\t\t\tr0 = nil\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(int, int) error); ok {\n\t\tr1 = rf(limit, offset)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *Consumer) QueryWatermarkOffsets(topic string, partition int32, timeoutMs int) (int64, int64, error) {\n\tret := _m.Called(topic, partition, timeoutMs)\n\n\tvar r0 int64\n\tif rf, ok := ret.Get(0).(func(string, int32, int) int64); ok {\n\t\tr0 = rf(topic, partition, timeoutMs)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tvar r1 int64\n\tif rf, ok := ret.Get(1).(func(string, int32, int) int64); ok {\n\t\tr1 = rf(topic, partition, timeoutMs)\n\t} else {\n\t\tr1 = ret.Get(1).(int64)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(string, int32, int) error); ok {\n\t\tr2 = rf(topic, partition, timeoutMs)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (mon *Monitor) GetPosition() (int, int) {\n\tvar x, y C.int\n\tC.glfwGetMonitorPos(mon.internalPtr, &x, &y)\n\treturn int(x), int(y)\n}", "func (m *MockClusterDescriber) Location() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Location\")\n\tret0, _ 
:= ret[0].(string)\n\treturn ret0\n}", "func MockResultFileLocationFromFile(f string) (*ResultFileLocation, error) {\n\tfileInfo, err := os.Stat(f)\n\tif err != nil {\n\t\treturn nil, skerr.Wrapf(err, \"could not stat %s\", f)\n\t}\n\n\t// Read file into buffer and calculate the md5 in the process.\n\tfile, err := os.Open(f)\n\tif err != nil {\n\t\treturn nil, skerr.Wrapf(err, \"could not open %s\", f)\n\t}\n\tdefer util.Close(file)\n\n\tvar buf bytes.Buffer\n\thash, err := util.MD5FromReader(file, &buf)\n\tif err != nil {\n\t\treturn nil, skerr.Wrapf(err, \"unable to compute MD5 hash of %s\", f)\n\t}\n\n\tmrf := &ResultFileLocation{}\n\n\tmrf.On(\"Name\").Return(f)\n\tmrf.On(\"MD5\").Return(hash)\n\tmrf.On(\"Open\", testutils.AnyContext).Return(ioutil.NopCloser(&buf), nil)\n\tmrf.On(\"Content\").Return(buf.Bytes())\n\tmrf.On(\"TimeStamp\").Return(fileInfo.ModTime().Unix())\n\treturn mrf, nil\n}", "func (m *MockInfluxDB) GetFieldValueIfExist(MyDB, columnName, measurement string, roundedTime time.Time) int {\n\tret := m.ctrl.Call(m, \"GetFieldValueIfExist\", MyDB, columnName, measurement, roundedTime)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *API) GetMangaMoreInfo(id int) (string, int, error) {\n\tret := _m.Called(id)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(int) string); ok {\n\t\tr0 = rf(id)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 int\n\tif rf, ok := ret.Get(1).(func(int) int); ok {\n\t\tr1 = rf(id)\n\t} else {\n\t\tr1 = ret.Get(1).(int)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(int) error); ok {\n\t\tr2 = rf(id)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (_m *MockDataCoord) Start() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (d *Depot) GetPosition() (X, Y float64) {\n\treturn d.X, d.Y\n}", "func (m *StoreMock) MinimockGetPositionDone() bool {\n\tfor _, e := range m.GetPositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetPositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\treturn false\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetPosition != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\treturn false\n\t}\n\treturn true\n}", "func (mmGetPosition *StoreMock) GetPositionBeforeCounter() uint64 {\n\treturn mm_atomic.LoadUint64(&mmGetPosition.beforeGetPositionCounter)\n}", "func (_m *MapFunc) Get(m map[string]func(string) string) error {\n\tret := _m.Called(m)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(map[string]func(string) string) error); ok {\n\t\tr0 = rf(m)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInformation) LocalLocation() *universe.View {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"LocalLocation\")\n\tret0, _ := ret[0].(*universe.View)\n\treturn ret0\n}", "func (self *Rectangle) GetPointI(args ...interface{}) *Point{\n return &Point{self.Object.Call(\"getPoint\", args)}\n}", "func (mmSavePosition *mStoreMockSavePosition) Expect(t1 pb.TotalPosition) *mStoreMockSavePosition {\n\tif mmSavePosition.mock.funcSavePosition != nil {\n\t\tmmSavePosition.mock.t.Fatalf(\"StoreMock.SavePosition mock is already set by Set\")\n\t}\n\n\tif 
mmSavePosition.defaultExpectation == nil {\n\t\tmmSavePosition.defaultExpectation = &StoreMockSavePositionExpectation{}\n\t}\n\n\tmmSavePosition.defaultExpectation.params = &StoreMockSavePositionParams{t1}\n\tfor _, e := range mmSavePosition.expectations {\n\t\tif minimock.Equal(e.params, mmSavePosition.defaultExpectation.params) {\n\t\t\tmmSavePosition.mock.t.Fatalf(\"Expectation set by When has same params: %#v\", *mmSavePosition.defaultExpectation.params)\n\t\t}\n\t}\n\n\treturn mmSavePosition\n}", "func (_m *MockHistoryer) ParsedBlockSeq(tx *dbutil.Tx) (uint64, bool, error) {\n\tret := _m.Called(tx)\n\n\tvar r0 uint64\n\tif rf, ok := ret.Get(0).(func(*dbutil.Tx) uint64); ok {\n\t\tr0 = rf(tx)\n\t} else {\n\t\tr0 = ret.Get(0).(uint64)\n\t}\n\n\tvar r1 bool\n\tif rf, ok := ret.Get(1).(func(*dbutil.Tx) bool); ok {\n\t\tr1 = rf(tx)\n\t} else {\n\t\tr1 = ret.Get(1).(bool)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(*dbutil.Tx) error); ok {\n\t\tr2 = rf(tx)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (_m *Consumer) GetWatermarkOffsets(topic string, partition int32) (int64, int64, error) {\n\tret := _m.Called(topic, partition)\n\n\tvar r0 int64\n\tif rf, ok := ret.Get(0).(func(string, int32) int64); ok {\n\t\tr0 = rf(topic, partition)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tvar r1 int64\n\tif rf, ok := ret.Get(1).(func(string, int32) int64); ok {\n\t\tr1 = rf(topic, partition)\n\t} else {\n\t\tr1 = ret.Get(1).(int64)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(string, int32) error); ok {\n\t\tr2 = rf(topic, partition)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (o *VersionedControllerService) GetPosition() Position {\n\tif o == nil || o.Position == nil {\n\t\tvar ret Position\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (p *player) calculatePosition() (float64, float64) {\n\tif p.startedMoving == 0 {\n\t\treturn p.location.x, p.location.y\n\t}\n\tcurrentTime := time.Now().UnixNano()\n\ttimeSince := float64((currentTime-p.startedMoving)/1000000) / 1000\n\tchangeX := timeSince * p.velocity.x\n\tx := p.location.x + changeX\n\ty := p.location.y\n\tif p.velocity.y != 0 {\n\t\tgravity := (timeSince / 2 * world_gravity)\n\t\tvelocity := p.velocity.y + gravity\n\t\ty = p.location.y + (timeSince * velocity)\n\t}\n\t//b, _ := w.inPlatform(x, y)\n\t//log.Println(\"in platform:\", b, x, y)\n\treturn x, y\n}", "func (l *Location) Pos() uint64 {\n\treturn l.pos\n}", "func (m *MockScheduler) GetSagaCoord() saga.SagaCoordinator {\n\tret := m.ctrl.Call(m, \"GetSagaCoord\")\n\tret0, _ := ret[0].(saga.SagaCoordinator)\n\treturn ret0\n}", "func (mmSavePosition *mStoreMockSavePosition) When(t1 pb.TotalPosition) *StoreMockSavePositionExpectation {\n\tif mmSavePosition.mock.funcSavePosition != nil {\n\t\tmmSavePosition.mock.t.Fatalf(\"StoreMock.SavePosition mock is already set by Set\")\n\t}\n\n\texpectation := &StoreMockSavePositionExpectation{\n\t\tmock: mmSavePosition.mock,\n\t\tparams: &StoreMockSavePositionParams{t1},\n\t}\n\tmmSavePosition.expectations = append(mmSavePosition.expectations, expectation)\n\treturn expectation\n}", "func (_m *MockReaderIterator) Current() (ts.Datapoint, time0.Unit, ts.Annotation) {\n\tret := _m.ctrl.Call(_m, \"Current\")\n\tret0, _ := ret[0].(ts.Datapoint)\n\tret1, _ := ret[1].(time0.Unit)\n\tret2, _ := ret[2].(ts.Annotation)\n\treturn ret0, ret1, ret2\n}", "func TestExtractLatLngFromRequest(t *testing.T) {\n\tg := &GoogleGeocoder{}\n\n\tdata, err := 
GetMockResponse(\"test/helpers/google_geocode_success.json\")\n\tif err != nil {\n\t\tt.Error(\"%v\\n\", err)\n\t}\n\n\tlat, lng := g.extractLatLngFromResponse(data)\n\tif lat != 37.615223 && lng != -122.389979 {\n\t\tt.Error(fmt.Sprintf(\"Expected: [37.615223, -122.389979], Got: [%f, %f]\", lat, lng))\n\t}\n}", "func (m *MessagePosition) GetPosition() (value int32) {\n\tif m == nil {\n\t\treturn\n\t}\n\treturn m.Position\n}", "func (_m *MockIterator) Current() (ts.Datapoint, time0.Unit, ts.Annotation) {\n\tret := _m.ctrl.Call(_m, \"Current\")\n\tret0, _ := ret[0].(ts.Datapoint)\n\tret1, _ := ret[1].(time0.Unit)\n\tret2, _ := ret[2].(ts.Annotation)\n\treturn ret0, ret1, ret2\n}", "func (_Contracts *ContractsCallerSession) MPosition(arg0 *big.Int, arg1 *big.Int) (struct {\n\tProposalId *big.Int\n\tPositionId *big.Int\n\tMaxCandidate uint8\n\tTitle [32]byte\n}, error) {\n\treturn _Contracts.Contract.MPosition(&_Contracts.CallOpts, arg0, arg1)\n}", "func (m *MockProc) Address() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Address\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *PaymentRequestListFetcher) FetchPaymentRequestListByMove(officeUserID uuid.UUID, locator string) (*models.PaymentRequests, error) {\n\tret := _m.Called(officeUserID, locator)\n\n\tvar r0 *models.PaymentRequests\n\tif rf, ok := ret.Get(0).(func(uuid.UUID, string) *models.PaymentRequests); ok {\n\t\tr0 = rf(officeUserID, locator)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*models.PaymentRequests)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(uuid.UUID, string) error); ok {\n\t\tr1 = rf(officeUserID, locator)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (self *TileSprite) TilePosition() *Point{\n return &Point{self.Object.Get(\"tilePosition\")}\n}", "func (o *os) GetWindowPosition() gdnative.Vector2 {\n\to.ensureSingleton()\n\t//log.Println(\"Calling _OS.GetWindowPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"_OS\", \"get_window_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (_m *ChannelStore) UpdateLastViewedAt(channelIds []string, userID string) (map[string]int64, error) {\n\tret := _m.Called(channelIds, userID)\n\n\tvar r0 map[string]int64\n\tif rf, ok := ret.Get(0).(func([]string, string) map[string]int64); ok {\n\t\tr0 = rf(channelIds, userID)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]int64)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]string, string) error); ok {\n\t\tr1 = rf(channelIds, userID)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}" ]
[ "0.6899186", "0.64312905", "0.6404954", "0.61685777", "0.5884713", "0.58194864", "0.5684507", "0.56103456", "0.5553841", "0.55433613", "0.53670895", "0.5329959", "0.53229207", "0.5260972", "0.52503383", "0.5234564", "0.5223121", "0.521651", "0.5169736", "0.51623106", "0.5155069", "0.5139987", "0.51200294", "0.51135916", "0.5103358", "0.50938404", "0.5066272", "0.50545865", "0.49841824", "0.49835324", "0.4973556", "0.4972789", "0.49695498", "0.49590597", "0.49378476", "0.49209574", "0.4914987", "0.49103728", "0.49100876", "0.49094927", "0.4903846", "0.4900986", "0.48999658", "0.48978442", "0.48969343", "0.48837963", "0.4878767", "0.48463985", "0.4833356", "0.48279798", "0.48240623", "0.4815402", "0.48088372", "0.47904107", "0.47875017", "0.47823018", "0.4777885", "0.47729343", "0.4771242", "0.4769722", "0.47691962", "0.4763252", "0.47547767", "0.47522086", "0.47471786", "0.47462696", "0.4737738", "0.47370815", "0.47321507", "0.47309688", "0.47255296", "0.47243086", "0.4720054", "0.47168556", "0.47138172", "0.47088596", "0.4706082", "0.47005203", "0.46966866", "0.46940398", "0.46913216", "0.4686002", "0.46855783", "0.4685268", "0.46830165", "0.46743014", "0.4671881", "0.46697968", "0.4668888", "0.46688312", "0.46624273", "0.46610412", "0.46571273", "0.46562842", "0.46550047", "0.4653698", "0.46528023", "0.4651818", "0.46431893", "0.46418458" ]
0.71135765
0
GetPos is a helper method to define mock.On call
func (_e *MockWriteBufferJsonBased_Expecter) GetPos() *MockWriteBufferJsonBased_GetPos_Call { return &MockWriteBufferJsonBased_GetPos_Call{Call: _e.mock.On("GetPos")} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) GetPos() *MockWriteBufferXmlBased_GetPos_Call {\n\treturn &MockWriteBufferXmlBased_GetPos_Call{Call: _e.mock.On(\"GetPos\")}\n}", "func (_m *MockWriteBufferJsonBased) GetPos() uint16 {\n\tret := _m.Called()\n\n\tvar r0 uint16\n\tif rf, ok := ret.Get(0).(func() uint16); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(uint16)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) GetPos() uint16 {\n\tret := _m.Called()\n\n\tvar r0 uint16\n\tif rf, ok := ret.Get(0).(func() uint16); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(uint16)\n\t}\n\n\treturn r0\n}", "func (m *StoreMock) MinimockGetPositionInspect() {\n\tfor _, e := range m.GetPositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.GetPosition with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetPositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\tif m.GetPositionMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StoreMock.GetPosition\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.GetPosition with params: %#v\", *m.GetPositionMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetPosition != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StoreMock.GetPosition\")\n\t}\n}", "func (m *MockEngine) Position() error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Position\")\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (client *APIClient) GetPosition(commanderName string) (commanderPosition CommanderPosition, err error) {\n\tresponse, err := client.request(\"GET\", urlGetPosition(commanderName), nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = utilities.FromJSON(response, &commanderPosition)\n\treturn\n}", "func (m *Mob) GetPosition() context.Position {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\n\treturn *m.position\n}", "func (o *InputEventScreenTouch) GetPosition() gdnative.Vector2 {\n\t//log.Println(\"Calling InputEventScreenTouch.GetPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"InputEventScreenTouch\", \"get_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (player *Player) Position(f func(position mctype.Position)) {\n\tplayer.Exec(command.QueryTargetRequest(mctype.Target(player.name)), func(response *command.QueryTarget) {\n\t\tif len(*response.Details) == 1 {\n\t\t\tf((*response.Details)[0].Position)\n\t\t}\n\t})\n}", "func (o *InputEventScreenDrag) GetPosition() gdnative.Vector2 {\n\t//log.Println(\"Calling InputEventScreenDrag.GetPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"InputEventScreenDrag\", \"get_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, 
o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (_Contracts *ContractsCaller) MPosition(opts *bind.CallOpts, arg0 *big.Int, arg1 *big.Int) (struct {\n\tProposalId *big.Int\n\tPositionId *big.Int\n\tMaxCandidate uint8\n\tTitle [32]byte\n}, error) {\n\tret := new(struct {\n\t\tProposalId *big.Int\n\t\tPositionId *big.Int\n\t\tMaxCandidate uint8\n\t\tTitle [32]byte\n\t})\n\tout := ret\n\terr := _Contracts.contract.Call(opts, out, \"mPosition\", arg0, arg1)\n\treturn *ret, err\n}", "func (self *Rectangle) GetPoint() *Point{\n return &Point{self.Object.Call(\"getPoint\")}\n}", "func (sw *subscriptionWorker) GetPosition(ctx context.Context) (int64, error) {\n\tlog := logrus.\n\t\tWithFields(logrus.Fields{\n\t\t\t\"SubscriberID\": sw.subscriberID,\n\t\t})\n\n\tmsgs, _ := sw.ms.Get(\n\t\tctx,\n\t\tPositionStream(sw.subscriberID),\n\t\tConverter(convertEnvelopeToPositionMessage),\n\t\tLast(),\n\t)\n\tif len(msgs) < 1 {\n\t\tlog.Debug(\"no messages found for subscriber, using default\")\n\t\treturn 0, nil\n\t}\n\n\tswitch pos := msgs[0].(type) {\n\tcase *positionMessage:\n\t\treturn pos.MyPosition, nil\n\tdefault:\n\t\tlog.\n\t\t\tWithError(ErrIncorrectMessageInPositionStream).\n\t\t\tError(\"incorrect message type in position stream\")\n\t\treturn 0, nil\n\t}\n}", "func (o *InlineResponse20051TodoItems) GetPosition() string {\n\tif o == nil || o.Position == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (m *MockBroker) GetPositions(arg0 ...model.Asset) ([]model.Position, error) {\n\tvarargs := []interface{}{}\n\tfor _, a := range arg0 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"GetPositions\", varargs...)\n\tret0, _ := ret[0].([]model.Position)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (this *AVTransport) GetPositionInfo(instanceId uint32) (*PositionInfo, error) {\n\ttype Response struct {\n\t\tXMLName xml.Name\n\t\tPositionInfo\n\t\tErrorResponse\n\t}\n\targs := []Arg{\n\t\t{\"InstanceID\", instanceId},\n\t}\n\tresponse := this.Svc.Call(\"GetPositionInfo\", args)\n\tdoc := Response{}\n\txml.Unmarshal([]byte(response), &doc)\n\treturn &doc.PositionInfo, doc.Error()\n}", "func (mmGetPosition *mStoreMockGetPosition) Calls() []*StoreMockGetPositionParams {\n\tmmGetPosition.mutex.RLock()\n\n\targCopy := make([]*StoreMockGetPositionParams, len(mmGetPosition.callArgs))\n\tcopy(argCopy, mmGetPosition.callArgs)\n\n\tmmGetPosition.mutex.RUnlock()\n\n\treturn argCopy\n}", "func (device *SilentStepperBrick) GetTargetPosition() (position int32, err error) {\n\tvar buf bytes.Buffer\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetTargetPosition), buf.Bytes())\n\tif err != nil {\n\t\treturn position, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 12 {\n\t\t\treturn position, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 12)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn position, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &position)\n\n\t}\n\n\treturn position, nil\n}", "func (obj Player) GetPos() (xf, yf, zf float64, err error) {\n\ts := \"player.getPos(\" + obj.name + \")\"\n\txf = 0.0\n\tyf = 0.0\n\tzf = 0.0\n\tvar r string\n\tr, err = 
object(obj.obj).sendReceive(s)\n\tif err != nil {\n\t\treturn\n\t}\n\tarr := strings.Split(r, \",\")\n\tarr2 := make([]*float64, 3)\n\tarr2[0] = &xf\n\tarr2[1] = &yf\n\tarr2[2] = &zf\n\tfor index, rs := range arr {\n\t\t*(arr2[index]), err = strconv.ParseFloat(rs, 64)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func (m *StoreMock) MinimockSavePositionInspect() {\n\tfor _, e := range m.SavePositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.SavePosition with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.SavePositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterSavePositionCounter) < 1 {\n\t\tif m.SavePositionMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StoreMock.SavePosition\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.SavePosition with params: %#v\", *m.SavePositionMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcSavePosition != nil && mm_atomic.LoadUint64(&m.afterSavePositionCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StoreMock.SavePosition\")\n\t}\n}", "func (p *Player) getPosition() (int64, bool) {\n\tpos, err := p.Player.GetProperty(INTERFACE + \".Player.Position\")\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\tswitch position := pos.Value().(type) {\n\tcase int64:\n\t\treturn position, true\n\tcase uint64:\n\t\treturn int64(position), true\n\tdefault:\n\t\treturn 0, false\n\t}\n}", "func (self *Rectangle) GetPoint1O(position int) *Point{\n return &Point{self.Object.Call(\"getPoint\", position)}\n}", "func (_e *MockQueryCoord_Expecter) Start() *MockQueryCoord_Start_Call {\n\treturn &MockQueryCoord_Start_Call{Call: _e.mock.On(\"Start\")}\n}", "func (m *Mock) On(methodName string, arguments ...interface{}) *Call {\n\tfor _, arg := range arguments {\n\t\tif v := reflect.ValueOf(arg); v.Kind() == reflect.Func {\n\t\t\tpanic(fmt.Sprintf(\"cannot use Func in expectations. 
Use mock.AnythingOfType(\\\"%T\\\")\", arg))\n\t\t}\n\t}\n\n\tm.mutex.Lock()\n\tdefer m.mutex.Unlock()\n\tc := newCall(m, methodName, assert.CallerInfo(), arguments...)\n\tm.ExpectedCalls = append(m.ExpectedCalls, c)\n\treturn c\n}", "func (mmGetPosition *mStoreMockGetPosition) When(account string, contractID string) *StoreMockGetPositionExpectation {\n\tif mmGetPosition.mock.funcGetPosition != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"StoreMock.GetPosition mock is already set by Set\")\n\t}\n\n\texpectation := &StoreMockGetPositionExpectation{\n\t\tmock: mmGetPosition.mock,\n\t\tparams: &StoreMockGetPositionParams{account, contractID},\n\t}\n\tmmGetPosition.expectations = append(mmGetPosition.expectations, expectation)\n\treturn expectation\n}", "func (mon *Monitor) GetPosition() (int, int) {\n\tvar x, y C.int\n\tC.glfwGetMonitorPos(mon.internalPtr, &x, &y)\n\treturn int(x), int(y)\n}", "func (device *ServoBrick) GetPosition(servoNum uint8) (position int16, err error) {\n\tvar buf bytes.Buffer\n\tbinary.Write(&buf, binary.LittleEndian, servoNum)\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetPosition), buf.Bytes())\n\tif err != nil {\n\t\treturn position, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 10 {\n\t\t\treturn position, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 10)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn position, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &position)\n\n\t}\n\n\treturn position, nil\n}", "func (_e *MockDataCoord_Expecter) Start() *MockDataCoord_Start_Call {\n\treturn &MockDataCoord_Start_Call{Call: _e.mock.On(\"Start\")}\n}", "func (mmGetPosition *mStoreMockGetPosition) Set(f func(account string, contractID string) (t1 pb.TotalPosition, err error)) *StoreMock {\n\tif mmGetPosition.defaultExpectation != nil {\n\t\tmmGetPosition.mock.t.Fatalf(\"Default expectation is already set for the Store.GetPosition method\")\n\t}\n\n\tif len(mmGetPosition.expectations) > 0 {\n\t\tmmGetPosition.mock.t.Fatalf(\"Some expectations are already set for the Store.GetPosition method\")\n\t}\n\n\tmmGetPosition.mock.funcGetPosition = f\n\treturn mmGetPosition.mock\n}", "func (mock *MockWorldMap) GetCellValue(x, y int) int {\n\targs := mock.Called(x, y)\n\treturn args.Int(0)\n}", "func (m *StorageMock) MinimockGetUserLocationInspect() {\n\tfor _, e := range m.GetUserLocationMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.GetUserLocation with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetUserLocationMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetUserLocationCounter) < 1 {\n\t\tif m.GetUserLocationMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StorageMock.GetUserLocation\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to StorageMock.GetUserLocation with params: %#v\", *m.GetUserLocationMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetUserLocation != nil && mm_atomic.LoadUint64(&m.afterGetUserLocationCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StorageMock.GetUserLocation\")\n\t}\n}", "func (o *VersionedControllerService) GetPosition() Position 
{\n\tif o == nil || o.Position == nil {\n\t\tvar ret Position\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (o *VersionedControllerService) GetPositionOk() (*Position, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (_e *ReadSeekerCloser_Expecter) Seek(offset interface{}, whence interface{}) *ReadSeekerCloser_Seek_Call {\n\treturn &ReadSeekerCloser_Seek_Call{Call: _e.mock.On(\"Seek\", offset, whence)}\n}", "func (m *MessagePosition) GetPosition() (value int32) {\n\tif m == nil {\n\t\treturn\n\t}\n\treturn m.Position\n}", "func (n *Call) Pos() token.Pos {\n\tif n == nil {\n\t\treturn 0\n\t}\n\n\treturn n.Token.Pos()\n}", "func (t *Terrain) PositionOn(x int) gmath.Vector2i {\n\treturn gmath.Vector2i{X: x, Y: t.HeightOn(x)}\n}", "func (self *Rectangle) GetPointI(args ...interface{}) *Point{\n return &Point{self.Object.Call(\"getPoint\", args)}\n}", "func (robot Robot) GetTargetPosition() Pose {\n\treturn robot.targetPosition\n}", "func (lm *LocalMeta) Pos() mysql.Position {\n\tlm.RLock()\n\tdefer lm.RUnlock()\n\n\treturn mysql.Position{Name: lm.BinLogName, Pos: lm.BinLogPos}\n}", "func (self *Rectangle) OffsetPoint(point *Point) *Rectangle{\n return &Rectangle{self.Object.Call(\"offsetPoint\", point)}\n}", "func (self *PhysicsP2) Mpx(v int) int{\n return self.Object.Call(\"mpx\", v).Int()\n}", "func mockHandler(resp queryrange.Response, err error) queryrange.Handler {\n\treturn queryrange.HandlerFunc(func(ctx context.Context, req queryrange.Request) (queryrange.Response, error) {\n\t\tif expired := ctx.Err(); expired != nil {\n\t\t\treturn nil, expired\n\t\t}\n\n\t\treturn resp, err\n\t})\n}", "func (device *ServoBrick) GetCurrentPosition(servoNum uint8) (position int16, err error) {\n\tvar buf bytes.Buffer\n\tbinary.Write(&buf, binary.LittleEndian, servoNum)\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetCurrentPosition), buf.Bytes())\n\tif err != nil {\n\t\treturn position, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 10 {\n\t\t\treturn position, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 10)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn position, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &position)\n\n\t}\n\n\treturn position, nil\n}", "func (m *MockClient) GetOffset(arg0 string, arg1 int32, arg2 int64) (int64, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetOffset\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (bb *ByteSliceBuffer) GetPos(pos uint64) ([]byte, bool) {\n\tif n, ok := bb.Buffer.TransPos(pos); ok {\n\t\treturn bb.data[n], true\n\t}\n\treturn nil, false\n}", "func (da *DataFrame) GetPos(j int) interface{} {\n\tif da.done {\n\t\treturn nil\n\t}\n\n\treturn da.data[j][da.chunk-1]\n}", "func (n *Try) GetPosition() *position.Position {\n\treturn n.Position\n}", "func (self *Rectangle) OffsetPointI(args ...interface{}) *Rectangle{\n return &Rectangle{self.Object.Call(\"offsetPoint\", args)}\n}", "func (n *Property) GetPosition() *position.Position {\n\treturn n.Position\n}", "func (o *os) GetWindowPosition() gdnative.Vector2 {\n\to.ensureSingleton()\n\t//log.Println(\"Calling _OS.GetWindowPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 
0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"_OS\", \"get_window_position\")\n\n\t// Call the parent method.\n\t// Vector2\n\tretPtr := gdnative.NewEmptyVector2()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewVector2FromPointer(retPtr)\n\treturn ret\n}", "func (device *SilentStepperBrick) GetCurrentPosition() (position int32, err error) {\n\tvar buf bytes.Buffer\n\n\tresultBytes, err := device.device.Get(uint8(FunctionGetCurrentPosition), buf.Bytes())\n\tif err != nil {\n\t\treturn position, err\n\t}\n\tif len(resultBytes) > 0 {\n\t\tvar header PacketHeader\n\n\t\theader.FillFromBytes(resultBytes)\n\n\t\tif header.Length != 12 {\n\t\t\treturn position, fmt.Errorf(\"Received packet of unexpected size %d, instead of %d\", header.Length, 12)\n\t\t}\n\n\t\tif header.ErrorCode != 0 {\n\t\t\treturn position, DeviceError(header.ErrorCode)\n\t\t}\n\n\t\tresultBuf := bytes.NewBuffer(resultBytes[8:])\n\t\tbinary.Read(resultBuf, binary.LittleEndian, &position)\n\n\t}\n\n\treturn position, nil\n}", "func (n *Argument) GetPosition() *position.Position {\n\treturn n.Position\n}", "func (n *TraitUsePrecedence) GetPosition() *position.Position {\n\treturn n.Position\n}", "func (o *ReportingTaskEntity) GetPosition() PositionDTO {\n\tif o == nil || o.Position == nil {\n\t\tvar ret PositionDTO\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func (m *WorkbookWorksheet) GetPosition()(*int32) {\n return m.position\n}", "func (p *player) calculatePosition() (float64, float64) {\n\tif p.startedMoving == 0 {\n\t\treturn p.location.x, p.location.y\n\t}\n\tcurrentTime := time.Now().UnixNano()\n\ttimeSince := float64((currentTime-p.startedMoving)/1000000) / 1000\n\tchangeX := timeSince * p.velocity.x\n\tx := p.location.x + changeX\n\ty := p.location.y\n\tif p.velocity.y != 0 {\n\t\tgravity := (timeSince / 2 * world_gravity)\n\t\tvelocity := p.velocity.y + gravity\n\t\ty = p.location.y + (timeSince * velocity)\n\t}\n\t//b, _ := w.inPlatform(x, y)\n\t//log.Println(\"in platform:\", b, x, y)\n\treturn x, y\n}", "func (_Contracts *ContractsCallerSession) MPosition(arg0 *big.Int, arg1 *big.Int) (struct {\n\tProposalId *big.Int\n\tPositionId *big.Int\n\tMaxCandidate uint8\n\tTitle [32]byte\n}, error) {\n\treturn _Contracts.Contract.MPosition(&_Contracts.CallOpts, arg0, arg1)\n}", "func (level *Level) GetRandomPosition() Position {\n\tx := rand.Intn(level.size.Width)\n\ty := rand.Intn(level.size.Height)\n\t// log.Println(\"Generated a random position at\", Position{x, y})\n\treturn Position{x, y}\n}", "func (node *On) Position() string {\n\treturn node.Pos\n}", "func (m *MockStorage) GetProductPositions(ctx context.Context, productID int64) ([]model.Position, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetProductPositions\", ctx, productID)\n\tret0, _ := ret[0].([]model.Position)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (self *Rectangle) GetPoint2O(position int, out *Point) *Point{\n return &Point{self.Object.Call(\"getPoint\", position, out)}\n}", "func (o *ComponentReferenceDTO) GetPositionOk() (*PositionDTO, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func GetMasterPosition(t *testing.T, vttablet Vttablet, hostname string) (string, string) {\n\tctx := context.Background()\n\tvtablet := getTablet(vttablet.GrpcPort, hostname)\n\tpos, err := 
tmClient.MasterPosition(ctx, vtablet)\n\trequire.NoError(t, err)\n\tgtID := strings.SplitAfter(pos, \"/\")[1]\n\treturn pos, gtID\n}", "func (v *Provider) Position() (float64, float64, error) {\n\tres, err := v.statusLG()\n\tcoord := res.ResMsg.VehicleStatusInfo.VehicleLocation.Coord\n\treturn coord.Lat, coord.Lon, err\n}", "func (self *TileSprite) CameraOffset() *Point{\n return &Point{self.Object.Get(\"cameraOffset\")}\n}", "func FuncPos(call *vulncheck.CallSite) string {\n\tif call != nil && call.Pos != nil {\n\t\treturn call.Pos.String()\n\t}\n\treturn \"\"\n}", "func (t Table) GetPos(d Data) (int64, error) {\n\tdb, err := openDB(t.Config)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tdefer db.Close()\n\tdirection := \"\"\n\tif d.DescOrder {\n\t\tdirection = \"DESC\"\n\t}\n\tsqlQuery := fmt.Sprintf(`SELECT ROW_NUMBER FROM (SELECT ROW_NUMBER() OVER (ORDER BY %s %s), %s FROM %s) x WHERE %s=$1`, d.OrderBy, direction, d.Key, t.Name, d.Key)\n\trow := db.QueryRow(sqlQuery, d.KeyVal)\n\tvar position int64\n\terr = row.Scan(&position)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn position, nil\n}", "func (m *Mock) Called(arguments ...interface{}) Arguments {\n\t// get the calling function's name\n\tpc, _, _, ok := runtime.Caller(1)\n\tif !ok {\n\t\tpanic(\"Couldn't get the caller information\")\n\t}\n\tfunctionPath := runtime.FuncForPC(pc).Name()\n\t// Next four lines are required to use GCCGO function naming conventions.\n\t// For Ex: github_com_docker_libkv_store_mock.WatchTree.pN39_github_com_docker_libkv_store_mock.Mock\n\t// uses interface information unlike golang github.com/docker/libkv/store/mock.(*Mock).WatchTree\n\t// With GCCGO we need to remove interface information starting from pN<dd>.\n\tif gccgoRE.MatchString(functionPath) {\n\t\tfunctionPath = gccgoRE.Split(functionPath, -1)[0]\n\t}\n\tparts := strings.Split(functionPath, \".\")\n\tfunctionName := parts[len(parts)-1]\n\treturn m.MethodCalled(functionName, arguments...)\n}", "func (self *PhysicsP2) Pxmi(v int) int{\n return self.Object.Call(\"pxmi\", v).Int()\n}", "func ExpectCall(o oglemock.MockObject, method string) oglemock.PartialExpecation {\n\t// Get information about the call site.\n\t_, file, lineNumber, ok := runtime.Caller(1)\n\tif !ok {\n\t\tpanic(\"ExpectCall: runtime.Caller\")\n\t}\n\n\t// Grab the current test info.\n\tinfo := currentlyRunningTest\n\tif info == nil {\n\t\tpanic(\"ExpectCall: no test info.\")\n\t}\n\n\t// Grab the mock controller.\n\tcontroller := currentlyRunningTest.MockController\n\tif controller == nil {\n\t\tpanic(\"ExpectCall: no mock controller.\")\n\t}\n\n\t// Report the expectation.\n\treturn controller.ExpectCall(o, method, file, lineNumber)\n}", "func (mock *BotMock) OnCalls() []struct {\n\tIn1 middleware.Predicate\n\tIn2 middleware.Handler\n} {\n\tvar calls []struct {\n\t\tIn1 middleware.Predicate\n\t\tIn2 middleware.Handler\n\t}\n\tlockBotMockOn.RLock()\n\tcalls = mock.calls.On\n\tlockBotMockOn.RUnlock()\n\treturn calls\n}", "func (c *Coder) GetPos() int {\n\tif c.rng < 0x100 {\n\t\treturn c.pos - 1\n\t}\n\treturn c.pos\n}", "func (b *Ball) GetPosition() *Position {\n\treturn b.position\n}", "func (p *player) adjustPosition() {\n\tif p.startedMoving == 0 {\n\t\treturn\n\t}\n\tp.location.x, p.location.y = p.calculatePosition()\n\tval, _ := w.inPlatform(p.location.x, p.location.y)\n\tlog.Println(\"in platform,\", val, p.location.x, p.location.y)\n\tp.startedMoving = 0\n}", "func (mmGetPosition *StoreMock) GetPositionBeforeCounter() uint64 {\n\treturn 
mm_atomic.LoadUint64(&mmGetPosition.beforeGetPositionCounter)\n}", "func (l *Location) Pos() uint64 {\n\treturn l.pos\n}", "func (self *Fsm_s)answer_pos_requests(){\n pos:= elevTypes.ElevPos_t{}\n for{\n pos= <-self.ExtComs.ElevPosRequest\n pos.Floor = self.lastFloor\n pos.Direction = self.lastDir\n self.ExtComs.ElevPosRequest <- pos\n }\n}", "func (p *Package) Pos(n Poser) token.Position {\n\tif n == nil {\n\t\tpanic(\"nil passed to Pos()\")\n\t}\n\treturn p.Fset.Position(n.Pos())\n}", "func (o *ReportingTaskEntity) GetPositionOk() (*PositionDTO, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (p *PullRequestComment) GetPosition() int {\n\tif p == nil || p.Position == nil {\n\t\treturn 0\n\t}\n\treturn *p.Position\n}", "func CallArgAt(vm *VM, target, locals Interface, msg *Message) Interface {\n\tm := target.(*Call).Msg\n\tv, stop := msg.NumberArgAt(vm, locals, 0)\n\tif stop != nil {\n\t\treturn stop\n\t}\n\tr := m.ArgAt(int(v.Value))\n\tif r != nil {\n\t\treturn r\n\t}\n\treturn vm.Nil\n}", "func (gc *GisCache) GetPosition(typ string, name string) (*Position, error) {\n\tkey := gc.baseKey + posKey + typ + \":\" + name\n\n\t// Create position map\n\tpositionMap := make(map[string]*Position)\n\n\t// Get all position entry details\n\terr := gc.rc.ForEachEntry(key, getPosition, &positionMap)\n\tif err != nil {\n\t\tlog.Error(\"Failed to get all entries with error: \", err.Error())\n\t\treturn nil, err\n\t}\n\n\t// only one result, so return the first one\n\tfor _, position := range positionMap {\n\t\treturn position, nil\n\t}\n\treturn nil, nil\n}", "func (c *cursor) seekPosition(ctx context.Context, round uint64) error {\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\tdefault:\n\t}\n\n\tconst query = `\n\tSELECT\n\t\tcount(beacon_id) as round_offset\n\tFROM\n\t beacon_details\n\tWHERE\n\t beacon_id = :id\n\t\tAND round < :round`\n\n\tdata := struct {\n\t\tID int `db:\"id\"`\n\t\tRound uint64 `db:\"round\"`\n\t}{\n\t\tID: c.store.beaconID,\n\t\tRound: round,\n\t}\n\n\tvar p struct {\n\t\tPosition uint64 `db:\"round_offset\"`\n\t}\n\trows, err := c.store.db.NamedQueryContext(ctx, query, data)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer rows.Close()\n\n\tif !rows.Next() {\n\t\treturn chainerrors.ErrNoBeaconStored\n\t}\n\terr = rows.StructScan(&p)\n\n\tc.pos = p.Position\n\treturn err\n}", "func (o *InlineResponse20051TodoItems) GetPositionOk() (*string, bool) {\n\tif o == nil || o.Position == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Position, true\n}", "func (z *Zombie) GetCurrentPosition() *Position {\n\treturn z.position\n}", "func (cmd Command) Position() int64 {\n\treturn cmd.GlobalPosition\n}", "func (self *PhysicsP2) Mpxi(v int) int{\n return self.Object.Call(\"mpxi\", v).Int()\n}", "func (uni *Uniform4fv) GetPos(pos int) float32 {\n\n\treturn uni.v[pos]\n}", "func Mock(m *MockHandler) {\n\tmock = m\n}", "func (m *StoreMock) MinimockGetOrderInspect() {\n\tfor _, e := range m.GetOrderMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\tm.t.Errorf(\"Expected call to StoreMock.GetOrder with params: %#v\", *e.params)\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetOrderMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetOrderCounter) < 1 {\n\t\tif m.GetOrderMock.defaultExpectation.params == nil {\n\t\t\tm.t.Error(\"Expected call to StoreMock.GetOrder\")\n\t\t} else {\n\t\t\tm.t.Errorf(\"Expected call to 
StoreMock.GetOrder with params: %#v\", *m.GetOrderMock.defaultExpectation.params)\n\t\t}\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetOrder != nil && mm_atomic.LoadUint64(&m.afterGetOrderCounter) < 1 {\n\t\tm.t.Error(\"Expected call to StoreMock.GetOrder\")\n\t}\n}", "func (mmGetPosition *StoreMock) GetPositionAfterCounter() uint64 {\n\treturn mm_atomic.LoadUint64(&mmGetPosition.afterGetPositionCounter)\n}", "func (s *Scanner) pos() Pos {\n\treturn s.bufpos[s.bufi]\n}", "func (f Failure) Pos() meta.Position {\n\treturn f.pos\n}", "func (o *VersionedConnection) GetPosition() Position {\n\tif o == nil || o.Position == nil {\n\t\tvar ret Position\n\t\treturn ret\n\t}\n\treturn *o.Position\n}", "func execmScannerPos(_ int, p *gop.Context) {\n\targs := p.GetArgs(1)\n\tret := args[0].(*scanner.Scanner).Pos()\n\tp.Ret(1, ret)\n}", "func (m *StoreMock) MinimockGetPositionDone() bool {\n\tfor _, e := range m.GetPositionMock.expectations {\n\t\tif mm_atomic.LoadUint64(&e.Counter) < 1 {\n\t\t\treturn false\n\t\t}\n\t}\n\n\t// if default expectation was set then invocations count should be greater than zero\n\tif m.GetPositionMock.defaultExpectation != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\treturn false\n\t}\n\t// if func was set then invocations count should be greater than zero\n\tif m.funcGetPosition != nil && mm_atomic.LoadUint64(&m.afterGetPositionCounter) < 1 {\n\t\treturn false\n\t}\n\treturn true\n}", "func (self *Graphics) CameraOffset() *Point{\n return &Point{self.Object.Get(\"cameraOffset\")}\n}", "func (o *AudioStreamPlayer) GetPlaybackPosition() gdnative.Real {\n\t//log.Println(\"Calling AudioStreamPlayer.GetPlaybackPosition()\")\n\n\t// Build out the method's arguments\n\tptrArguments := make([]gdnative.Pointer, 0, 0)\n\n\t// Get the method bind\n\tmethodBind := gdnative.NewMethodBind(\"AudioStreamPlayer\", \"get_playback_position\")\n\n\t// Call the parent method.\n\t// float\n\tretPtr := gdnative.NewEmptyReal()\n\tgdnative.MethodBindPtrCall(methodBind, o.GetBaseObject(), ptrArguments, retPtr)\n\n\t// If we have a return type, convert it from a pointer into its actual object.\n\tret := gdnative.NewRealFromPointer(retPtr)\n\treturn ret\n}", "func (mmSavePosition *mStoreMockSavePosition) Calls() []*StoreMockSavePositionParams {\n\tmmSavePosition.mutex.RLock()\n\n\targCopy := make([]*StoreMockSavePositionParams, len(mmSavePosition.callArgs))\n\tcopy(argCopy, mmSavePosition.callArgs)\n\n\tmmSavePosition.mutex.RUnlock()\n\n\treturn argCopy\n}" ]
[ "0.65795183", "0.6404625", "0.62713015", "0.5971185", "0.5868294", "0.5617027", "0.5587604", "0.5450661", "0.54378927", "0.5315302", "0.5237889", "0.51818484", "0.51590216", "0.5156574", "0.51533866", "0.51147693", "0.5109267", "0.51027274", "0.5102396", "0.50930154", "0.5072506", "0.5068723", "0.5050282", "0.50447845", "0.5039647", "0.5036607", "0.49852884", "0.4982671", "0.49794906", "0.49710548", "0.49592948", "0.49510625", "0.4942378", "0.490546", "0.48958313", "0.4894984", "0.48917013", "0.488867", "0.48846918", "0.48735827", "0.48477852", "0.48462325", "0.48451212", "0.4837103", "0.4818664", "0.48142457", "0.48137605", "0.48128468", "0.4810079", "0.4809002", "0.4807071", "0.4805634", "0.47994304", "0.47977686", "0.4797654", "0.4792716", "0.47795966", "0.47714993", "0.47708884", "0.47705728", "0.47684544", "0.47527063", "0.47518927", "0.4745208", "0.47443447", "0.47429994", "0.47429347", "0.47398525", "0.47396255", "0.47367823", "0.47333252", "0.47310156", "0.47263992", "0.47220057", "0.4715513", "0.47154734", "0.4713575", "0.47053695", "0.47017893", "0.4699353", "0.46928173", "0.46927935", "0.46862027", "0.46841246", "0.46814883", "0.46749073", "0.46686575", "0.46641803", "0.4664116", "0.4660198", "0.4658848", "0.4652766", "0.46467048", "0.4644217", "0.46359417", "0.46292323", "0.46278977", "0.46238", "0.4622969", "0.46155643" ]
0.6790602
0
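The GetPos expecter helper in the record above only registers the mock.On("GetPos") call; consuming it in a test is left to the caller. A minimal usage sketch follows, under the assumption of the usual mockery-generated pieces that are not shown in these records: a NewMockWriteBufferJsonBased constructor, an EXPECT() accessor returning the expecter, and a Return(...) method on the generated _Call wrapper.

package utils_test // hypothetical package name, not taken from the records

import "testing"

// Illustrative sketch only: constructor, EXPECT() and Return(...) below are
// assumed mockery conventions rather than code shown in this dataset.
func TestGetPosExpectation(t *testing.T) {
	wb := NewMockWriteBufferJsonBased(t)    // assumed generated constructor
	wb.EXPECT().GetPos().Return(uint16(42)) // routes through the expecter helper, i.e. mock.On("GetPos")

	if got := wb.GetPos(); got != 42 { // the mock's GetPos replays the canned uint16
		t.Fatalf("expected 42, got %d", got)
	}
}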
PopContext provides a mock function with given fields: logicalName, writerArgs
func (_m *MockWriteBufferJsonBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok { r0 = rf(logicalName, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PopContext_Call {\n\treturn &MockWriteBufferJsonBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PopContext_Call {\n\treturn &MockWriteBufferXmlBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PushContext_Call {\n\treturn &MockWriteBufferJsonBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PushContext_Call {\n\treturn &MockWriteBufferXmlBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *RepositoryWriter) Close(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DirectRepositoryWriter) Close(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Repository) PushContext(_a0 context.Context, _a1 *git.PushOptions) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *git.PushOptions) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else 
{\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockAcknowledger) Push(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *SourceTarget) Persist(ctx context.Context, name string) error {\n\tret := _m.Called(ctx, name)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string) error); ok {\n\t\tr0 = rf(ctx, name)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Queue) Pop() interface{} {\n\tret := _m.Called()\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func() interface{}); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockInternalClient) WRingSetMeta(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingSetMetaResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingSetMeta\", varargs...)\n\tret0, _ := ret[0].(*WRingSetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Storage) Close(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *IRepository) Store(name string, age int) error {\n\tret := _m.Called(name, age)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, int) error); ok {\n\t\tr0 = rf(name, age)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalServer) WRingGetMeta(arg0 context.Context, arg1 *WRingRequestMsg) (*WRingGetMetaResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"WRingGetMeta\", arg0, arg1)\n\tret0, _ := ret[0].(*WRingGetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockInternalServer) WRingSetMeta(arg0 context.Context, arg1 *WRingRequestMsg) (*WRingSetMetaResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"WRingSetMeta\", arg0, arg1)\n\tret0, _ := ret[0].(*WRingSetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockManager) SerializeShipMetadata(arg0 api.ShipAppMetadata, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeShipMetadata\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockInternalServer) WRingCreate(arg0 context.Context, arg1 *WRingRequestMsg) (*WRingResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"WRingCreate\", arg0, arg1)\n\tret0, _ := ret[0].(*WRingResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *VPStore) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalClient) WRingGetMeta(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingGetMetaResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingGetMeta\", varargs...)\n\tret0, _ := ret[0].(*WRingGetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (o *PromAPIMock) SpyArgumentsAndReturnEmpty(fn func(args mock.Arguments)) {\n\tmetric := model.Metric{\n\t\t\"__name__\": \"whatever\",\n\t\t\"instance\": 
\"whatever\",\n\t\t\"job\": \"whatever\",\n\t}\n\to.On(\n\t\t\"Query\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"time.Time\"),\n\t).Run(fn).Return(model.Vector{}, nil)\n\tmatrix := model.Matrix{\n\t\t&model.SampleStream{\n\t\t\tMetric: metric,\n\t\t\tValues: []model.SamplePair{},\n\t\t},\n\t}\n\to.On(\n\t\t\"QueryRange\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"v1.Range\"),\n\t).Run(fn).Return(matrix, nil)\n}", "func (m *MockEnvironment) Stack() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Stack\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (m *MockPacketHandler) Context() context.Context {\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (m *mParcelMockContext) Return(r context.Context) *ParcelMock {\n\tm.mock.ContextFunc = nil\n\tm.expectationSeries = nil\n\n\tif m.mainExpectation == nil {\n\t\tm.mainExpectation = &ParcelMockContextExpectation{}\n\t}\n\tm.mainExpectation.result = &ParcelMockContextResult{r}\n\treturn m.mock\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalClient) WRingCreate(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingCreate\", varargs...)\n\tret0, _ := ret[0].(*WRingResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *ModifierMock) Set(p context.Context, p1 Drop) (r error) {\n\tcounter := atomic.AddUint64(&m.SetPreCounter, 1)\n\tdefer atomic.AddUint64(&m.SetCounter, 1)\n\n\tif len(m.SetMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.SetMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to ModifierMock.Set. %v %v\", p, p1)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.SetMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, ModifierMockSetInput{p, p1}, \"Modifier.Set got unexpected parameters\")\n\n\t\tresult := m.SetMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ModifierMock.Set\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.SetMock.mainExpectation != nil {\n\n\t\tinput := m.SetMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, ModifierMockSetInput{p, p1}, \"Modifier.Set got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.SetMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ModifierMock.Set\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.SetFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to ModifierMock.Set. 
%v %v\", p, p1)\n\t\treturn\n\t}\n\n\treturn m.SetFunc(p, p1)\n}", "func (_m *OplogCursor) Close(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func createMockContext(t *testing.T, mockArgs []string) *cli.Context {\n\tt.Log(\"Create mock context\")\n\tmockApp := cli.NewApp()\n\n\tmockSet := flag.NewFlagSet(\"mock\", 0)\n\t//mockArgs := []string{\"TESTDIR\"}\n\tmockSet.Parse(mockArgs)\n\n\treturn cli.NewContext(mockApp, mockSet, nil)\n}", "func (_m *OplogCursor) Push(_a0 []byte) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MergeRequest) Close(projectId gitlab.NameOrId, mergeRequestID int) error {\n\tret := _m.Called(projectId, mergeRequestID)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(gitlab.NameOrId, int) error); ok {\n\t\tr0 = rf(projectId, mergeRequestID)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *ChannelStore) Restore(channelID string, timestamp int64) error {\n\tret := _m.Called(channelID, timestamp)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, int64) error); ok {\n\t\tr0 = rf(channelID, timestamp)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *mockDbOperation) StoreMetadata(metadata db.Metadata, dir string) error {\n\tret := _m.Called(metadata, dir)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(db.Metadata, string) error); ok {\n\t\tr0 = rf(metadata, dir)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DatabaseReaderWriter) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func mock() (mainProto.Data, extraProto.Data) {\n\trandomStr := randStr(10)\n\tmain := mainProto.Data{\n\t\tUsername: 
randomStr,\n\t\tEmail: randomStr + \"@gmail.com\",\n\t\tPassword: randomStr,\n\t}\n\textra := extraProto.Data{\n\t\tUserID: randomStr,\n\t\tFirstName: randomStr,\n\t\tLastName: randomStr,\n\t\tGender: \"male\",\n\t\tBirthdayUTC: int64(864466669),\n\t}\n\treturn main, extra\n}", "func (m *MockProduct) AggregatedRightDetails(arg0 context.Context, arg1 db.AggregatedRightDetailsParams) (db.AggregatedRightDetailsRow, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AggregatedRightDetails\", arg0, arg1)\n\tret0, _ := ret[0].(db.AggregatedRightDetailsRow)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Session) CloseWrite() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *SubProcessCmd) StdinPipe() (io.WriteCloser, error) {\n\tret := _m.Called()\n\n\tvar r0 io.WriteCloser\n\tif rf, ok := ret.Get(0).(func() io.WriteCloser); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.WriteCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *RepositoryWriter) Refresh(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *RediStore) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSessionRunner) Retire(arg0 protocol.ConnectionID) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Retire\", arg0)\n}", "func (_m *ChannelStore) ClearSidebarOnTeamLeave(userID string, teamID string) error {\n\tret := _m.Called(userID, teamID)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, string) error); ok {\n\t\tr0 = rf(userID, teamID)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Writer) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Writer) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DirectRepositoryWriter) Refresh(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DockerClient) ImagePush(_a0 context.Context, _a1 string, _a2 types.ImagePushOptions) (io.ReadCloser, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func(context.Context, string, types.ImagePushOptions) io.ReadCloser); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string, types.ImagePushOptions) error); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *Consumer) Unassign() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DirectRepositoryWriter) NewWriter(ctx context.Context, opt repo.WriteSessionOptions) (context.Context, repo.RepositoryWriter, error) {\n\tret := _m.Called(ctx, opt)\n\n\tvar r0 context.Context\n\tvar r1 repo.RepositoryWriter\n\tvar r2 error\n\tif rf, ok := ret.Get(0).(func(context.Context, repo.WriteSessionOptions) (context.Context, repo.RepositoryWriter, error)); ok {\n\t\treturn rf(ctx, opt)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, repo.WriteSessionOptions) context.Context); ok {\n\t\tr0 = rf(ctx, opt)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(context.Context)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, repo.WriteSessionOptions) repo.RepositoryWriter); ok {\n\t\tr1 = rf(ctx, opt)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(repo.RepositoryWriter)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(2).(func(context.Context, repo.WriteSessionOptions) error); ok {\n\t\tr2 = rf(ctx, opt)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func (_m *RepositoryWriter) NewWriter(ctx context.Context, opt repo.WriteSessionOptions) (context.Context, repo.RepositoryWriter, error) {\n\tret := _m.Called(ctx, opt)\n\n\tvar r0 context.Context\n\tvar r1 repo.RepositoryWriter\n\tvar r2 error\n\tif rf, ok := ret.Get(0).(func(context.Context, repo.WriteSessionOptions) (context.Context, repo.RepositoryWriter, error)); ok {\n\t\treturn rf(ctx, opt)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, repo.WriteSessionOptions) context.Context); ok {\n\t\tr0 = rf(ctx, opt)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(context.Context)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, repo.WriteSessionOptions) repo.RepositoryWriter); ok {\n\t\tr1 = rf(ctx, opt)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(repo.RepositoryWriter)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(2).(func(context.Context, repo.WriteSessionOptions) error); ok {\n\t\tr2 = rf(ctx, opt)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (m *MockUsersService) ApplyLeave(ctx context.Context, leave models.Leave) (interface{}, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ApplyLeave\", ctx, leave)\n\tret0, _ := ret[0].(interface{})\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Forge) Hook(ctx context.Context, r *http.Request) (*model.Repo, *model.Pipeline, error) {\n\tret := _m.Called(ctx, r)\n\n\tvar r0 *model.Repo\n\tvar r1 *model.Pipeline\n\tvar r2 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *http.Request) (*model.Repo, *model.Pipeline, error)); ok {\n\t\treturn rf(ctx, r)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, *http.Request) *model.Repo); ok {\n\t\tr0 = rf(ctx, r)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Repo)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, *http.Request) *model.Pipeline); ok {\n\t\tr1 = rf(ctx, r)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*model.Pipeline)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(2).(func(context.Context, *http.Request) error); ok {\n\t\tr2 = rf(ctx, r)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (m *MockMembers) Leave() error {\n\tret := m.ctrl.Call(m, \"Leave\")\n\tret0, _ := 
ret[0].(error)\n\treturn ret0\n}", "func MockSystemCall(std_out, std_err string, err error) {\n\tcontext.SystemCall = func(dir string, cmd string, args []string, local_out, local_err io.Writer) error {\n\t\tlocal_out.Write([]byte(std_out))\n\t\tlocal_err.Write([]byte(std_err))\n\t\treturn err\n\t}\n}", "func (m *MockCfnClient) GetStackOutputs(stackName string) (map[string]string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetStackOutputs\", stackName)\n\tret0, _ := ret[0].(map[string]string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) GetPos() uint16 {\n\tret := _m.Called()\n\n\tvar r0 uint16\n\tif rf, ok := ret.Get(0).(func() uint16); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(uint16)\n\t}\n\n\treturn r0\n}", "func (_m *MockSetter) Set(ctx context.Context, volumeName string, labels map[string]string) error {\n\tret := _m.Called(ctx, volumeName, labels)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, map[string]string) error); ok {\n\t\tr0 = rf(ctx, volumeName, labels)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFullNode) MpoolPush(arg0 context.Context, arg1 *types.SignedMessage) (cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolPush\", arg0, arg1)\n\tret0, _ := ret[0].(cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *RepositoryWriter) Flush(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockInterface) Create(ctx context.Context, key string, val string) error {\n\tret := _m.Called(ctx, key, val)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {\n\t\tr0 = rf(ctx, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockBackend) Purge(key string) error {\n\tret := _m.Called(key)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string) error); ok {\n\t\tr0 = rf(key)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *mockAPI) pop(req *http.Request) *expectation {\n\tpath := fmt.Sprintf(\"%s-%s\", strings.TrimSuffix(req.URL.Path, \"/\"), req.Method)\n\tm.Lock()\n\ts, ok := m.expectations[path]\n\tif !ok || len(s) == 0 {\n\t\tm.Unlock()\n\t\treturn m.notFound()\n\t}\n\tnext := s[0]\n\tm.expectations[path] = s[1:]\n\tif len(m.expectations[path]) == 0 {\n\t\tdelete(m.expectations, path)\n\t}\n\n\tm.Unlock()\n\n\tif next == nil || next.code == 0 {\n\t\treturn m.notFound()\n\t}\n\treturn next\n}", "func (_m *ServerConnexion) Stor(path string, oReader io.Reader) error {\n\tret := _m.Called(path, oReader)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, io.Reader) error); ok {\n\t\tr0 = rf(path, oReader)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockRequestInterceptor) ProcessWriteResponses(ctx context.Context, writeRequest model.PlcWriteRequest, writeResults []model.PlcWriteRequestResult) model.PlcWriteRequestResult {\n\tret := _m.Called(ctx, writeRequest, writeResults)\n\n\tvar r0 model.PlcWriteRequestResult\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest, []model.PlcWriteRequestResult) model.PlcWriteRequestResult); ok {\n\t\tr0 = rf(ctx, writeRequest, writeResults)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(model.PlcWriteRequestResult)\n\t\t}\n\t}\n\n\treturn r0\n}", "func 
(m *MockSession) Bind(arg0 context.Context, arg1 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bind\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *DirectRepositoryWriter) Flush(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Manager) Get(ctx context.Context, projectID int64, meta ...string) (map[string]string, error) {\n\t_va := make([]interface{}, len(meta))\n\tfor _i := range meta {\n\t\t_va[_i] = meta[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, projectID)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 map[string]string\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) (map[string]string, error)); ok {\n\t\treturn rf(ctx, projectID, meta...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, ...string) map[string]string); ok {\n\t\tr0 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]string)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, int64, ...string) error); ok {\n\t\tr1 = rf(ctx, projectID, meta...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteRequestInterceptor) ProcessWriteResponses(ctx context.Context, writeRequest model.PlcWriteRequest, writeResults []model.PlcWriteRequestResult) model.PlcWriteRequestResult {\n\tret := _m.Called(ctx, writeRequest, writeResults)\n\n\tvar r0 model.PlcWriteRequestResult\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest, []model.PlcWriteRequestResult) model.PlcWriteRequestResult); ok {\n\t\tr0 = rf(ctx, writeRequest, writeResults)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(model.PlcWriteRequestResult)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *Database) Close(_a0 context.Context) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Extract\", format, carrier)\n\tret0, _ := ret[0].(opentracing.SpanContext)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Extract\", format, carrier)\n\tret0, _ := ret[0].(opentracing.SpanContext)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *DBRepository) Close() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockGitService) Clone(_a0 context.Context, _a1 string) (*git.Repository, error) {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 *git.Repository\n\tif rf, ok := ret.Get(0).(func(context.Context, string) *git.Repository); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*git.Repository)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(_a0, _a1)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func MockCreateResponse(t *testing.T) {\n\tth.Mux.HandleFunc(shareEndpoint, func(w 
http.ResponseWriter, r *http.Request) {\n\t\tth.TestMethod(t, r, \"POST\")\n\t\tth.TestHeader(t, r, \"X-Auth-Token\", fake.TokenID)\n\t\tth.TestHeader(t, r, \"Content-Type\", \"application/json\")\n\t\tth.TestHeader(t, r, \"Accept\", \"application/json\")\n\t\tth.TestJSONRequest(t, r, createRequest)\n\t\tw.Header().Add(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(http.StatusOK)\n\t\tfmt.Fprintf(w, createResponse)\n\t})\n}", "func (m *MockCache) Del(ctx context.Context, keys ...string) (int64, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctx}\n\tfor _, a := range keys {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Del\", varargs...)\n\tret0, _ := ret[0].(int64)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func NewMockWriteRequestInterceptor(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockWriteRequestInterceptor {\n\tmock := &MockWriteRequestInterceptor{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func (_m *mockStateBuilder) applyEvents(domainID string, requestID string, execution shared.WorkflowExecution, _a3 []*shared.HistoryEvent,\n\tnewRunHistory []*shared.HistoryEvent, eventStoreVersion, newRunEventStoreVersion int32, newRunNDC bool) (*shared.HistoryEvent, *decisionInfo, mutableState, error) {\n\n\tret := _m.Called(domainID, requestID, execution, _a3, newRunHistory, eventStoreVersion, newRunEventStoreVersion, newRunNDC)\n\n\tvar r0 *shared.HistoryEvent\n\tif rf, ok := ret.Get(0).(func(string, string, shared.WorkflowExecution, []*shared.HistoryEvent, []*shared.HistoryEvent, int32, int32, bool) *shared.HistoryEvent); ok {\n\t\tr0 = rf(domainID, requestID, execution, _a3, newRunHistory, eventStoreVersion, newRunEventStoreVersion, newRunNDC)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*shared.HistoryEvent)\n\t\t}\n\t}\n\n\tvar r1 *decisionInfo\n\tif rf, ok := ret.Get(1).(func(string, string, shared.WorkflowExecution, []*shared.HistoryEvent, []*shared.HistoryEvent, int32, int32, bool) *decisionInfo); ok {\n\t\tr1 = rf(domainID, requestID, execution, _a3, newRunHistory, eventStoreVersion, newRunEventStoreVersion, newRunNDC)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*decisionInfo)\n\t\t}\n\t}\n\n\tvar r2 mutableState\n\tif rf, ok := ret.Get(2).(func(string, string, shared.WorkflowExecution, []*shared.HistoryEvent, []*shared.HistoryEvent, int32, int32, bool) mutableState); ok {\n\t\tr2 = rf(domainID, requestID, execution, _a3, newRunHistory, eventStoreVersion, newRunEventStoreVersion, newRunNDC)\n\t} else {\n\t\tif ret.Get(2) != nil {\n\t\t\tr2 = ret.Get(2).(mutableState)\n\t\t}\n\t}\n\n\tvar r3 error\n\tif rf, ok := ret.Get(3).(func(string, string, shared.WorkflowExecution, []*shared.HistoryEvent, []*shared.HistoryEvent, int32, int32, bool) error); ok {\n\t\tr3 = rf(domainID, requestID, execution, _a3, newRunHistory, eventStoreVersion, newRunEventStoreVersion, newRunNDC)\n\t} else {\n\t\tr3 = ret.Error(3)\n\t}\n\n\treturn r0, r1, r2, r3\n}", "func fakePopFromStack(stack *Stack) *Node {\n\treturn stack.FakePop()\n}", "func (m *mockCommand) ParseArgs(strings []string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ParseArgs\", strings)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *mParcelMockContext) Set(f func(p context.Context) (r context.Context)) *ParcelMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.ContextFunc = f\n\treturn m.mock\n}", "func (_m *MockStore) Read(ctx 
context.Context, key string, val interface{}, opts ...store.ReadOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.ReadOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Exit(code int) error {\n\tret := _m.Called(code)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int) error); ok {\n\t\tr0 = rf(code)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockProvisioner_ProvisionResourceServer) Context() context.Context {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (m *MockConfigAdminService_ListSnapshotsServer) Context() context.Context {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (_m *Remote) Hook(r *http.Request) (*model.Repo, *model.Build, error) {\n\tret := _m.Called(r)\n\n\tvar r0 *model.Repo\n\tif rf, ok := ret.Get(0).(func(*http.Request) *model.Repo); ok {\n\t\tr0 = rf(r)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Repo)\n\t\t}\n\t}\n\n\tvar r1 *model.Build\n\tif rf, ok := ret.Get(1).(func(*http.Request) *model.Build); ok {\n\t\tr1 = rf(r)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*model.Build)\n\t\t}\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(*http.Request) error); ok {\n\t\tr2 = rf(r)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (m *MockIDistributedEnforcer) RemoveNamedGroupingPolicy(arg0 string, arg1 ...interface{}) (bool, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0}\n\tfor _, a := range arg1 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"RemoveNamedGroupingPolicy\", varargs...)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockSessionRunner) RetireResetToken(arg0 [16]byte) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"RetireResetToken\", arg0)\n}", "func (m *MockFlag) Create(arg0 context.Context, arg1 flaggio.NewFlag) (string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Create\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockWriter) Create(key string, value interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Create\", key, value)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func mockMapStore(storage map[string]interface{}) ResultStoreFn {\n\treturn func(id string, key string, value interface{}) {\n\t\tutil.SetNestedField(storage, value, id, key)\n\t}\n}", "func (_m *SubProcessCmd) StdoutPipe() (io.ReadCloser, error) {\n\tret := _m.Called()\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func() io.ReadCloser); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *ExecutionManager) Create(ctx context.Context, vendorType string, vendorID int64, trigger string, extraAttrs ...map[string]interface{}) (int64, error) {\n\t_va := make([]interface{}, 
len(extraAttrs))\n\tfor _i := range extraAttrs {\n\t\t_va[_i] = extraAttrs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, vendorType, vendorID, trigger)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 int64\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, int64, string, ...map[string]interface{}) (int64, error)); ok {\n\t\treturn rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, string, int64, string, ...map[string]interface{}) int64); ok {\n\t\tr0 = rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, string, int64, string, ...map[string]interface{}) error); ok {\n\t\tr1 = rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (mw *MockWarehouse) Close() {}", "func (_m *MockGrabX) Get(ctx context.Context, name string) string {\n\tret := _m.Called(ctx, name)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, string) string); ok {\n\t\tr0 = rf(ctx, name)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (m *MockProviderClient) DeleteCloudformationStack(arg0 context.Context, arg1 map[string]string, arg2 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"DeleteCloudformationStack\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}" ]
[ "0.78670734", "0.7238186", "0.70515925", "0.6772405", "0.6644104", "0.6125356", "0.5905429", "0.53000224", "0.52264994", "0.5159887", "0.50755894", "0.5041714", "0.49556407", "0.4920434", "0.4918639", "0.49163756", "0.4878267", "0.48682675", "0.48597848", "0.48353273", "0.4807918", "0.48054862", "0.48050696", "0.47991198", "0.4796362", "0.47851628", "0.4781915", "0.47668943", "0.47630087", "0.4741868", "0.47361222", "0.47219345", "0.47021315", "0.47000834", "0.47000584", "0.46974838", "0.46965143", "0.46952504", "0.46910104", "0.46893087", "0.46807382", "0.467949", "0.4662969", "0.46275657", "0.46185997", "0.46127775", "0.46122602", "0.4606656", "0.4606656", "0.4602917", "0.45967928", "0.45747337", "0.45737636", "0.45721555", "0.45647383", "0.45640993", "0.45639476", "0.45638242", "0.4559625", "0.45386747", "0.4523974", "0.45233557", "0.4521518", "0.45199424", "0.4519726", "0.45142347", "0.4512095", "0.4504711", "0.45009884", "0.4500749", "0.4500034", "0.44979763", "0.4488716", "0.44865766", "0.44765294", "0.44765294", "0.4466213", "0.44660583", "0.4460205", "0.44586924", "0.44577572", "0.44550455", "0.44531247", "0.44500065", "0.44477892", "0.4446418", "0.44454908", "0.44454187", "0.4441774", "0.4441681", "0.44408226", "0.44398224", "0.4435359", "0.44330794", "0.44286674", "0.4424713", "0.44247088", "0.44190994", "0.44154176", "0.44151062" ]
0.79414773
0
PopContext is a helper method to define mock.On call logicalName string writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PopContext_Call { return &MockWriteBufferJsonBased_PopContext_Call{Call: _e.mock.On("PopContext", append([]interface{}{logicalName}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PopContext_Call {\n\treturn &MockWriteBufferXmlBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PushContext_Call {\n\treturn &MockWriteBufferJsonBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PushContext_Call {\n\treturn &MockWriteBufferXmlBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *mockAPI) pop(req *http.Request) *expectation {\n\tpath := fmt.Sprintf(\"%s-%s\", strings.TrimSuffix(req.URL.Path, \"/\"), req.Method)\n\tm.Lock()\n\ts, ok := m.expectations[path]\n\tif !ok || len(s) == 0 {\n\t\tm.Unlock()\n\t\treturn m.notFound()\n\t}\n\tnext := s[0]\n\tm.expectations[path] = s[1:]\n\tif len(m.expectations[path]) == 0 {\n\t\tdelete(m.expectations, path)\n\t}\n\n\tm.Unlock()\n\n\tif next == nil || next.code == 0 {\n\t\treturn m.notFound()\n\t}\n\treturn next\n}", "func op_POP(pc *uint64, in 
*interpreter, ctx *callCtx) uint64 {\n\tctx.stack.Pop()\n\treturn 0\n}", "func Pop(ctx ContextT) int32 {\n\treturn int32(C.yices_pop(yctx(ctx)))\n}", "func POPW(mr operand.Op) { ctx.POPW(mr) }", "func createMockContext(t *testing.T, mockArgs []string) *cli.Context {\n\tt.Log(\"Create mock context\")\n\tmockApp := cli.NewApp()\n\n\tmockSet := flag.NewFlagSet(\"mock\", 0)\n\t//mockArgs := []string{\"TESTDIR\"}\n\tmockSet.Parse(mockArgs)\n\n\treturn cli.NewContext(mockApp, mockSet, nil)\n}", "func MockSystemCall(std_out, std_err string, err error) {\n\tcontext.SystemCall = func(dir string, cmd string, args []string, local_out, local_err io.Writer) error {\n\t\tlocal_out.Write([]byte(std_out))\n\t\tlocal_err.Write([]byte(std_err))\n\t\treturn err\n\t}\n}", "func (set Set) Pop(ctx context.Context) (string, error) {\n\treq := newRequest(\"*2\\r\\n$4\\r\\nSPOP\\r\\n$\")\n\treq.addString(set.name)\n\treturn set.c.cmdString(ctx, req)\n}", "func Pop(ctx echo.Context) error {\n\n\treq := types.PopRequest{}\n\n\terr := ctx.Bind(&req)\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tif !registration.IsAgentRegistered(req.Token) {\n\t\treturn ctx.JSON(403, types.ValidateResponse{Success: false, Message: \"Security Token Not Recognized\"})\n\t}\n\n\tmsg, err := GetFromQueue(req.Queue)\n\n\tdata := types.Message{}\n\n\tjson.Unmarshal(msg, &data)\n\n\tresp := types.PopResponse{Message: data.Message, Queue: req.Queue}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ctx.JSON(200, resp)\n}", "func (r *renderer) pop() reflect.Value {\n\tif len(r.stack) == 0 {\n\t\treturn reflect.Value{}\n\t}\n\tctx := r.stack[len(r.stack)-1]\n\tr.stack = r.stack[:len(r.stack)-1]\n\treturn ctx\n}", "func MockOnResetSystem(ctx context.Context, mockAPI *redfishMocks.RedfishAPI,\n\tsystemID string, requestBody *redfishClient.ResetRequestBody, redfishErr redfishClient.RedfishError,\n\thttpResponse *http.Response, err error) {\n\trequest := redfishClient.ApiResetSystemRequest{}.ResetRequestBody(*requestBody)\n\tmockAPI.On(\"ResetSystem\", ctx, systemID).Return(request).Times(1)\n\tmockAPI.On(\"ResetSystemExecute\", mock.Anything).Return(redfishErr, httpResponse, err).Times(1)\n}", "func (mock *GitModuleControllerMock) OnRemoveCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v1.GitModuleHandler\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v1.GitModuleHandler\n\t}\n\tlockGitModuleControllerMockOnRemove.RLock()\n\tcalls = mock.calls.OnRemove\n\tlockGitModuleControllerMockOnRemove.RUnlock()\n\treturn calls\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func ShallowSpyAndConditionallyCallThrough(name string, callThroughCondition string) Gob {\n\treturn &spy{name: name, callThroughCondition: callThroughCondition, shouldExport: false}\n}", "func (_m *Repository) PushContext(_a0 context.Context, _a1 *git.PushOptions) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *git.PushOptions) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *PromAPIMock) SpyArgumentsAndReturnEmpty(fn func(args mock.Arguments)) {\n\tmetric := model.Metric{\n\t\t\"__name__\": \"whatever\",\n\t\t\"instance\": \"whatever\",\n\t\t\"job\": 
\"whatever\",\n\t}\n\to.On(\n\t\t\"Query\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"time.Time\"),\n\t).Run(fn).Return(model.Vector{}, nil)\n\tmatrix := model.Matrix{\n\t\t&model.SampleStream{\n\t\t\tMetric: metric,\n\t\t\tValues: []model.SamplePair{},\n\t\t},\n\t}\n\to.On(\n\t\t\"QueryRange\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"v1.Range\"),\n\t).Run(fn).Return(matrix, nil)\n}", "func SpyAndConditionallyCallThrough(name string, callThroughCondition string) Gob {\n\treturn &spy{name: name, callThroughCondition: callThroughCondition, shouldExport: true}\n}", "func ret(context *Context) {\n context.cpu.pc = context.stack[context.cpu.sp]\n context.cpu.sp--\n}", "func (m *mParcelMockContext) Return(r context.Context) *ParcelMock {\n\tm.mock.ContextFunc = nil\n\tm.expectationSeries = nil\n\n\tif m.mainExpectation == nil {\n\t\tm.mainExpectation = &ParcelMockContextExpectation{}\n\t}\n\tm.mainExpectation.result = &ParcelMockContextResult{r}\n\treturn m.mock\n}", "func (_m *MockAcknowledger) Push(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func SetOnPop(fn func(v interface{})) {\n\n\texec.OnPop = fn\n}", "func (vm *VM) framePop() {\n\tf := vm.frames.pop()\n\tvm.global.op = f.op\n\tvm.global.currentEnv = f.env\n}", "func (v *DevbindMock) Reset() {\n\tv.receivedArgs = [][]string{}\n}", "func mockBatchCTX(t testing.TB) *BatchCTX {\n\tt.Helper()\n\tmockBatch := NewBatchCTX(mockBatchCTXHeader())\n\tmockBatch.AddEntry(mockCTXEntryDetail())\n\tmockBatch.GetEntries()[0].AddAddenda05(mockAddenda05())\n\tmockBatch.Entries[0].AddendaRecordIndicator = 1\n\tif err := mockBatch.Create(); err != nil {\n\t\tt.Fatal(err)\n\t}\n\treturn mockBatch\n}", "func (m *mParcelMockContext) Set(f func(p context.Context) (r context.Context)) *ParcelMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.ContextFunc = f\n\treturn m.mock\n}", "func Test04Pop(t *testing.T) {\n\tgomega.RegisterTestingT(t)\n\n\tprefixStack := getPrefixStack()\n\tprefixStack.Push()\n\tprefixStack.Push()\n\n\tgomega.Expect(len(prefixStack.Entries)).To(gomega.BeEquivalentTo(3))\n\tprefixStack.Pop()\n\tgomega.Expect(len(prefixStack.Entries)).To(gomega.BeEquivalentTo(2))\n}", "func (p *PageStack) StackPopped(o, top model.Component) {\n\to.Stop()\n\tp.StackTop(top)\n}", "func POPQ(mr operand.Op) { ctx.POPQ(mr) }", "func (c *Context) POPW(mr operand.Op) {\n\tc.addinstruction(x86.POPW(mr))\n}", "func (ctx *PQContext) Pop(params []string) apis.IResponse {\n\tvar err *mpqerr.ErrorResponse\n\tvar limit int64 = 1\n\tvar asyncId string\n\n\tpopWaitTimeout := ctx.pq.config.PopWaitTimeout\n\n\tfor len(params) > 0 {\n\t\tswitch params[0] {\n\t\tcase PRM_LIMIT:\n\t\t\tparams, limit, err = mpqproto.ParseInt64Param(params, 1, conf.CFG_PQ.MaxPopBatchSize)\n\t\tcase PRM_POP_WAIT:\n\t\t\tparams, popWaitTimeout, err = mpqproto.ParseInt64Param(params, 0, conf.CFG_PQ.MaxPopWaitTimeout)\n\t\tcase PRM_ASYNC:\n\t\t\tparams, asyncId, err = mpqproto.ParseItemId(params)\n\t\tdefault:\n\t\t\treturn mpqerr.UnknownParam(params[0])\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif len(asyncId) > 0 {\n\t\treturn ctx.asyncPop(asyncId, 0, popWaitTimeout, limit, false)\n\t} else {\n\t\treturn ctx.pq.Pop(0, popWaitTimeout, limit, 
false)\n\t}\n}", "func (m *MockSessionRunner) Retire(arg0 protocol.ConnectionID) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Retire\", arg0)\n}", "func (s *StakingKeeperMock) Jail(sdk.Context, sdk.ConsAddress) {}", "func (conf Configuration) OnCall(method string, cont Context, argument interface{}) {\n\tlog.Printf(\"%v %v: Called\\nContext=%# v\\nArgument=%# v\\n\\n\", conf.Name, method, pretty.Formatter(cont), pretty.Formatter(argument))\n}", "func (s *BasePlSqlParserListener) ExitCall_spec(ctx *Call_specContext) {}", "func ShallowSpyAndCallThrough(name string) Gob {\n\treturn &spy{name: name, callThroughCondition: unconditionalCallthrough, shouldExport: false}\n}", "func fakePopFromStack(stack *Stack) *Node {\n\treturn stack.FakePop()\n}", "func (m *MockLSIFStore) Clear(v0 context.Context, v1 ...int) error {\n\tr0 := m.ClearFunc.nextHook()(v0, v1...)\n\tm.ClearFunc.appendCall(LSIFStoreClearFuncCall{v0, v1, r0})\n\treturn r0\n}", "func (m *MockLSIFStore) Clear(v0 context.Context, v1 ...int) error {\n\tr0 := m.ClearFunc.nextHook()(v0, v1...)\n\tm.ClearFunc.appendCall(LSIFStoreClearFuncCall{v0, v1, r0})\n\treturn r0\n}", "func MockOnSetSystem(ctx context.Context, mockAPI *redfishMocks.RedfishAPI, systemID string,\n\tcomputerSystem redfishClient.ComputerSystem, httpResponse *http.Response, err error) {\n\trequest := redfishClient.ApiSetSystemRequest{}.ComputerSystem(computerSystem)\n\tmockAPI.On(\"SetSystem\", ctx, systemID).Return(request).Times(1)\n\tmockAPI.On(\"SetSystemExecute\", mock.Anything).Return(computerSystem, httpResponse, err).Times(1)\n}", "func (client *MockPodExecClient) AddMockPodExecReturnContext(ctx context.Context, cmd string, mockPodExecReturnContext *MockPodExecReturnContext) {\n\tclient.WantCmdList = append(client.WantCmdList, cmd)\n\tif client.MockReturnContexts == nil {\n\t\tclient.MockReturnContexts = make(map[string]*MockPodExecReturnContext)\n\t}\n\tclient.MockReturnContexts[cmd] = mockPodExecReturnContext\n}", "func (m *ParcelMock) Context(p context.Context) (r context.Context) {\n\tcounter := atomic.AddUint64(&m.ContextPreCounter, 1)\n\tdefer atomic.AddUint64(&m.ContextCounter, 1)\n\n\tif len(m.ContextMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.ContextMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to ParcelMock.Context. %v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.ContextMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, ParcelMockContextInput{p}, \"Parcel.Context got unexpected parameters\")\n\n\t\tresult := m.ContextMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ParcelMock.Context\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.ContextMock.mainExpectation != nil {\n\n\t\tinput := m.ContextMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, ParcelMockContextInput{p}, \"Parcel.Context got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.ContextMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ParcelMock.Context\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.ContextFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to ParcelMock.Context. 
%v\", p)\n\t\treturn\n\t}\n\n\treturn m.ContextFunc(p)\n}", "func SpyAndCallThrough(name string) Gob {\n\treturn &spy{name: name, callThroughCondition: unconditionalCallthrough, shouldExport: true}\n}", "func (m *MockPacketHandler) Context() context.Context {\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (m *MockSessionPool) OnAfterSessionBind(arg0 func(context.Context, session.Session) error) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"OnAfterSessionBind\", arg0)\n}", "func (mock *IGraphMock) CloseCalls() []struct {\n\tCtx context.Context\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t}\n\tmock.lockClose.RLock()\n\tcalls = mock.calls.Close\n\tmock.lockClose.RUnlock()\n\treturn calls\n}", "func (vm *VM) opPop(instr []uint16) int {\n\tif len(vm.stack) == 0 {\n\t\t// error\n\t\tvm.Status = \"opPop has empty stack!\"\n\t\treturn 0\n\t}\n\n\tv := vm.stack[len(vm.stack)-1]\n\tvm.stack = vm.stack[:len(vm.stack)-1]\n\ta, _, _ := vm.getAbc(instr)\n\tvm.registers[a] = v\n\treturn 2\n}", "func (m *ModifierMock) Set(p context.Context, p1 Drop) (r error) {\n\tcounter := atomic.AddUint64(&m.SetPreCounter, 1)\n\tdefer atomic.AddUint64(&m.SetCounter, 1)\n\n\tif len(m.SetMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.SetMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to ModifierMock.Set. %v %v\", p, p1)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.SetMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, ModifierMockSetInput{p, p1}, \"Modifier.Set got unexpected parameters\")\n\n\t\tresult := m.SetMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ModifierMock.Set\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.SetMock.mainExpectation != nil {\n\n\t\tinput := m.SetMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, ModifierMockSetInput{p, p1}, \"Modifier.Set got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.SetMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ModifierMock.Set\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.SetFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to ModifierMock.Set. %v %v\", p, p1)\n\t\treturn\n\t}\n\n\treturn m.SetFunc(p, p1)\n}", "func (_m *MockHTTPServerInterface) SetContext(_a0 context.Context) {\n\t_m.Called(_a0)\n}", "func (c *Call) Unset() *Call {\n\tvar unlockOnce sync.Once\n\n\tfor _, arg := range c.Arguments {\n\t\tif v := reflect.ValueOf(arg); v.Kind() == reflect.Func {\n\t\t\tpanic(fmt.Sprintf(\"cannot use Func in expectations. 
Use mock.AnythingOfType(\\\"%T\\\")\", arg))\n\t\t}\n\t}\n\n\tc.lock()\n\tdefer unlockOnce.Do(c.unlock)\n\n\tfoundMatchingCall := false\n\n\t// in-place filter slice for calls to be removed - iterate from 0'th to last skipping unnecessary ones\n\tvar index int // write index\n\tfor _, call := range c.Parent.ExpectedCalls {\n\t\tif call.Method == c.Method {\n\t\t\t_, diffCount := call.Arguments.Diff(c.Arguments)\n\t\t\tif diffCount == 0 {\n\t\t\t\tfoundMatchingCall = true\n\t\t\t\t// Remove from ExpectedCalls - just skip it\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\tc.Parent.ExpectedCalls[index] = call\n\t\tindex++\n\t}\n\t// trim slice up to last copied index\n\tc.Parent.ExpectedCalls = c.Parent.ExpectedCalls[:index]\n\n\tif !foundMatchingCall {\n\t\tunlockOnce.Do(c.unlock)\n\t\tc.Parent.fail(\"\\n\\nmock: Could not find expected call\\n-----------------------------\\n\\n%s\\n\\n\",\n\t\t\tcallString(c.Method, c.Arguments, true),\n\t\t)\n\t}\n\n\treturn c\n}", "func Pop(context *endly.Context) *model.Process {\n\tvar processes = processes(context)\n\tvar process = processes.Pop()\n\tif process != nil && process.Source != nil {\n\t\tcontext.Source = process.Source\n\t}\n\treturn process\n}", "func (m *MockMounter) CommandContext(ctx context.Context, cmd string, args ...string) exec.Cmd {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctx, cmd}\n\tfor _, a := range args {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"CommandContext\", varargs...)\n\tret0, _ := ret[0].(exec.Cmd)\n\treturn ret0\n}", "func (lc *LoggingContext) Pop() (error, bool) {\n\tif lc.curr == \"\" {\n\t\treturn errors.New(fmt.Sprintf(\"Cannot pop context; no loggers have been added\")), false\n\t}\n\tlc.guard.Lock()\n\tdefer lc.guard.Unlock()\n\tif lc.logstack.Len() == 0 {\n\t\treturn errors.New(fmt.Sprintf(\"Cannot pop context; logger stack empty. 
Still using \\\"%s\\\"\", lc.curr)), true\n\t}\n\tname := lc.logstack.Remove(lc.logstack.Back()).(string)\n\tlc.curr = name\n\tlogger := lc.logmap[name]\n\tl.UseLogger(*logger)\n\treturn nil, true\n}", "func (_m *Pipeline_mgr_iface) OnExit() error {\n\tret := _m.Called()\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func() error); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (client *MockPodExecClient) AddMockPodExecReturnContexts(ctx context.Context, podExecCmds []string, mockPodExecReturnContexts ...*MockPodExecReturnContext) {\n\tfor n := range mockPodExecReturnContexts {\n\t\tclient.AddMockPodExecReturnContext(ctx, podExecCmds[n], mockPodExecReturnContexts[n])\n\t}\n}", "func WrapMockAuthConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client, roles ...string) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\n\t\turlVars := mux.Vars(r)\n\n\t\tuserRoles := []string{\"publisher\", \"consumer\"}\n\t\tif len(roles) > 0 {\n\t\t\tuserRoles = roles\n\t\t}\n\n\t\tnStr := str.Clone()\n\t\tdefer nStr.Close()\n\n\t\tprojectUUID := projects.GetUUIDByName(urlVars[\"project\"], nStr)\n\t\tgorillaContext.Set(r, \"auth_project_uuid\", projectUUID)\n\t\tgorillaContext.Set(r, \"brk\", brk)\n\t\tgorillaContext.Set(r, \"str\", nStr)\n\t\tgorillaContext.Set(r, \"mgr\", mgr)\n\t\tgorillaContext.Set(r, \"apsc\", c)\n\t\tgorillaContext.Set(r, \"auth_resource\", cfg.ResAuth)\n\t\tgorillaContext.Set(r, \"auth_user\", \"UserA\")\n\t\tgorillaContext.Set(r, \"auth_user_uuid\", \"uuid1\")\n\t\tgorillaContext.Set(r, \"auth_roles\", userRoles)\n\t\tgorillaContext.Set(r, \"push_worker_token\", cfg.PushWorkerToken)\n\t\tgorillaContext.Set(r, \"push_enabled\", cfg.PushEnabled)\n\t\thfn.ServeHTTP(w, r)\n\n\t})\n}", "func (c *Context) POPQ(mr operand.Op) {\n\tc.addinstruction(x86.POPQ(mr))\n}", "func (mock *PurgerMock) PurgeCalls() []struct {\n\tCtx context.Context\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t}\n\tmock.lockPurge.RLock()\n\tcalls = mock.calls.Purge\n\tmock.lockPurge.RUnlock()\n\treturn calls\n}", "func (m *MockSessionRunner) RetireResetToken(arg0 [16]byte) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"RetireResetToken\", arg0)\n}", "func CALL(r operand.Op) { ctx.CALL(r) }", "func call(context *Context) {\n context.cpu.sp++\n context.stack[context.cpu.sp] = context.cpu.pc\n context.cpu.pc = context.opcode & 0x0FFF\n}", "func (m *MockEnvironment) Stack() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Stack\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (s *state) pop(mark int) {\n\ts.vars = s.vars[0:mark]\n}", "func (_f34 *FakeContext) SetWithParentStub(ident2 Context) {\n\t_f34.WithParentHook = func(context.Context) Context {\n\t\treturn ident2\n\t}\n}", "func (b *Builder) Pop(count uint32) {\n\tb.popStackMulti(int(count))\n\tb.instructions = append(b.instructions, asm.Pop{\n\t\tCount: count,\n\t})\n}", "func (c *Compiler) preventPop() {\n\told := c.currentInstructions()\n\tnew := old[:c.scopes[c.currentScope].emitted.Position]\n\n\tc.scopes[c.currentScope].instructions = new\n\tc.scopes[c.currentScope].emitted = c.scopes[c.currentScope].prevEmitted\n}", "func (_m *mockSuite) teardown(_a0 testing.T) {\n\t_m.Called(_a0)\n}", "func (m *MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Extract\", format, carrier)\n\tret0, _ := 
ret[0].(opentracing.SpanContext)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockTracer) Extract(format, carrier interface{}) (opentracing.SpanContext, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Extract\", format, carrier)\n\tret0, _ := ret[0].(opentracing.SpanContext)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockMessageProducer) Close() {\n\t_m.Called()\n}", "func newProjectCommandContext(ctx *command.Context,\n\tcmd command.Name,\n\tapplyCmd string,\n\tapprovePoliciesCmd string,\n\tplanCmd string,\n\tprojCfg valid.MergedProjectCfg,\n\tsteps []valid.Step,\n\tpolicySets valid.PolicySets,\n\tescapedCommentArgs []string,\n\tautomergeEnabled bool,\n\tparallelApplyEnabled bool,\n\tparallelPlanEnabled bool,\n\tverbose bool,\n\tabortOnExcecutionOrderFail bool,\n\tscope tally.Scope,\n\tpullStatus models.PullReqStatus,\n) command.ProjectContext {\n\n\tvar projectPlanStatus models.ProjectPlanStatus\n\tvar projectPolicyStatus []models.PolicySetStatus\n\n\tif ctx.PullStatus != nil {\n\t\tfor _, project := range ctx.PullStatus.Projects {\n\n\t\t\t// if name is not used, let's match the directory\n\t\t\tif projCfg.Name == \"\" && project.RepoRelDir == projCfg.RepoRelDir {\n\t\t\t\tprojectPlanStatus = project.Status\n\t\t\t\tprojectPolicyStatus = project.PolicyStatus\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tif projCfg.Name != \"\" && project.ProjectName == projCfg.Name {\n\t\t\t\tprojectPlanStatus = project.Status\n\t\t\t\tprojectPolicyStatus = project.PolicyStatus\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\n\treturn command.ProjectContext{\n\t\tCommandName: cmd,\n\t\tApplyCmd: applyCmd,\n\t\tApprovePoliciesCmd: approvePoliciesCmd,\n\t\tBaseRepo: ctx.Pull.BaseRepo,\n\t\tEscapedCommentArgs: escapedCommentArgs,\n\t\tAutomergeEnabled: automergeEnabled,\n\t\tDeleteSourceBranchOnMerge: projCfg.DeleteSourceBranchOnMerge,\n\t\tRepoLocking: projCfg.RepoLocking,\n\t\tParallelApplyEnabled: parallelApplyEnabled,\n\t\tParallelPlanEnabled: parallelPlanEnabled,\n\t\tParallelPolicyCheckEnabled: parallelPlanEnabled,\n\t\tAutoplanEnabled: projCfg.AutoplanEnabled,\n\t\tSteps: steps,\n\t\tHeadRepo: ctx.HeadRepo,\n\t\tLog: ctx.Log,\n\t\tScope: scope,\n\t\tProjectPlanStatus: projectPlanStatus,\n\t\tProjectPolicyStatus: projectPolicyStatus,\n\t\tPull: ctx.Pull,\n\t\tProjectName: projCfg.Name,\n\t\tPlanRequirements: projCfg.PlanRequirements,\n\t\tApplyRequirements: projCfg.ApplyRequirements,\n\t\tImportRequirements: projCfg.ImportRequirements,\n\t\tRePlanCmd: planCmd,\n\t\tRepoRelDir: projCfg.RepoRelDir,\n\t\tRepoConfigVersion: projCfg.RepoCfgVersion,\n\t\tTerraformVersion: projCfg.TerraformVersion,\n\t\tUser: ctx.User,\n\t\tVerbose: verbose,\n\t\tWorkspace: projCfg.Workspace,\n\t\tPolicySets: policySets,\n\t\tPolicySetTarget: ctx.PolicySet,\n\t\tClearPolicyApproval: ctx.ClearPolicyApproval,\n\t\tPullReqStatus: pullStatus,\n\t\tJobID: uuid.New().String(),\n\t\tExecutionOrderGroup: projCfg.ExecutionOrderGroup,\n\t\tAbortOnExcecutionOrderFail: abortOnExcecutionOrderFail,\n\t}\n}", "func (m *MockInternalClient) WRingSetMeta(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingSetMetaResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingSetMeta\", varargs...)\n\tret0, _ := ret[0].(*WRingSetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockMessageHandler) UnSub(arg0 context.Context, arg1 *proto.UnSubRequest, arg2 
*proto.Response) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"UnSub\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func contextInjector(baseCtx func() context.Context) grpcutil.UnifiedServerInterceptor {\n\treturn func(ctx context.Context, fullMethod string, handler func(ctx context.Context) error) error {\n\t\treturn handler(&mergedCtx{ctx, baseCtx()})\n\t}\n}", "func (m *MockArgusdClient) DestroyWatch(arg0 context.Context, arg1 *golang.ArgusdConfig, arg2 ...grpc.CallOption) (*golang.Empty, error) {\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"DestroyWatch\", varargs...)\n\tret0, _ := ret[0].(*golang.Empty)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (st *scopeStack) pop() {\n\tl := len(st.stack)\n\tif l == 1 {\n\t\tpanic(\"popped the standard library (pre-main) scope\")\n\t}\n\t// TODO OPT: Optimize for space, this will never free any underlying memory.\n\tst.stack = st.stack[:l-1]\n}", "func (mw *MockWarehouse) Close() {}", "func (s *BaseMySqlParserListener) ExitSpecificFunctionCall(ctx *SpecificFunctionCallContext) {}", "func (_e *AcknowledgeableInput_Expecter) Ack(ctx interface{}, msg interface{}, ack interface{}) *AcknowledgeableInput_Ack_Call {\n\treturn &AcknowledgeableInput_Ack_Call{Call: _e.mock.On(\"Ack\", ctx, msg, ack)}\n}", "func (mock *StoreServiceMock) RemoveCalls() []struct {\n\tEntry ytfeed.Entry\n} {\n\tvar calls []struct {\n\t\tEntry ytfeed.Entry\n\t}\n\tmock.lockRemove.RLock()\n\tcalls = mock.calls.Remove\n\tmock.lockRemove.RUnlock()\n\treturn calls\n}", "func TestFBaseProcessorNoProcessorFunction(t *testing.T) {\n\ttmpLogger := logrus.New()\n\tvar logBuf bytes.Buffer\n\ttmpLogger.Out = &logBuf\n\toldLogger := logger()\n\tSetLogger(tmpLogger)\n\tdefer func() {\n\t\tSetLogger(oldLogger)\n\t}()\n\n\tmockTransport := new(mockTTransport)\n\treads := make(chan []byte, 4)\n\treads <- pingFrame[0:1] // version\n\treads <- pingFrame[1:5] // headers size\n\treads <- pingFrame[5:34] // FContext headers\n\treads <- pingFrame[34:] // request body\n\tmockTransport.reads = reads\n\t// _opid0, cid 123\n\t// The ordering of opid and cid in the header is non-deterministic,\n\t// so cant check for equality.\n\tresponseCtx := []byte{0, 0, 0, 0, 29, 0, 0, 0, 5, 95, 111, 112, 105, 100, 0, 0, 0, 1, 48, 0, 0, 0, 4, 95, 99, 105, 100, 0, 0, 0, 3, 49, 50, 51}\n\tmockTransport.On(\"Write\", mock.Anything).Return(len(responseCtx), nil).Once()\n\t// [1,\"ping\",3,0,{\"1\":{\"str\":\"Unknown function ping\"},\"2\":{\"i32\":1}}]\n\tresponseBody := []byte{\n\t\t91, 49, 44, 34, 112, 105, 110, 103, 34, 44, 51, 44, 48, 44, 123, 34,\n\t\t49, 34, 58, 123, 34, 115, 116, 114, 34, 58, 34, 85, 110, 107, 110, 111,\n\t\t119, 110, 32, 102, 117, 110, 99, 116, 105, 111, 110, 32, 112, 105, 110,\n\t\t103, 34, 125, 44, 34, 50, 34, 58, 123, 34, 105, 51, 50, 34, 58, 49,\n\t\t125, 125, 93,\n\t}\n\tmockTransport.On(\"Write\", responseBody).Return(len(responseBody), nil).Once()\n\tmockTransport.On(\"Flush\", mock.Anything).Return(nil)\n\tproto := &FProtocol{TProtocol: thrift.NewTJSONProtocol(mockTransport)}\n\tprocessor := NewFBaseProcessor()\n\n\tassert.NoError(t, processor.Process(proto, proto))\n\tassert.True(t,\n\t\tstrings.Contains(\n\t\t\tstring(logBuf.Bytes()),\n\t\t\t\"frugal: client invoked unknown function ping on request with correlation id 123\"))\n\tmockTransport.AssertExpectations(t)\n}", "func pop(a *Stack) {\n if a.head == 0 {\n fmt.Println(\"STACK EMPTY\")\n } else 
{a.con = a.con[:len(a.con)-1]\n\tfmt.Println(\"by reference function call\",a.con)\n\ta.head--\n }\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (mock *BotMock) OnCalls() []struct {\n\tIn1 middleware.Predicate\n\tIn2 middleware.Handler\n} {\n\tvar calls []struct {\n\t\tIn1 middleware.Predicate\n\t\tIn2 middleware.Handler\n\t}\n\tlockBotMockOn.RLock()\n\tcalls = mock.calls.On\n\tlockBotMockOn.RUnlock()\n\treturn calls\n}", "func (lc mockNotifyLogger) Trace(msg string, args ...interface{}) {\n}", "func (md *MockDestination) Close() {\n}", "func (m *MockDriver) Close() {}", "func (m *mParcelMockContext) Expect(p context.Context) *mParcelMockContext {\n\tm.mock.ContextFunc = nil\n\tm.expectationSeries = nil\n\n\tif m.mainExpectation == nil {\n\t\tm.mainExpectation = &ParcelMockContextExpectation{}\n\t}\n\tm.mainExpectation.input = &ParcelMockContextInput{p}\n\treturn m\n}", "func (_e *Output_Expecter) WriteOne(ctx interface{}, msg interface{}) *Output_WriteOne_Call {\n\treturn &Output_WriteOne_Call{Call: _e.mock.On(\"WriteOne\", ctx, msg)}\n}", "func (s *BaselimboListener) ExitQualifier(ctx *QualifierContext) {}", "func (mock *SessionRepositoryMock) RemoveCalls() []struct {\n\tSession string\n} {\n\tvar calls []struct {\n\t\tSession string\n\t}\n\tlockSessionRepositoryMockRemove.RLock()\n\tcalls = mock.calls.Remove\n\tlockSessionRepositoryMockRemove.RUnlock()\n\treturn calls\n}", "func (s *BaselimboListener) ExitMonadic_expression(ctx *Monadic_expressionContext) {}", "func (w *walk) pop() {\n\tif len(*w) > 0 {\n\t\t*w = (*w)[:len(*w)-1]\n\t}\n}", "func (m *MockSQSAPI) RemovePermissionWithContext(arg0 context.Context, arg1 *sqs.RemovePermissionInput, arg2 ...request.Option) (*sqs.RemovePermissionOutput, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"RemovePermissionWithContext\", varargs...)\n\tret0, _ := ret[0].(*sqs.RemovePermissionOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (s *callStack) popIfExists(whichFrame int) {\n\tif len(s.stack) == whichFrame {\n\t\tif s.top().cleanEnv {\n\t\t\ts.calls--\n\t\t}\n\t\ts.setCurrentTrace(s.stack[len(s.stack)-1].trace)\n\t\ts.stack = s.stack[:len(s.stack)-1]\n\t}\n}" ]
[ "0.77656066", "0.7429666", "0.74085003", "0.65526056", "0.64336056", "0.6204913", "0.61769307", "0.4980717", "0.4930698", "0.48595628", "0.48254013", "0.4805074", "0.4766037", "0.46512192", "0.46512038", "0.46496385", "0.46438354", "0.46318793", "0.46282122", "0.461843", "0.4608844", "0.45978042", "0.45671457", "0.4563564", "0.45365402", "0.45307723", "0.45104295", "0.45068598", "0.44916826", "0.44844896", "0.448062", "0.4470565", "0.4459924", "0.44594678", "0.44527218", "0.44479877", "0.44473657", "0.44394413", "0.4433973", "0.44235995", "0.44118848", "0.440748", "0.44019592", "0.44019592", "0.43895328", "0.4388715", "0.43792522", "0.4376066", "0.43669745", "0.43635675", "0.43576744", "0.43459746", "0.4343124", "0.43386307", "0.43385372", "0.43312073", "0.43248644", "0.43170634", "0.42881244", "0.42839247", "0.42748767", "0.42690372", "0.426231", "0.42376375", "0.42281568", "0.4226049", "0.4224621", "0.4209568", "0.4202517", "0.4190489", "0.41899085", "0.41885242", "0.41823015", "0.41823015", "0.41733974", "0.41694632", "0.41667694", "0.41653606", "0.41467094", "0.4143537", "0.41425562", "0.41422737", "0.4140317", "0.41401443", "0.41304436", "0.41246447", "0.411908", "0.4109117", "0.41076878", "0.4106092", "0.41060805", "0.41057786", "0.41053492", "0.4104692", "0.41032037", "0.40974647", "0.4089285", "0.4087238", "0.40872023", "0.4086604" ]
0.78453577
0
PushContext provides a mock function with given fields: logicalName, writerArgs
func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok { r0 = rf(logicalName, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PushContext_Call {\n\treturn &MockWriteBufferJsonBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PushContext_Call {\n\treturn &MockWriteBufferXmlBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Repository) PushContext(_a0 context.Context, _a1 *git.PushOptions) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *git.PushOptions) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockAcknowledger) Push(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *OplogCursor) Push(_a0 []byte) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *DockerClient) ImagePush(_a0 context.Context, _a1 string, _a2 types.ImagePushOptions) (io.ReadCloser, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func(context.Context, string, types.ImagePushOptions) io.ReadCloser); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string, types.ImagePushOptions) error); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr1 = 
ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PopContext_Call {\n\treturn &MockWriteBufferJsonBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Push(arg0 string, arg1 interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Push\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockFullNode) MpoolPush(arg0 context.Context, arg1 *types.SignedMessage) (cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolPush\", arg0, arg1)\n\tret0, _ := ret[0].(cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockLayout) Push(arg0 image.Digest, arg1 image.Name) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Push\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *Repository) Push(_a0 *git.PushOptions) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*git.PushOptions) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PopContext_Call {\n\treturn &MockWriteBufferXmlBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *Queue) Push(v interface{}) interface{} {\n\tret := _m.Called(v)\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func(interface{}) interface{}); ok {\n\t\tr0 = rf(v)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *IRepository) Store(name string, age int) error {\n\tret := _m.Called(name, age)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, int) error); ok {\n\t\tr0 = rf(name, age)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Manager) Add(ctx context.Context, projectID int64, meta map[string]string) error {\n\tret := _m.Called(ctx, projectID, meta)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, int64, map[string]string) error); ok {\n\t\tr0 = rf(ctx, projectID, meta)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockRequestInterceptor) InterceptWriteRequest(ctx context.Context, writeRequest model.PlcWriteRequest) []model.PlcWriteRequest {\n\tret := _m.Called(ctx, writeRequest)\n\n\tvar r0 []model.PlcWriteRequest\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest) []model.PlcWriteRequest); ok {\n\t\tr0 = rf(ctx, writeRequest)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]model.PlcWriteRequest)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockInternalServer) WRingSetMeta(arg0 context.Context, arg1 *WRingRequestMsg) (*WRingSetMetaResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"WRingSetMeta\", arg0, arg1)\n\tret0, _ := ret[0].(*WRingSetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteRequestInterceptor) InterceptWriteRequest(ctx context.Context, writeRequest model.PlcWriteRequest) []model.PlcWriteRequest {\n\tret := _m.Called(ctx, writeRequest)\n\n\tvar r0 []model.PlcWriteRequest\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest) []model.PlcWriteRequest); ok {\n\t\tr0 = rf(ctx, writeRequest)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]model.PlcWriteRequest)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *ExecutionManager) Create(ctx context.Context, vendorType string, vendorID int64, trigger string, extraAttrs ...map[string]interface{}) (int64, error) {\n\t_va := make([]interface{}, len(extraAttrs))\n\tfor _i := range extraAttrs {\n\t\t_va[_i] = extraAttrs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, vendorType, vendorID, trigger)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 int64\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, int64, string, ...map[string]interface{}) (int64, error)); ok {\n\t\treturn rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, string, int64, string, ...map[string]interface{}) int64); ok {\n\t\tr0 = rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t} else {\n\t\tr0 = ret.Get(0).(int64)\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, string, int64, string, ...map[string]interface{}) error); ok {\n\t\tr1 = rf(ctx, vendorType, vendorID, trigger, extraAttrs...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func (m *MockAtomicLogic) PushKeyKeeper() core.PushKeyKeeper 
{\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushKeyKeeper\")\n\tret0, _ := ret[0].(core.PushKeyKeeper)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockLogic) PushKeyKeeper() core.PushKeyKeeper {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushKeyKeeper\")\n\tret0, _ := ret[0].(core.PushKeyKeeper)\n\treturn ret0\n}", "func (m *MockIExec) DoGitPush(dir string, args ...string) (string, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{dir}\n\tfor _, a := range args {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"DoGitPush\", varargs...)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockBackend) Push(image string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Push\", image)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockInternalServer) WRingCreate(arg0 context.Context, arg1 *WRingRequestMsg) (*WRingResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"WRingCreate\", arg0, arg1)\n\tret0, _ := ret[0].(*WRingResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockManager) SerializeShipMetadata(arg0 api.ShipAppMetadata, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeShipMetadata\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *mockDbOperation) StoreMetadata(metadata db.Metadata, dir string) error {\n\tret := _m.Called(metadata, dir)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(db.Metadata, string) error); ok {\n\t\tr0 = rf(metadata, dir)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *AppFunctionContext) PushToCore(event dtos.Event) (common.BaseWithIdResponse, error) {\n\tret := _m.Called(event)\n\n\tvar r0 common.BaseWithIdResponse\n\tif rf, ok := ret.Get(0).(func(dtos.Event) common.BaseWithIdResponse); ok {\n\t\tr0 = rf(event)\n\t} else {\n\t\tr0 = ret.Get(0).(common.BaseWithIdResponse)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(dtos.Event) error); ok {\n\t\tr1 = rf(event)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockWriter) Create(key string, value interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Create\", key, value)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (_m *MockInterface) Create(ctx context.Context, key string, val string) error {\n\tret := _m.Called(ctx, key, val)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(context.Context, string, string) error); ok {\n\t\tr0 = rf(ctx, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalClient) WRingSetMeta(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingSetMetaResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingSetMeta\", varargs...)\n\tret0, _ := ret[0].(*WRingSetMetaResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFullNode) MpoolPushMessage(arg0 context.Context, arg1 *types.Message, arg2 *types0.MessageSendSpec) (*types.SignedMessage, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolPushMessage\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(*types.SignedMessage)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Repository) Store(ctx context.Context, _a1 *models.Host) error {\n\tret := _m.Called(ctx, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *models.Host) error); ok {\n\t\tr0 = rf(ctx, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *SourceTarget) Persist(ctx context.Context, name string) error {\n\tret := _m.Called(ctx, name)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string) error); ok {\n\t\tr0 = rf(ctx, name)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func mockRegisterProducerTx(info *payload.ProducerInfo) *types.Transaction {\n\treturn &types.Transaction{\n\t\tTxType: types.RegisterProducer,\n\t\tPayload: info,\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockStore) Put(arg0 string, arg1 []byte, arg2 ...storage.Tag) error 
{\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Put\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockPacketHandler) Context() context.Context {\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (_m *MockSetter) Set(ctx context.Context, volumeName string, labels map[string]string) error {\n\tret := _m.Called(ctx, volumeName, labels)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, map[string]string) error); ok {\n\t\tr0 = rf(ctx, volumeName, labels)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockKeepers) PushKeyKeeper() core.PushKeyKeeper {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushKeyKeeper\")\n\tret0, _ := ret[0].(core.PushKeyKeeper)\n\treturn ret0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mmWriteTo *mDigestHolderMockWriteTo) When(w io.Writer) *DigestHolderMockWriteToExpectation {\n\tif mmWriteTo.mock.funcWriteTo != nil {\n\t\tmmWriteTo.mock.t.Fatalf(\"DigestHolderMock.WriteTo mock is already set by Set\")\n\t}\n\n\texpectation := &DigestHolderMockWriteToExpectation{\n\t\tmock: mmWriteTo.mock,\n\t\tparams: &DigestHolderMockWriteToParams{w},\n\t}\n\tmmWriteTo.expectations = append(mmWriteTo.expectations, expectation)\n\treturn expectation\n}", "func (m *SignatureKeyHolderMock) WriteTo(p io.Writer) (r int64, r1 error) {\n\tcounter := atomic.AddUint64(&m.WriteToPreCounter, 1)\n\tdefer atomic.AddUint64(&m.WriteToCounter, 1)\n\n\tif len(m.WriteToMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.WriteToMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. 
%v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.WriteToMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\n\t\tresult := m.WriteToMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToMock.mainExpectation != nil {\n\n\t\tinput := m.WriteToMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.WriteToMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. %v\", p)\n\t\treturn\n\t}\n\n\treturn m.WriteToFunc(p)\n}", "func (m *MockTChanNode) WriteTagged(ctx thrift.Context, req *WriteTaggedRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteTagged\", ctx, req)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *BlobStore) Put(path string, reader io.Reader, objectSize int64) error {\n\tret := _m.Called(path, reader, objectSize)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, io.Reader, int64) error); ok {\n\t\tr0 = rf(path, reader, objectSize)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalClient) WRingCreate(ctx context.Context, in *WRingRequestMsg, opts ...grpc.CallOption) (*WRingResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"WRingCreate\", varargs...)\n\tret0, _ := ret[0].(*WRingResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *Client) Post(arg0 context.Context, arg1 string, arg2 interface{}) ([]byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Post\", arg0, arg1, arg2)\n\tret0, _ := ret[0].([]byte)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Output) WriteOne(ctx context.Context, msg stream.WritableMessage) error {\n\tret := _m.Called(ctx, msg)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, msg)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFullNode) MpoolBatchPush(arg0 context.Context, arg1 []*types.SignedMessage) ([]cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolBatchPush\", arg0, arg1)\n\tret0, _ := ret[0].([]cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func createMockContext(t *testing.T, mockArgs []string) *cli.Context {\n\tt.Log(\"Create mock context\")\n\tmockApp := cli.NewApp()\n\n\tmockSet := flag.NewFlagSet(\"mock\", 0)\n\t//mockArgs := 
[]string{\"TESTDIR\"}\n\tmockSet.Parse(mockArgs)\n\n\treturn cli.NewContext(mockApp, mockSet, nil)\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *GetterSetter) Set(ctx context.Context, data interface{}, opts ...firestore.SetOption) (*firestore.WriteResult, error) {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 *firestore.WriteResult\n\tif rf, ok := ret.Get(0).(func(context.Context, interface{}, ...firestore.SetOption) *firestore.WriteResult); ok {\n\t\tr0 = rf(ctx, data, opts...)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*firestore.WriteResult)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, interface{}, ...firestore.SetOption) error); ok {\n\t\tr1 = rf(ctx, data, opts...)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *ModeHistory) Add(ctx context.Context, mode string, user string, message string) error {\n\tret := _m.Called(ctx, mode, user, message)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {\n\t\tr0 = rf(ctx, mode, user, message)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockDelegateActor) PostOutboxRequestBodyHook(c context.Context, r *http.Request, data vocab.Type) (context.Context, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PostOutboxRequestBodyHook\", c, r, data)\n\tret0, _ := ret[0].(context.Context)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockLedgerClient) Register(arg0 context.Context, arg1 *ledger.RegisterRequest, arg2 ...grpc.CallOption) (*ledger.RegisterResult, error) {\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"Register\", varargs...)\n\tret0, _ := ret[0].(*ledger.RegisterResult)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockTChanNode) Write(ctx thrift.Context, req *WriteRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, req)\n\tret0, 
_ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *mSignatureKeyHolderMockWriteTo) Set(f func(p io.Writer) (r int64, r1 error)) *SignatureKeyHolderMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.WriteToFunc = f\n\treturn m.mock\n}", "func (m *MockTChanCluster) WriteTagged(ctx thrift.Context, req *WriteTaggedRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteTagged\", ctx, req)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *Forge) Hook(ctx context.Context, r *http.Request) (*model.Repo, *model.Pipeline, error) {\n\tret := _m.Called(ctx, r)\n\n\tvar r0 *model.Repo\n\tvar r1 *model.Pipeline\n\tvar r2 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *http.Request) (*model.Repo, *model.Pipeline, error)); ok {\n\t\treturn rf(ctx, r)\n\t}\n\tif rf, ok := ret.Get(0).(func(context.Context, *http.Request) *model.Repo); ok {\n\t\tr0 = rf(ctx, r)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Repo)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(1).(func(context.Context, *http.Request) *model.Pipeline); ok {\n\t\tr1 = rf(ctx, r)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*model.Pipeline)\n\t\t}\n\t}\n\n\tif rf, ok := ret.Get(2).(func(context.Context, *http.Request) error); ok {\n\t\tr2 = rf(ctx, r)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (_m *MoveRouter) Submit(appCtx appcontext.AppContext, move *models.Move, newSignedCertification *models.SignedCertification) error {\n\tret := _m.Called(appCtx, move, newSignedCertification)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(appcontext.AppContext, *models.Move, *models.SignedCertification) error); ok {\n\t\tr0 = rf(appCtx, move, newSignedCertification)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (_m *GCSUploader) UploadJSON(ctx context.Context, data interface{}, tempFileName string, gcsObjectPath string) error {\n\tret := _m.Called(ctx, data, tempFileName, gcsObjectPath)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, interface{}, string, string) error); ok {\n\t\tr0 = rf(ctx, data, tempFileName, gcsObjectPath)\n\t} else 
{\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewMockWriteRequestInterceptor(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockWriteRequestInterceptor {\n\tmock := &MockWriteRequestInterceptor{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func (m *MockConfigAdminService_UploadRegisterModelServer) Context() context.Context {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (_m *Remote) Hook(r *http.Request) (*model.Repo, *model.Build, error) {\n\tret := _m.Called(r)\n\n\tvar r0 *model.Repo\n\tif rf, ok := ret.Get(0).(func(*http.Request) *model.Repo); ok {\n\t\tr0 = rf(r)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*model.Repo)\n\t\t}\n\t}\n\n\tvar r1 *model.Build\n\tif rf, ok := ret.Get(1).(func(*http.Request) *model.Build); ok {\n\t\tr1 = rf(r)\n\t} else {\n\t\tif ret.Get(1) != nil {\n\t\t\tr1 = ret.Get(1).(*model.Build)\n\t\t}\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(*http.Request) error); ok {\n\t\tr2 = rf(r)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}", "func (_m *ORM) StoreString(chainID *big.Int, key string, val string) error {\n\tret := _m.Called(chainID, key, val)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*big.Int, string, string) error); ok {\n\t\tr0 = rf(chainID, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockContext) Wire(v reflect.Value, tag string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Wire\", v, tag)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *SubProcessCmd) StdinPipe() (io.WriteCloser, error) {\n\tret := _m.Called()\n\n\tvar r0 io.WriteCloser\n\tif rf, ok := ret.Get(0).(func() io.WriteCloser); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.WriteCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *DirectRepositoryWriter) Flush(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *RepositoryWriter) Flush(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStorage) Add(user string, timeStamp int64, expireTime int64) error {\n\tret := _m.Called(user, timeStamp, expireTime)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, int64, int64) error); ok {\n\t\tr0 = rf(user, timeStamp, expireTime)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) 
WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockEnvironment) Stack() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Stack\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestFBaseProcessorNoProcessorFunction(t *testing.T) {\n\ttmpLogger := logrus.New()\n\tvar logBuf bytes.Buffer\n\ttmpLogger.Out = &logBuf\n\toldLogger := logger()\n\tSetLogger(tmpLogger)\n\tdefer func() {\n\t\tSetLogger(oldLogger)\n\t}()\n\n\tmockTransport := new(mockTTransport)\n\treads := make(chan []byte, 4)\n\treads <- pingFrame[0:1] // version\n\treads <- pingFrame[1:5] // headers size\n\treads <- pingFrame[5:34] // FContext headers\n\treads <- pingFrame[34:] // request body\n\tmockTransport.reads = reads\n\t// _opid0, cid 123\n\t// The ordering of opid and cid in the header is non-deterministic,\n\t// so cant check for equality.\n\tresponseCtx := []byte{0, 0, 0, 0, 29, 0, 0, 0, 5, 95, 111, 112, 105, 100, 0, 0, 0, 1, 48, 0, 0, 0, 4, 95, 99, 105, 100, 0, 0, 0, 3, 49, 50, 51}\n\tmockTransport.On(\"Write\", mock.Anything).Return(len(responseCtx), nil).Once()\n\t// [1,\"ping\",3,0,{\"1\":{\"str\":\"Unknown function ping\"},\"2\":{\"i32\":1}}]\n\tresponseBody := []byte{\n\t\t91, 49, 44, 34, 112, 105, 110, 103, 34, 44, 51, 44, 48, 44, 123, 34,\n\t\t49, 34, 58, 123, 34, 115, 116, 114, 34, 58, 34, 85, 110, 107, 110, 111,\n\t\t119, 110, 32, 102, 117, 110, 99, 116, 105, 111, 110, 32, 112, 105, 110,\n\t\t103, 34, 125, 44, 34, 50, 34, 58, 123, 34, 105, 51, 50, 34, 58, 49,\n\t\t125, 125, 93,\n\t}\n\tmockTransport.On(\"Write\", responseBody).Return(len(responseBody), nil).Once()\n\tmockTransport.On(\"Flush\", mock.Anything).Return(nil)\n\tproto := &FProtocol{TProtocol: thrift.NewTJSONProtocol(mockTransport)}\n\tprocessor := NewFBaseProcessor()\n\n\tassert.NoError(t, processor.Process(proto, proto))\n\tassert.True(t,\n\t\tstrings.Contains(\n\t\t\tstring(logBuf.Bytes()),\n\t\t\t\"frugal: client invoked unknown function ping on request with correlation id 123\"))\n\tmockTransport.AssertExpectations(t)\n}", "func (_e *MockRequestInterceptor_Expecter) InterceptWriteRequest(ctx interface{}, writeRequest interface{}) *MockRequestInterceptor_InterceptWriteRequest_Call {\n\treturn &MockRequestInterceptor_InterceptWriteRequest_Call{Call: _e.mock.On(\"InterceptWriteRequest\", ctx, writeRequest)}\n}", "func (_m *MockEURIConverter) CreateWriter(uri *url.URL) io.WriteCloser {\n\tret := _m.Called(uri)\n\n\tvar r0 io.WriteCloser\n\tif rf, ok := ret.Get(0).(func(*url.URL) io.WriteCloser); ok {\n\t\tr0 = rf(uri)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.WriteCloser)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *MockHistoryEngine) 
ReplicateRawEvents(ctx context.Context, request *gohistory.ReplicateRawEventsRequest) error {\n\tret := _m.Called(request)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*gohistory.ReplicateRawEventsRequest) error); ok {\n\t\tr0 = rf(request)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockConn) Write(arg0 core.WriteableFrame) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}" ]
[ "0.8022218", "0.7493231", "0.72168684", "0.66726047", "0.65568435", "0.64086", "0.61141825", "0.57537127", "0.56440634", "0.5580612", "0.5567108", "0.5530202", "0.5488108", "0.5436103", "0.54193276", "0.5399626", "0.5376059", "0.5359565", "0.5356018", "0.5337279", "0.5282447", "0.52520776", "0.5249146", "0.52358776", "0.5228741", "0.5201827", "0.5189856", "0.51860595", "0.5165536", "0.51627827", "0.5155957", "0.51535356", "0.51353663", "0.50988054", "0.50949967", "0.5080326", "0.50655746", "0.50561905", "0.50506634", "0.503302", "0.50224227", "0.50213414", "0.50194865", "0.5003216", "0.49938223", "0.49900547", "0.49822015", "0.49798012", "0.49752268", "0.49748874", "0.49665368", "0.49611303", "0.49472433", "0.4933958", "0.4931513", "0.4923994", "0.49117577", "0.4907086", "0.48988584", "0.48893636", "0.4882357", "0.48737764", "0.48690984", "0.48653504", "0.48606068", "0.4858899", "0.4858163", "0.48567444", "0.484572", "0.4839827", "0.48391443", "0.48309314", "0.48198485", "0.4815632", "0.48125467", "0.479971", "0.4798826", "0.47921312", "0.47863472", "0.47834316", "0.47819194", "0.4779911", "0.47778618", "0.4761433", "0.4757964", "0.47463357", "0.47391728", "0.47376156", "0.47318962", "0.47317222", "0.4725067", "0.47237557", "0.47228873", "0.47189796", "0.47189367", "0.47179273", "0.4717704", "0.4716715", "0.47049573", "0.47030705" ]
0.8145334
0
PushContext is a helper method to define mock.On call logicalName string writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PushContext_Call { return &MockWriteBufferJsonBased_PushContext_Call{Call: _e.mock.On("PushContext", append([]interface{}{logicalName}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) PushContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PushContext_Call {\n\treturn &MockWriteBufferXmlBased_PushContext_Call{Call: _e.mock.On(\"PushContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Repository) PushContext(_a0 context.Context, _a1 *git.PushOptions) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *git.PushOptions) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) PopContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_PopContext_Call {\n\treturn &MockWriteBufferJsonBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) PopContext(logicalName interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_PopContext_Call {\n\treturn &MockWriteBufferXmlBased_PopContext_Call{Call: _e.mock.On(\"PopContext\",\n\t\tappend([]interface{}{logicalName}, writerArgs...)...)}\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (f *FakeVCenter) PushMetrics(context.Context, metrics.Receiver) 
{}", "func (_m *MockAcknowledger) Push(ctx context.Context) error {\n\tret := _m.Called(ctx)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context) error); ok {\n\t\tr0 = rf(ctx)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (lc *LoggingContext) Push(name string) (error, bool) {\n\tif lc.curr == \"\" {\n\t\treturn errors.New(fmt.Sprintf(\"Cannot push context to \\\"%s\\\"; no loggers have been added\", name)), false\n\t}\n\tlc.guard.Lock()\n\tdefer lc.guard.Unlock()\n\tlogger := lc.logmap[name]\n\tif logger == nil {\n\t\treturn errors.New(fmt.Sprintf(\"Logger \\\"%s\\\" not found. Still using \\\"%s\\\"\", name, lc.curr)), true\n\t}\n\tlc.logstack.PushBack(lc.curr)\n\tlc.curr = name\n\tl.UseLogger(*logger)\n\treturn nil, true\n}", "func createMockContext(t *testing.T, mockArgs []string) *cli.Context {\n\tt.Log(\"Create mock context\")\n\tmockApp := cli.NewApp()\n\n\tmockSet := flag.NewFlagSet(\"mock\", 0)\n\t//mockArgs := []string{\"TESTDIR\"}\n\tmockSet.Parse(mockArgs)\n\n\treturn cli.NewContext(mockApp, mockSet, nil)\n}", "func (conf Configuration) OnCall(method string, cont Context, argument interface{}) {\n\tlog.Printf(\"%v %v: Called\\nContext=%# v\\nArgument=%# v\\n\\n\", conf.Name, method, pretty.Formatter(cont), pretty.Formatter(argument))\n}", "func (_e *MockDataReceiverService_PutMetricServer_Expecter) Context() *MockDataReceiverService_PutMetricServer_Context_Call {\n\treturn &MockDataReceiverService_PutMetricServer_Context_Call{Call: _e.mock.On(\"Context\")}\n}", "func call(context *Context) {\n context.cpu.sp++\n context.stack[context.cpu.sp] = context.cpu.pc\n context.cpu.pc = context.opcode & 0x0FFF\n}", "func (m *MockSession) Push(arg0 string, arg1 interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Push\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockDataReceiverService_PutMetricClient_Expecter) Context() *MockDataReceiverService_PutMetricClient_Context_Call {\n\treturn &MockDataReceiverService_PutMetricClient_Context_Call{Call: _e.mock.On(\"Context\")}\n}", "func (f *VCenter) PushMetrics(context.Context, metrics.Receiver) {}", "func (_e *MockRequestInterceptor_Expecter) InterceptWriteRequest(ctx interface{}, writeRequest interface{}) *MockRequestInterceptor_InterceptWriteRequest_Call {\n\treturn &MockRequestInterceptor_InterceptWriteRequest_Call{Call: _e.mock.On(\"InterceptWriteRequest\", ctx, writeRequest)}\n}", "func (_m *MockHTTPServerInterface) SetContext(_a0 context.Context) {\n\t_m.Called(_a0)\n}", "func (_e *MockWriteRequestInterceptor_Expecter) InterceptWriteRequest(ctx interface{}, writeRequest interface{}) *MockWriteRequestInterceptor_InterceptWriteRequest_Call {\n\treturn &MockWriteRequestInterceptor_InterceptWriteRequest_Call{Call: _e.mock.On(\"InterceptWriteRequest\", ctx, writeRequest)}\n}", "func (ws *WriterStack) Push(enc Encoder, args []byte) error {\n\tvar err error\n\tws.Writer, err = enc(ws.Writer, args)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// note this DELIBERATELY does not end up including the \"base writer\"\n\t// that we're interpolating to... 
we do NOT want to end up closing\n\t// that, too!\n\tws.components = append(ws.components, ws.Writer)\n\treturn nil\n}", "func (w *interceptRW) Push(target string, opts *http.PushOptions) error {\n\tif w, ok := w.ResponseWriter.(http.Pusher); ok {\n\t\treturn w.Push(target, opts)\n\t}\n\treturn http.ErrNotSupported\n}", "func (client *MockPodExecClient) AddMockPodExecReturnContext(ctx context.Context, cmd string, mockPodExecReturnContext *MockPodExecReturnContext) {\n\tclient.WantCmdList = append(client.WantCmdList, cmd)\n\tif client.MockReturnContexts == nil {\n\t\tclient.MockReturnContexts = make(map[string]*MockPodExecReturnContext)\n\t}\n\tclient.MockReturnContexts[cmd] = mockPodExecReturnContext\n}", "func (m *MockFullNode) MpoolPush(arg0 context.Context, arg1 *types.SignedMessage) (cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolPush\", arg0, arg1)\n\tret0, _ := ret[0].(cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func Push(ctx ContextT) int32 {\n\treturn int32(C.yices_push(yctx(ctx)))\n}", "func (r *renderer) push(context reflect.Value) {\n\tr.stack = append(r.stack, context)\n}", "func (mock *PodSecurityPolicyTemplateInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v31.PodSecurityPolicyTemplateHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v31.PodSecurityPolicyTemplateHandlerFunc\n\t}\n\tlockPodSecurityPolicyTemplateInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockPodSecurityPolicyTemplateInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func Push(ctx echo.Context) error {\n\n\tmsg := types.Message{}\n\n\terr := ctx.Bind(&msg)\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tif !registration.IsAgentRegistered(msg.Token) {\n\t\treturn ctx.JSON(403, types.ValidateResponse{Success: false, Message: \"Security Token Not Recognized\"})\n\t}\n\n\tgo PushToQueue(msg)\n\treturn ctx.JSON(200, types.PushResponse{true})\n}", "func (_e *Output_Expecter) WriteOne(ctx interface{}, msg interface{}) *Output_WriteOne_Call {\n\treturn &Output_WriteOne_Call{Call: _e.mock.On(\"WriteOne\", ctx, msg)}\n}", "func (lc mockNotifyLogger) Trace(msg string, args ...interface{}) {\n}", "func Context(t *testing.T, msg string, args ...interface{}) {\n\tt.Log(fmt.Sprintf(msg, args...))\n}", "func contextInjector(baseCtx func() context.Context) grpcutil.UnifiedServerInterceptor {\n\treturn func(ctx context.Context, fullMethod string, handler func(ctx context.Context) error) error {\n\t\treturn handler(&mergedCtx{ctx, baseCtx()})\n\t}\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (m *MockPacketHandler) Context() context.Context {\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (p *Pusher) Push(ctx context.Context) error {\n\tif p.PushFormat == \"\" {\n\t\tp.PushFormat = expfmt.FmtText\n\t}\n\n\tresps := make(chan (error))\n\tgo func() {\n\t\tresps <- p.push(ctx)\n\t}()\n\n\tselect {\n\tcase err := <-resps:\n\t\treturn err\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\t}\n}", "func (m *MockLayout) Push(arg0 image.Digest, arg1 image.Name) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Push\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (mock *GitModuleControllerMock) AddGenericHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tHandler 
generic.Handler\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tHandler generic.Handler\n\t}\n\tlockGitModuleControllerMockAddGenericHandler.RLock()\n\tcalls = mock.calls.AddGenericHandler\n\tlockGitModuleControllerMockAddGenericHandler.RUnlock()\n\treturn calls\n}", "func (client *MockPodExecClient) AddMockPodExecReturnContexts(ctx context.Context, podExecCmds []string, mockPodExecReturnContexts ...*MockPodExecReturnContext) {\n\tfor n := range mockPodExecReturnContexts {\n\t\tclient.AddMockPodExecReturnContext(ctx, podExecCmds[n], mockPodExecReturnContexts[n])\n\t}\n}", "func (_e *MockDataRegistryService_CreateOrUpdateMetricsServer_Expecter) Context() *MockDataRegistryService_CreateOrUpdateMetricsServer_Context_Call {\n\treturn &MockDataRegistryService_CreateOrUpdateMetricsServer_Context_Call{Call: _e.mock.On(\"Context\")}\n}", "func (m *MockSQSAPI) AddPermissionWithContext(arg0 context.Context, arg1 *sqs.AddPermissionInput, arg2 ...request.Option) (*sqs.AddPermissionOutput, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"AddPermissionWithContext\", varargs...)\n\tret0, _ := ret[0].(*sqs.AddPermissionOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *Mockexecuter) ExecWithWriter(ctx context.Context, commandLine string, w io.Writer, args ...string) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctx, commandLine, w}\n\tfor _, a := range args {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"ExecWithWriter\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *mParcelMockContext) Set(f func(p context.Context) (r context.Context)) *ParcelMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.ContextFunc = f\n\treturn m.mock\n}", "func (_m *DockerClient) ImagePush(_a0 context.Context, _a1 string, _a2 types.ImagePushOptions) (io.ReadCloser, error) {\n\tret := _m.Called(_a0, _a1, _a2)\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func(context.Context, string, types.ImagePushOptions) io.ReadCloser); ok {\n\t\tr0 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string, types.ImagePushOptions) error); ok {\n\t\tr1 = rf(_a0, _a1, _a2)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *OplogCursor) Push(_a0 []byte) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func mockBatchCTX(t testing.TB) *BatchCTX {\n\tt.Helper()\n\tmockBatch := NewBatchCTX(mockBatchCTXHeader())\n\tmockBatch.AddEntry(mockCTXEntryDetail())\n\tmockBatch.GetEntries()[0].AddAddenda05(mockAddenda05())\n\tmockBatch.Entries[0].AddendaRecordIndicator = 1\n\tif err := mockBatch.Create(); err != nil {\n\t\tt.Fatal(err)\n\t}\n\treturn mockBatch\n}", "func (m *Mock) On(methodName string, arguments ...interface{}) *Call {\n\tfor _, arg := range arguments {\n\t\tif v := reflect.ValueOf(arg); v.Kind() == reflect.Func {\n\t\t\tpanic(fmt.Sprintf(\"cannot use Func in expectations. 
Use mock.AnythingOfType(\\\"%T\\\")\", arg))\n\t\t}\n\t}\n\n\tm.mutex.Lock()\n\tdefer m.mutex.Unlock()\n\tc := newCall(m, methodName, assert.CallerInfo(), arguments...)\n\tm.ExpectedCalls = append(m.ExpectedCalls, c)\n\treturn c\n}", "func (s *StakingKeeperMock) Jail(sdk.Context, sdk.ConsAddress) {}", "func MockSystemCall(std_out, std_err string, err error) {\n\tcontext.SystemCall = func(dir string, cmd string, args []string, local_out, local_err io.Writer) error {\n\t\tlocal_out.Write([]byte(std_out))\n\t\tlocal_err.Write([]byte(std_err))\n\t\treturn err\n\t}\n}", "func TestEmitContextEvents(t *testing.T) {\n\tassert := asserts.NewTesting(t, asserts.FailStop)\n\tmsh := mesh.New()\n\n\terr := msh.SpawnCells(NewTestBehavior(\"foo\"))\n\tassert.NoError(err)\n\n\tctxA := context.Background()\n\tctxB, cancel := context.WithTimeout(ctxA, 5*time.Millisecond)\n\tdefer cancel()\n\n\tmsh.Emit(\"foo\", event.WithContext(ctxA, \"set\", \"a\", 5))\n\tmsh.Emit(\"foo\", event.WithContext(ctxA, \"set\", \"b\", 5))\n\n\ttime.Sleep(20 * time.Millisecond)\n\n\tmsh.Emit(\"foo\", event.WithContext(ctxB, \"set\", \"b\", 10))\n\n\tpl, plc := event.NewReplyPayload()\n\n\tmsh.Emit(\"foo\", event.New(\"send\", pl))\n\n\tplr, err := plc.Wait(waitTimeout)\n\n\tassert.NoError(err)\n\tassert.Equal(plr.At(\"a\").AsInt(0), 5)\n\tassert.Equal(plr.At(\"b\").AsInt(0), 5)\n\n\terr = msh.Stop()\n\tassert.NoError(err)\n}", "func (m *MockIExec) DoGitPush(dir string, args ...string) (string, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{dir}\n\tfor _, a := range args {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"DoGitPush\", varargs...)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *ParcelMock) Context(p context.Context) (r context.Context) {\n\tcounter := atomic.AddUint64(&m.ContextPreCounter, 1)\n\tdefer atomic.AddUint64(&m.ContextCounter, 1)\n\n\tif len(m.ContextMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.ContextMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to ParcelMock.Context. %v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.ContextMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, ParcelMockContextInput{p}, \"Parcel.Context got unexpected parameters\")\n\n\t\tresult := m.ContextMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ParcelMock.Context\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.ContextMock.mainExpectation != nil {\n\n\t\tinput := m.ContextMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, ParcelMockContextInput{p}, \"Parcel.Context got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.ContextMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the ParcelMock.Context\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.ContextFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to ParcelMock.Context. 
%v\", p)\n\t\treturn\n\t}\n\n\treturn m.ContextFunc(p)\n}", "func (mock *PodSecurityPolicyTemplateControllerMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tHandler v31.PodSecurityPolicyTemplateHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tHandler v31.PodSecurityPolicyTemplateHandlerFunc\n\t}\n\tlockPodSecurityPolicyTemplateControllerMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockPodSecurityPolicyTemplateControllerMockAddHandler.RUnlock()\n\treturn calls\n}", "func (rww *ResponseWriterWrapper) Push(target string, opts *http.PushOptions) error {\n\tif pusher, ok := rww.ResponseWriter.(http.Pusher); ok {\n\t\treturn pusher.Push(target, opts)\n\t}\n\treturn ErrNotImplemented\n}", "func (m *MockMounter) CommandContext(ctx context.Context, cmd string, args ...string) exec.Cmd {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{ctx, cmd}\n\tfor _, a := range args {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"CommandContext\", varargs...)\n\tret0, _ := ret[0].(exec.Cmd)\n\treturn ret0\n}", "func MockOnSetSystem(ctx context.Context, mockAPI *redfishMocks.RedfishAPI, systemID string,\n\tcomputerSystem redfishClient.ComputerSystem, httpResponse *http.Response, err error) {\n\trequest := redfishClient.ApiSetSystemRequest{}.ComputerSystem(computerSystem)\n\tmockAPI.On(\"SetSystem\", ctx, systemID).Return(request).Times(1)\n\tmockAPI.On(\"SetSystemExecute\", mock.Anything).Return(computerSystem, httpResponse, err).Times(1)\n}", "func (m *Client) Post(arg0 context.Context, arg1 string, arg2 interface{}) ([]byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Post\", arg0, arg1, arg2)\n\tret0, _ := ret[0].([]byte)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock *ProjectRoleTemplateBindingInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v31.ProjectRoleTemplateBindingHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v31.ProjectRoleTemplateBindingHandlerFunc\n\t}\n\tlockProjectRoleTemplateBindingInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockProjectRoleTemplateBindingInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (mock *MultiClusterAppInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v31.MultiClusterAppHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v31.MultiClusterAppHandlerFunc\n\t}\n\tlockMultiClusterAppInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockMultiClusterAppInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (rh *MockRequestHandler) PushRequest(ctx context.Context, req *protocol.Request) (*protocol.Response, error) {\n\tselect {\n\tcase rh.in <- req:\n\tcase <-ctx.Done():\n\t\treturn nil, errors.New(\"request cancelled\")\n\t}\n\n\tselect {\n\tcase resp := <-req.Responded():\n\t\tif resp == nil {\n\t\t\treturn protocol.NewResponseConfig(config.Default), errors.New(\"no response\")\n\t\t}\n\t\treturn resp, nil\n\tcase <-ctx.Done():\n\t\treturn nil, errors.New(\"request cancelled\")\n\t}\n\treturn nil, nil\n}", "func (rec *RawEventCreate) SetContext(m map[string]interface{}) *RawEventCreate {\n\trec.mutation.SetContext(m)\n\treturn rec\n}", "func (m *MockSQSAPI) SendMessageWithContext(arg0 context.Context, arg1 *sqs.SendMessageInput, arg2 ...request.Option) (*sqs.SendMessageOutput, error) {\n\tm.ctrl.T.Helper()\n\tvarargs := 
[]interface{}{arg0, arg1}\n\tfor _, a := range arg2 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"SendMessageWithContext\", varargs...)\n\tret0, _ := ret[0].(*sqs.SendMessageOutput)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func push(x interface{}) {\n\tswitch y := x.(type) {\n\tcase CNIReport:\n\t\tSendAITelemetry(y)\n\n\tcase AIMetric:\n\t\tSendAIMetric(y)\n\tdefault:\n\t\tlog.Printf(\"Push fn: Default case:%+v\", y)\n\t}\n}", "func (p *Pipe) Push(values ...phono.Param) {\n\tif len(values) == 0 {\n\t\treturn\n\t}\n\tparams := params(make(map[string][]phono.ParamFunc))\n\tp.events <- eventMessage{\n\t\tevent: push,\n\t\tparams: params.add(values...),\n\t}\n}", "func (mock *SourceCodeProviderInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v3.SourceCodeProviderHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v3.SourceCodeProviderHandlerFunc\n\t}\n\tlockSourceCodeProviderInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockSourceCodeProviderInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (mock *PersistentVolumeClaimInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v11.PersistentVolumeClaimHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v11.PersistentVolumeClaimHandlerFunc\n\t}\n\tlockPersistentVolumeClaimInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockPersistentVolumeClaimInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (mock *GlobalRoleBindingInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v31.GlobalRoleBindingHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v31.GlobalRoleBindingHandlerFunc\n\t}\n\tlockGlobalRoleBindingInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockGlobalRoleBindingInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (m *MockSession) PushToFront(arg0 context.Context) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushToFront\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func WithContext(ctx context.Context, method, url string) (int, string) {\n\treturn TracedCall(ctx, method, url)\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func makeTracker(ctx context.Context, name string) (context.Context, func(error)) {\n\tctx, err := tag.New(ctx, tag.Upsert(apiNameKey, name))\n\tif err != nil {\n\t\tlogrus.WithError(err).Fatalf(\"cannot add tag %v=%v\", apiNameKey, name)\n\t}\n\n\t// It would have been nice to pull the latency (end-start) elapsed time\n\t// from Spans but this is hidden from us, so we have to call time.Now()\n\t// twice 
ourselves.\n\tctx, span := trace.StartSpan(ctx, name)\n\tstart := time.Now()\n\n\treturn ctx, func(err error) {\n\n\t\tstatus := \"ok\"\n\t\tif err != nil {\n\t\t\tif err == context.Canceled {\n\t\t\t\tstatus = \"canceled\"\n\t\t\t} else if err == context.DeadlineExceeded {\n\t\t\t\tstatus = \"timeout\"\n\t\t\t} else if derr, ok := err.(*docker.Error); ok {\n\t\t\t\tstatus = strconv.FormatInt(int64(derr.Status), 10)\n\t\t\t} else {\n\t\t\t\tstatus = \"error\"\n\t\t\t}\n\t\t}\n\n\t\tctx, err := tag.New(ctx, tag.Upsert(apiStatusKey, status))\n\t\tif err != nil {\n\t\t\tlogrus.WithError(err).Fatalf(\"cannot add tag %v=%v\", apiStatusKey, status)\n\t\t}\n\n\t\tstats.Record(ctx, dockerLatencyMeasure.M(int64(time.Now().Sub(start)/time.Millisecond)))\n\t\tspan.End()\n\t}\n}", "func (_m *AppFunctionContext) PushToCore(event dtos.Event) (common.BaseWithIdResponse, error) {\n\tret := _m.Called(event)\n\n\tvar r0 common.BaseWithIdResponse\n\tif rf, ok := ret.Get(0).(func(dtos.Event) common.BaseWithIdResponse); ok {\n\t\tr0 = rf(event)\n\t} else {\n\t\tr0 = ret.Get(0).(common.BaseWithIdResponse)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(dtos.Event) error); ok {\n\t\tr1 = rf(event)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock *HarborRepositoryInterfaceMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tSync v3.HarborRepositoryHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tSync v3.HarborRepositoryHandlerFunc\n\t}\n\tlockHarborRepositoryInterfaceMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockHarborRepositoryInterfaceMockAddHandler.RUnlock()\n\treturn calls\n}", "func (c *fakeRedisConn) SetContext(v interface{}) {}", "func (b *binding) Push(ctx context.Context, local, remote string) error {\n\treturn b.Command(\"push\", local, remote).Run(ctx)\n}", "func (m *MockFullNode) MpoolBatchPush(arg0 context.Context, arg1 []*types.SignedMessage) ([]cid.Cid, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"MpoolBatchPush\", arg0, arg1)\n\tret0, _ := ret[0].([]cid.Cid)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func mockRegisterProducerTx(info *payload.ProducerInfo) *types.Transaction {\n\treturn &types.Transaction{\n\t\tTxType: types.RegisterProducer,\n\t\tPayload: info,\n\t}\n}", "func (m *GenericMock) AddCall(method string, result interface{}, args ...interface{}) {\n\tm.lock.Lock()\n\tdefer m.lock.Unlock()\n\tm.calls = append(m.calls, mockCall{\n\t\tmethod: method,\n\t\targs: args,\n\t\tresult: result,\n\t\tused: false,\n\t})\n}", "func TestFBaseProcessorNoProcessorFunction(t *testing.T) {\n\ttmpLogger := logrus.New()\n\tvar logBuf bytes.Buffer\n\ttmpLogger.Out = &logBuf\n\toldLogger := logger()\n\tSetLogger(tmpLogger)\n\tdefer func() {\n\t\tSetLogger(oldLogger)\n\t}()\n\n\tmockTransport := new(mockTTransport)\n\treads := make(chan []byte, 4)\n\treads <- 
pingFrame[0:1] // version\n\treads <- pingFrame[1:5] // headers size\n\treads <- pingFrame[5:34] // FContext headers\n\treads <- pingFrame[34:] // request body\n\tmockTransport.reads = reads\n\t// _opid0, cid 123\n\t// The ordering of opid and cid in the header is non-deterministic,\n\t// so cant check for equality.\n\tresponseCtx := []byte{0, 0, 0, 0, 29, 0, 0, 0, 5, 95, 111, 112, 105, 100, 0, 0, 0, 1, 48, 0, 0, 0, 4, 95, 99, 105, 100, 0, 0, 0, 3, 49, 50, 51}\n\tmockTransport.On(\"Write\", mock.Anything).Return(len(responseCtx), nil).Once()\n\t// [1,\"ping\",3,0,{\"1\":{\"str\":\"Unknown function ping\"},\"2\":{\"i32\":1}}]\n\tresponseBody := []byte{\n\t\t91, 49, 44, 34, 112, 105, 110, 103, 34, 44, 51, 44, 48, 44, 123, 34,\n\t\t49, 34, 58, 123, 34, 115, 116, 114, 34, 58, 34, 85, 110, 107, 110, 111,\n\t\t119, 110, 32, 102, 117, 110, 99, 116, 105, 111, 110, 32, 112, 105, 110,\n\t\t103, 34, 125, 44, 34, 50, 34, 58, 123, 34, 105, 51, 50, 34, 58, 49,\n\t\t125, 125, 93,\n\t}\n\tmockTransport.On(\"Write\", responseBody).Return(len(responseBody), nil).Once()\n\tmockTransport.On(\"Flush\", mock.Anything).Return(nil)\n\tproto := &FProtocol{TProtocol: thrift.NewTJSONProtocol(mockTransport)}\n\tprocessor := NewFBaseProcessor()\n\n\tassert.NoError(t, processor.Process(proto, proto))\n\tassert.True(t,\n\t\tstrings.Contains(\n\t\t\tstring(logBuf.Bytes()),\n\t\t\t\"frugal: client invoked unknown function ping on request with correlation id 123\"))\n\tmockTransport.AssertExpectations(t)\n}", "func injectCtx(endpointHandler func(http.ResponseWriter, *http.Request, context.Context)) http.Handler {\n\treturn http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {\n\t\tendpointHandler(res, req, logs.WithRqId(httpContext, uuid.NewRandom().String()))\n\t})\n}", "func handler(ctx context.Context, request events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) {\n\tfmt.Printf(\"My request: %+v\", request)\n\tfmt.Printf(\"My context: %+v\", ctx)\n\n\tlc, ok := lambdacontext.FromContext(ctx)\n\tif !ok {\n\t\treturn events.APIGatewayProxyResponse{\n\t\t\tStatusCode: http.StatusBadGateway,\n\t\t\tBody: \"Something went wrong :(\",\n\t\t}, nil\n\t}\n\n\tout := &struct {\n\t\tHeaders map[string]string `json:\"headers,omitempty\"`\n\t\tClientContext struct {\n\t\t\tClientInfo struct {\n\t\t\t\tAppPackageName string `json:\"app_package_name,omitempty\"`\n\t\t\t\tAppTitle string `json:\"app_title,omitempty\"`\n\t\t\t\tAppVersionCode string `json:\"app_version_code,omitempty\"`\n\t\t\t\tInstallationID string `json:\"installation_id,omitempty\"`\n\t\t\t} `json:\"client_info,omitemtpy,omitempty\"`\n\t\t\tCustom map[string]string `json:\"custom,omitempty\"`\n\t\t\tEnv map[string]string `json:\"env,omitempty\"`\n\t\t} `json:\"client_context,omitempty\"`\n\t\tAWSInfo struct {\n\t\t\tAwsRequestID string `json:\"aws_request_id,omitempty\"`\n\t\t\tInvokedFunctionArn string `json:\"invoked_function_arn,omitempty\"`\n\t\t} `json:\"aws_info,omitempty\"`\n\t\tNetlify map[string]interface{} `json:\"netlify,omitempty\"`\n\t}{}\n\n\tout.Headers = request.Headers\n\tout.ClientContext.Env = lc.ClientContext.Env\n\tout.ClientContext.Custom = lc.ClientContext.Custom\n\tout.ClientContext.ClientInfo.AppPackageName = lc.ClientContext.Client.AppPackageName\n\tout.ClientContext.ClientInfo.AppTitle = lc.ClientContext.Client.AppTitle\n\tout.ClientContext.ClientInfo.AppVersionCode = lc.ClientContext.Client.AppVersionCode\n\tout.ClientContext.ClientInfo.InstallationID = 
lc.ClientContext.Client.InstallationID\n\tout.AWSInfo.AwsRequestID = lc.AwsRequestID\n\tout.AWSInfo.InvokedFunctionArn = lc.InvokedFunctionArn\n\n\t// the netlify struct is encoded\n\tnfstr, ok := lc.ClientContext.Custom[\"netlify\"]\n\tif ok {\n\t\tfmt.Println(\"Found netlify context\")\n\t\tdecoded, err := base64.StdEncoding.DecodeString(nfstr)\n\t\tif err != nil {\n\t\t\treturn events.APIGatewayProxyResponse{\n\t\t\t\tStatusCode: http.StatusBadRequest,\n\t\t\t\tBody: err.Error(),\n\t\t\t}, nil\n\t\t}\n\t\tparsed := make(map[string]interface{})\n\t\tif err := json.Unmarshal(decoded, &parsed); err != nil {\n\t\t\treturn events.APIGatewayProxyResponse{\n\t\t\t\tStatusCode: http.StatusBadRequest,\n\t\t\t\tBody: err.Error(),\n\t\t\t}, nil\n\t\t}\n\t\tout.Netlify = parsed\n\t}\n\n\tfmt.Printf(\"Marshaling the output: %+v\\n\", out)\n\tbs, err := json.Marshal(out)\n\tif err != nil {\n\t\treturn events.APIGatewayProxyResponse{\n\t\t\tStatusCode: http.StatusInternalServerError,\n\t\t\tBody: err.Error(),\n\t\t}, nil\n\t}\n\tfmt.Println(\"It all seems good\")\n\treturn events.APIGatewayProxyResponse{\n\t\tStatusCode: http.StatusOK,\n\t\tBody: string(bs),\n\t}, nil\n}", "func (mr *MockSessionMockRecorder) Push(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Push\", reflect.TypeOf((*MockSession)(nil).Push), arg0, arg1)\n}", "func (c *Context) Push(op operation.Operation) {\n\tc.operations = append(c.operations, op)\n}", "func WrapMockAuthConfig(hfn http.HandlerFunc, cfg *config.APICfg, brk brokers.Broker, str stores.Store, mgr *oldPush.Manager, c push.Client, roles ...string) http.HandlerFunc {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\n\t\turlVars := mux.Vars(r)\n\n\t\tuserRoles := []string{\"publisher\", \"consumer\"}\n\t\tif len(roles) > 0 {\n\t\t\tuserRoles = roles\n\t\t}\n\n\t\tnStr := str.Clone()\n\t\tdefer nStr.Close()\n\n\t\tprojectUUID := projects.GetUUIDByName(urlVars[\"project\"], nStr)\n\t\tgorillaContext.Set(r, \"auth_project_uuid\", projectUUID)\n\t\tgorillaContext.Set(r, \"brk\", brk)\n\t\tgorillaContext.Set(r, \"str\", nStr)\n\t\tgorillaContext.Set(r, \"mgr\", mgr)\n\t\tgorillaContext.Set(r, \"apsc\", c)\n\t\tgorillaContext.Set(r, \"auth_resource\", cfg.ResAuth)\n\t\tgorillaContext.Set(r, \"auth_user\", \"UserA\")\n\t\tgorillaContext.Set(r, \"auth_user_uuid\", \"uuid1\")\n\t\tgorillaContext.Set(r, \"auth_roles\", userRoles)\n\t\tgorillaContext.Set(r, \"push_worker_token\", cfg.PushWorkerToken)\n\t\tgorillaContext.Set(r, \"push_enabled\", cfg.PushEnabled)\n\t\thfn.ServeHTTP(w, r)\n\n\t})\n}", "func (f *ExtensionStoreCreateFunc) PushHook(hook func(context.Context, int32, int32, string) (int32, error)) {\n\tf.mutex.Lock()\n\tf.hooks = append(f.hooks, hook)\n\tf.mutex.Unlock()\n}", "func (m *MockAtomicLogic) PushKeyKeeper() core.PushKeyKeeper {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushKeyKeeper\")\n\tret0, _ := ret[0].(core.PushKeyKeeper)\n\treturn ret0\n}", "func RunInstrumented(ctx context.Context, name string, app newrelic.Application,\n\tsink MetricsSink, logger *zap.Logger, fn func(context.Context) error) error {\n\n\tcurTrans := newrelic.FromContext(ctx)\n\tvar newTrans newrelic.Transaction\n\tif curTrans == nil {\n\t\tnewTrans = app.StartTransaction(name, nil, nil)\n\t} else {\n\t\tnewTrans = curTrans.NewGoroutine()\n\t}\n\t_ = newTrans.SetName(name)\n\n\tvar err error\n\tdefer func() {\n\t\t// (1) Close with the supplied error, either from the function\n\t\t// 
return or from the panic handler below.\n\t\tif err != nil {\n\t\t\t_ = newTrans.NoticeError(err)\n\t\t}\n\t\t_ = newTrans.End()\n\t}()\n\n\tdefer func() {\n\t\tif p := recover(); p != nil {\n\t\t\t// OK, this is a serious COMEFROM-like trick here. In case of an\n\t\t\t// exception we modify the 'err' variable from the parent scope.\n\t\t\t// This in turn will be picked up by the deferred function (1).\n\n\t\t\t// Create an error with a nice stack trace\n\t\t\tstack := make([]uintptr, 40)\n\t\t\tn := runtime.Callers(3, stack)\n\t\t\terr = newrelic.Error{\n\t\t\t\tMessage: fmt.Sprintf(\"%v\", p),\n\t\t\t\tClass: \"gopanic\",\n\t\t\t\tStack: stack[:n],\n\t\t\t}\n\t\t\tpanic(p)\n\t\t}\n\t}()\n\n\tlogger = logger.Named(name).With(getLogLinkingMetadata(newTrans)...)\n\tc := newrelic.NewContext(ctx, newTrans) // Create context with tracing attached\n\tc = ImbueContext(c, logger) // Save logger into the context\n\tc = MakeMetricContext(c, name) // Save metrics into the context\n\n\tmet := GetMetricsFromContext(c)\n\tdefer sink.SubmitSegmentMetrics(met)\n\tdefer met.CopyToTransaction(newTrans)\n\n\terr = fn(c)\n\n\treturn err\n}", "func (t Tracker) track(payload payload.Payload, contexts []SelfDescribingJson) {\n\n\t// Add standard KV Pairs\n\tpayload.Add(T_VERSION, common.NewString(TRACKER_VERSION))\n\tpayload.Add(PLATFORM, common.NewString(t.Platform))\n\tpayload.Add(APP_ID, common.NewString(t.AppId))\n\tpayload.Add(NAMESPACE, common.NewString(t.Namespace))\n\n\t// Build the final context and add it to the payload\n\tif contexts != nil && len(contexts) > 0 {\n\t\tdataArray := []map[string]interface{}{}\n\t\tfor _, val := range contexts {\n\t\t\tdataArray = append(dataArray, val.Get())\n\t\t}\n\t\tcontextJson := *InitSelfDescribingJson(SCHEMA_CONTEXTS, dataArray)\n\t\tpayload.AddJson(contextJson.Get(), t.Base64Encode, CONTEXT_ENCODED, CONTEXT)\n\t}\n\n\t// Add the event to the Emitter.\n\tt.Emitter.Add(payload)\n}", "func (mr *MockAgentSecure_WorkloadmetaStreamEntitiesServerMockRecorder) Context() *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Context\", reflect.TypeOf((*MockAgentSecure_WorkloadmetaStreamEntitiesServer)(nil).Context))\n}", "func CALL(r operand.Op) { ctx.CALL(r) }", "func (m *MockLogic) PushKeyKeeper() core.PushKeyKeeper {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"PushKeyKeeper\")\n\tret0, _ := ret[0].(core.PushKeyKeeper)\n\treturn ret0\n}", "func (m *MockRPCClient) PostWith(arg0 context.Context, arg1 string, arg2, arg3 interface{}, arg4 ...rpc.Option) error {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{arg0, arg1, arg2, arg3}\n\tfor _, a := range arg4 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"PostWith\", varargs...)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (mock *MultiClusterAppControllerMock) AddHandlerCalls() []struct {\n\tCtx context.Context\n\tName string\n\tHandler v31.MultiClusterAppHandlerFunc\n} {\n\tvar calls []struct {\n\t\tCtx context.Context\n\t\tName string\n\t\tHandler v31.MultiClusterAppHandlerFunc\n\t}\n\tlockMultiClusterAppControllerMockAddHandler.RLock()\n\tcalls = mock.calls.AddHandler\n\tlockMultiClusterAppControllerMockAddHandler.RUnlock()\n\treturn calls\n}", "func (m *MockAgentSecure_TaggerStreamEntitiesServer) Context() context.Context {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Context\")\n\tret0, _ := ret[0].(context.Context)\n\treturn ret0\n}", "func (m *MockConsensus) PushEvent(arg0 hash.Event) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, 
\"PushEvent\", arg0)\n}", "func TestTraceContextIsPropagated(t *testing.T) {\n\t_, flusher := tracetesting.InitTracer()\n\n\tvar client = http.Client{\n\t\tTransport: hyperhttp.NewTransport(\n\t\t\thttp.DefaultTransport,\n\t\t),\n\t}\n\n\t// Configure Gin server\n\tr := gin.Default()\n\tr.Use(Middleware(&sdkhttp.Options{}))\n\tr.POST(\"/things/:thing_id\", func(c *gin.Context) {\n\t\tc.JSON(200, gin.H{\n\t\t\t\"thing\": \"go\",\n\t\t})\n\t})\n\n\tr2 := gin.Default()\n\tr2.Use(Middleware(&sdkhttp.Options{}))\n\tr2.GET(\"/send_thing_request\", func(c *gin.Context) {\n\t\treq, _ := http.NewRequest(\"POST\",\n\t\t\t\"http://localhost:60543/things/123\",\n\t\t\tbytes.NewBufferString(`{\"name\":\"Jacinto\"}`))\n\n\t\treq = req.WithContext(c.Request.Context())\n\t\tres, err := client.Do(req)\n\t\tif err != nil {\n\t\t\tc.JSON(400, gin.H{\n\t\t\t\t\"success\": false,\n\t\t\t})\n\t\t\treturn\n\t\t}\n\t\tbodyBytes, _ := ioutil.ReadAll(res.Body)\n\t\tbodyString := string(bodyBytes)\n\t\tc.JSON(200, gin.H{\n\t\t\t\"success\": true,\n\t\t\t\"otherServiceResponse\": bodyString,\n\t\t})\n\t})\n\n\tserver := &http.Server{Addr: \":60543\", Handler: r}\n\tdefer server.Close()\n\tgo server.ListenAndServe()\n\n\tserver2 := &http.Server{Addr: \":60544\", Handler: r2}\n\tdefer server2.Close()\n\tgo server2.ListenAndServe()\n\n\treq, _ := http.NewRequest(\"GET\",\n\t\t\"http://localhost:60544/send_thing_request\", nil)\n\n\tres, err := client.Do(req)\n\t_, readErr := ioutil.ReadAll(res.Body)\n\trequire.NoError(t, readErr)\n\n\tif err != nil {\n\t\tt.Errorf(\"unexpected error: %v\", err)\n\t}\n\n\tif want, have := 200, res.StatusCode; want != have {\n\t\tt.Errorf(\"unexpected status code, want %q, have %q\", want, have)\n\t}\n\n\tspans := flusher()\n\tif spans == nil {\n\t\tt.Errorf(\"failed\")\n\t}\n\n\tassert.Equal(t, 4, len(spans))\n\tassert.Equal(t, \"/things/:thing_id\", spans[0].Name())\n\tassert.Equal(t, spans[1].SpanContext().SpanID(), spans[0].Parent().SpanID())\n\tassert.Equal(t, \"HTTP POST\", spans[1].Name())\n\tassert.Equal(t, spans[2].SpanContext().SpanID(), spans[1].Parent().SpanID())\n\tassert.Equal(t, \"/send_thing_request\", spans[2].Name())\n\tassert.Equal(t, spans[3].SpanContext().SpanID(), spans[2].Parent().SpanID())\n\tassert.Equal(t, \"HTTP GET\", spans[3].Name())\n\n\ttraceId := spans[0].SpanContext().TraceID().String()\n\tfor _, span := range spans {\n\t\tassert.Equal(t, traceId, span.SpanContext().TraceID().String())\n\t}\n}", "func PushArgs(tag string) []string {\n\treturn []string{\"push\", tag}\n}" ]
[ "0.78451693", "0.7605195", "0.7581111", "0.5930799", "0.591325", "0.5899208", "0.58596015", "0.5771454", "0.5477703", "0.54256225", "0.5419763", "0.5122685", "0.51218194", "0.511524", "0.50980157", "0.50257444", "0.4956452", "0.49511698", "0.49058107", "0.4904071", "0.48771927", "0.48743698", "0.48603767", "0.48578268", "0.48459697", "0.48452544", "0.48384535", "0.48383352", "0.48355082", "0.48079413", "0.4800915", "0.4794968", "0.47878835", "0.4782025", "0.4775926", "0.4769322", "0.47652358", "0.47623336", "0.47604117", "0.47570348", "0.4755708", "0.4754011", "0.47490576", "0.47445852", "0.4740907", "0.47212723", "0.47203344", "0.4715687", "0.4701206", "0.46940085", "0.46740374", "0.46630818", "0.46573412", "0.46572793", "0.46566683", "0.46532524", "0.46504825", "0.46461946", "0.46177998", "0.4613848", "0.4608926", "0.4584738", "0.45805842", "0.45787996", "0.4571176", "0.45667624", "0.45631313", "0.4560033", "0.45468798", "0.45296156", "0.452052", "0.4513549", "0.45099735", "0.45099634", "0.45045057", "0.44946468", "0.44944355", "0.4485012", "0.44777125", "0.44773912", "0.44755185", "0.44709247", "0.4470079", "0.44688657", "0.44646353", "0.4463437", "0.44610927", "0.44454312", "0.4444937", "0.44422132", "0.44371352", "0.44366527", "0.44328633", "0.4432139", "0.44305944", "0.4428358", "0.4425823", "0.44235513", "0.44225705", "0.44182295" ]
0.80246294
0
WriteBigFloat provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteBigFloat(bf *big.Float) {\n\tw.writeByte(TagDouble)\n\tvar buf [64]byte\n\tw.write(bf.Append(buf[:0], 'g', -1))\n\tw.writeByte(TagSemicolon)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, 
len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) 
*MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *ByteWriter) WriteFloat64(val float64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (bw *BufWriter) Float64(f float64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\tbw.stringBuf, bw.Error = Float64(f, bw.stringBuf[:0])\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.Write(bw.stringBuf)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Float64(arg0 string) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", arg0)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, 
_va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (e *Encoder) WriteFloat(n float64, bitSize int) {\n\te.prepareNext(Number)\n\te.out = appendFloat(e.out, n, bitSize)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteFloat64(v float64) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockValue) Float64(def float64) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", def)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (p *Stream) WriteFloat64(value float64) {\n\tif value == 0 {\n\t\tp.writeFrame[p.writeIndex] = 4\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else {\n\t\tv := math.Float64bits(value)\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 5\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t} else {\n\t\t\tp.PutBytes([]byte{\n\t\t\t\t5,\n\t\t\t\tbyte(v),\n\t\t\t\tbyte(v >> 8),\n\t\t\t\tbyte(v >> 16),\n\t\t\t\tbyte(v >> 24),\n\t\t\t\tbyte(v >> 32),\n\t\t\t\tbyte(v >> 40),\n\t\t\t\tbyte(v >> 48),\n\t\t\t\tbyte(v >> 56),\n\t\t\t})\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := 
make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteFloat64(buf io.Writer, v float64) (n int, err error) {\n\tbits := math.Float64bits(v)\n\tbits = bits ^ (-(bits >> 63) | (1 << 63))\n\tdata := make([]byte, 8)\n\tbinary.BigEndian.PutUint64(data, bits)\n\treturn buf.Write(data)\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func FloatFloat64(val float64) (out *big.Float, err error) {\n\tout = new(big.Float).SetFloat64(val)\n\treturn\n}", "func (c Channel) WriteFloat64(name string, value float64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_double(\n\t\tc.handle,\n\t\tcName,\n\t\tC.double(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m 
*MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteFloat(f float64, bitSize int) {\n\tif f != f {\n\t\tw.writeByte(TagNaN)\n\t\treturn\n\t}\n\tif f > math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagPos})\n\t\treturn\n\t}\n\tif f < -math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagNeg})\n\t\treturn\n\t}\n\tw.writeByte(TagDouble)\n\tvar buf [64]byte\n\tw.write(strconv.AppendFloat(buf[:0], f, 'g', -1, bitSize))\n\tw.writeByte(TagSemicolon)\n}", "func (out *OutBuffer) WriteFloat64BE(v float64) {\n\tout.WriteUint64BE(math.Float64bits(v))\n}", "func (_m *ValueConverter) ToFloat(_a0 interface{}) float64 {\n\tret := _m.Called(_a0)\n\n\tvar r0 float64\n\tif rf, ok := ret.Get(0).(func(interface{}) float64); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(float64)\n\t}\n\n\treturn r0\n}", "func newFloat(value *big.Float) *TypedFloat {\n\tbytes, _ := value.GobEncode()\n\ttypedFloat := TypedFloat{\n\t\tBytes: bytes,\n\t\tType: ValueType_FLOAT,\n\t}\n\treturn &typedFloat\n}", "func (o *FakeObject) Float() float64 { return o.Value.(float64) }", "func (d Decimal) BigFloat() *big.Float {\n\tf := &big.Float{}\n\tf.SetString(d.String())\n\treturn f\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func NewBigFloat(f float64) *big.Float {\n\tr := big.NewFloat(f)\n\tr.SetPrec(CurrentPrecision)\n\treturn r\n}", "func (m *MockSeriesRef) Write(ctx context.Context, timestamp time.UnixNano, value float64, unit time.Unit, annotation []byte, wOpts series.WriteOptions) (bool, series.WriteType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, timestamp, value, unit, annotation, wOpts)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(series.WriteType)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "func FloatGobEncode(x *big.Float,) ([]byte, error)", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func WriteFloat(buffer []byte, offset int, value float32) {\n 
WriteUInt32(buffer, offset, math.Float32bits(value))\n}", "func (client PrimitiveClient) PutFloat(complexBody FloatWrapper) (result autorest.Response, err error) {\n req, err := client.PutFloatPreparer(complexBody)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.PutFloatSender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure sending request\")\n }\n\n result, err = client.PutFloatResponder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (m *MockStore) WriteFitbitToken(token *oauth2.Token) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteFitbitToken\", token)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (fw *Writer) PutFloat64Field(addr biopb.Coord, v float64) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutFloat64(v)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *Message) putFloat64(v float64) {\n\tb := m.bufferForPut(8)\n\tdefer b.Advance(8)\n\n\tbinary.LittleEndian.PutUint64(b.Bytes[b.Offset:], math.Float64bits(v))\n}", "func newBigFloat(n uint64) *big.Float {\n\ttmp := new(big.Float).SetUint64(n)\n\ttmp.SetPrec(ENCODER_DECODER_PREC)\n\treturn tmp\n}", "func (w *FormSerializationWriter) WriteFloat64Value(key string, value *float64) error {\n\tif key != \"\" && value != nil {\n\t\tw.writePropertyName(key)\n\t}\n\tif value != nil {\n\t\tw.writeRawValue(strconv.FormatFloat(*value, 'f', -1, 64))\n\t}\n\tif key != \"\" && value != nil {\n\t\tw.writePropertySeparator()\n\t}\n\treturn nil\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (this *channelStruct) WriteFloats(samples []float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (z *Rat) SetFloat64(f float64) *Rat {}", "func WriteFloat64(src []float64, dst Floating) int {\n\tlength := min(dst.Len(), len(src))\n\tfor i := 0; i < length; i++ {\n\t\tdst.SetSample(i, float64(src[i]))\n\t}\n\treturn ChannelLength(length, dst.Channels())\n}", "func (instance *Instance) SetFloat64(fieldName string, value float64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, 
C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (w *ByteWriter) MustWriteFloat64(val float64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (w *Writer) Float64(n float64) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 64)\n}", "func TestFloat(t *testing.T) {\n\ttype Test struct {\n\t\tvalue interface{}\n\t\texpected float64\n\t}\n\ttests := []Test{\n\t\t{float32(3), 3},\n\t\t{float32(3.4), 3.4},\n\t\t{float32(0), 0},\n\t\t{float32(0.4), 0.4},\n\t\t{float32(-10.1), -10.1},\n\t\t{float64(4), 4},\n\t\t{float64(4.1), 4.1},\n\t\t{float64(0), 0},\n\t\t{float64(0.3), 0.3},\n\t\t{float64(-12.03), -12.03},\n\t}\n\n\tfor _, test := range tests {\n\t\t// Encode it\n\t\tdata, err := Marshal(test.value)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Marshal failed: %v\", err)\n\t\t}\n\t\t// Decode it\n\t\tvar value float64\n\t\terr = Unmarshal(data, &value)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Unmarshal failed: %v\", err)\n\t\t}\n\t\t// Check equality.\n\t\tif !reflect.DeepEqual(value, test.expected) {\n\t\t\tt.Fatalf(\"not equal %v/%v\", value, test.expected)\n\t\t}\n\t\tt.Logf(\"Unmarshal() = %+v\\n\", value)\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (val *Float) writeTo(buf *bytes.Buffer) error {\n\tif val == nil {\n\t\t_, err := buf.WriteString(\"null\")\n\t\treturn err\n\t}\n\t_, err := fmt.Fprint(buf, float64(*val))\n\treturn err\n}", "func (z *Big) SetFloat64(x float64) *Big {\n\tif x == 0 {\n\t\tvar sign form\n\t\tif math.Signbit(x) {\n\t\t\tsign = signbit\n\t\t}\n\t\treturn z.setZero(sign, 0)\n\t}\n\tif math.IsNaN(x) {\n\t\tvar sign form\n\t\tif math.Signbit(x) {\n\t\t\tsign = signbit\n\t\t}\n\t\treturn z.setNaN(0, qnan|sign, 0)\n\t}\n\tif math.IsInf(x, 0) {\n\t\tif math.IsInf(x, 1) {\n\t\t\tz.form = pinf\n\t\t} else {\n\t\t\tz.form = ninf\n\t\t}\n\t\treturn z\n\t}\n\n\t// The gist of the following is lifted from math/big/rat.go, but adapted for\n\t// base-10 decimals.\n\n\tconst expMask = 1<<11 - 1\n\tbits := math.Float64bits(x)\n\tmantissa := bits & (1<<52 - 1)\n\texp := int((bits >> 52) & expMask)\n\tif exp == 0 { // denormal\n\t\texp -= 1022\n\t} else { // normal\n\t\tmantissa |= 1 << 52\n\t\texp -= 1023\n\t}\n\n\tif mantissa == 0 {\n\t\treturn z.SetUint64(0)\n\t}\n\n\tshift := 52 - exp\n\tfor mantissa&1 == 0 && shift > 0 {\n\t\tmantissa >>= 1\n\t\tshift--\n\t}\n\n\tz.exp = 0\n\tz.form = finite | form(bits>>63)\n\n\tif shift > 0 
{\n\t\tz.unscaled.SetUint64(uint64(shift))\n\t\tz.unscaled.Exp(c.FiveInt, &z.unscaled, nil)\n\t\tarith.Mul(&z.unscaled, &z.unscaled, mantissa)\n\t\tz.exp = -shift\n\t} else {\n\t\t// TODO(eric): figure out why this doesn't work for _some_ numbers. See\n\t\t// https://github.com/ericlagergren/decimal/issues/89\n\t\t//\n\t\t// z.compact = mantissa << uint(-shift)\n\t\t// z.precision = arith.Length(z.compact)\n\n\t\tz.compact = c.Inflated\n\t\tz.unscaled.SetUint64(mantissa)\n\t\tz.unscaled.Lsh(&z.unscaled, uint(-shift))\n\t}\n\treturn z.norm()\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (s *Structure) Float64(isMaster bool, cmd string, params ...interface{}) (reply float64, err error) {\n\tconn := s.getConn(isMaster)\n\tif conn == nil {\n\t\treturn constant.ZeroFLOAT64, configNotExistsOrLoad(s.InstanceName, isMaster)\n\t}\n\n\treply, err = redis.Float64(conn.Do(cmd, params...))\n\tconn.Close()\n\n\treturn reply, err\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (z *Float) SetFloat64(x float64) *Float {}", "func (m *MockSpaceStorage) WriteSpaceHash(arg0 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteSpaceHash\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}" ]
[ "0.8631717", "0.8431752", "0.8291609", "0.7719881", "0.75569457", "0.74524903", "0.7259598", "0.69641304", "0.6741704", "0.6688483", "0.6532758", "0.6410249", "0.6239776", "0.6208443", "0.6142447", "0.6065741", "0.6063831", "0.60201216", "0.5984412", "0.58820206", "0.5843859", "0.5835373", "0.5793238", "0.5784325", "0.575006", "0.57270145", "0.5702599", "0.5696998", "0.56874436", "0.5650964", "0.5589224", "0.55213654", "0.5494792", "0.5488131", "0.5474722", "0.5454198", "0.54458684", "0.54259384", "0.5423256", "0.5386087", "0.53610116", "0.5356992", "0.5345114", "0.53313524", "0.52740026", "0.527352", "0.5271043", "0.52544713", "0.524868", "0.5146019", "0.51161295", "0.51138115", "0.5095837", "0.50946224", "0.50909805", "0.5084719", "0.50545555", "0.5052752", "0.5035955", "0.5017589", "0.50064844", "0.5003708", "0.49859214", "0.49738845", "0.49663642", "0.4960646", "0.4958333", "0.49546468", "0.49459645", "0.49419248", "0.49314386", "0.49266905", "0.49025527", "0.48925063", "0.48897544", "0.4889604", "0.48894542", "0.4877593", "0.48756728", "0.4862402", "0.4852277", "0.4838001", "0.4834285", "0.4828342", "0.48272845", "0.4815834", "0.4804732", "0.47987336", "0.47880825", "0.47728178", "0.47637263", "0.4759564", "0.4755998", "0.47483805", "0.47471255", "0.47445288", "0.47441432", "0.47420016", "0.4733018", "0.47198474" ]
0.8680841
0
WriteBigFloat is a helper method to define mock.On call logicalName string bitLength uint8 value big.Float writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call { return &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On("WriteBigFloat", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteBigFloat(bf *big.Float) {\n\tw.writeByte(TagDouble)\n\tvar buf [64]byte\n\tw.write(bf.Append(buf[:0], 'g', -1))\n\tw.writeByte(TagSemicolon)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *ByteWriter) WriteFloat64(val float64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (e *Encoder) WriteFloat(n float64, bitSize int) {\n\te.prepareNext(Number)\n\te.out = appendFloat(e.out, n, bitSize)\n}", "func (bw *BufWriter) Float64(f float64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\tbw.stringBuf, bw.Error = Float64(f, bw.stringBuf[:0])\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.Write(bw.stringBuf)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func (w *Writer) WriteFloat(f float64, bitSize int) {\n\tif f != f 
{\n\t\tw.writeByte(TagNaN)\n\t\treturn\n\t}\n\tif f > math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagPos})\n\t\treturn\n\t}\n\tif f < -math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagNeg})\n\t\treturn\n\t}\n\tw.writeByte(TagDouble)\n\tvar buf [64]byte\n\tw.write(strconv.AppendFloat(buf[:0], f, 'g', -1, bitSize))\n\tw.writeByte(TagSemicolon)\n}", "func (w *Writer) WriteFloat64(v float64) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func FloatFloat64(val float64) (out *big.Float, err error) {\n\tout = new(big.Float).SetFloat64(val)\n\treturn\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteFloat(buffer []byte, offset int, value float32) {\n WriteUInt32(buffer, offset, math.Float32bits(value))\n}", "func FloatMarshalText(x *big.Float,) ([]byte, error)", "func (w *Writer) Float64(n float64) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 64)\n}", "func (c Channel) WriteFloat64(name string, value float64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_double(\n\t\tc.handle,\n\t\tcName,\n\t\tC.double(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (p *Stream) WriteFloat64(value float64) {\n\tif value == 0 {\n\t\tp.writeFrame[p.writeIndex] = 4\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else {\n\t\tv := math.Float64bits(value)\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 5\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t} else {\n\t\t\tp.PutBytes([]byte{\n\t\t\t\t5,\n\t\t\t\tbyte(v),\n\t\t\t\tbyte(v >> 8),\n\t\t\t\tbyte(v >> 16),\n\t\t\t\tbyte(v >> 24),\n\t\t\t\tbyte(v >> 32),\n\t\t\t\tbyte(v >> 40),\n\t\t\t\tbyte(v 
>> 48),\n\t\t\t\tbyte(v >> 56),\n\t\t\t})\n\t\t}\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func NewBigFloat(f float64) *big.Float {\n\tr := big.NewFloat(f)\n\tr.SetPrec(CurrentPrecision)\n\treturn r\n}", "func (d Decimal) BigFloat() *big.Float {\n\tf := &big.Float{}\n\tf.SetString(d.String())\n\treturn f\n}", "func FloatString(x *big.Float,) string", "func FloatAppend(x *big.Float, buf []byte, fmt byte, prec int) []byte", "func (this *channelStruct) WriteFloats(samples []float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func WriteFloat64(buf io.Writer, v float64) (n int, err error) {\n\tbits := math.Float64bits(v)\n\tbits = bits ^ (-(bits >> 63) | (1 << 63))\n\tdata := make([]byte, 8)\n\tbinary.BigEndian.PutUint64(data, bits)\n\treturn buf.Write(data)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func newFloat(value *big.Float) *TypedFloat {\n\tbytes, _ := value.GobEncode()\n\ttypedFloat := TypedFloat{\n\t\tBytes: bytes,\n\t\tType: ValueType_FLOAT,\n\t}\n\treturn &typedFloat\n}", "func Float(val string) (out *big.Float, err error) {\n\tvalue, ret := new(big.Float).SetString(val)\n\tif !ret {\n\t\terr = fmt.Errorf(\"invalid va\")\n\t\treturn\n\t}\n\treturn value, err\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (client PrimitiveClient) PutFloat(complexBody FloatWrapper) (result autorest.Response, err error) {\n req, err := client.PutFloatPreparer(complexBody)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.PutFloatSender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, 
\"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure sending request\")\n }\n\n result, err = client.PutFloatResponder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func newBigFloat(n uint64) *big.Float {\n\ttmp := new(big.Float).SetUint64(n)\n\ttmp.SetPrec(ENCODER_DECODER_PREC)\n\treturn tmp\n}", "func (o *FakeObject) Float() float64 { return o.Value.(float64) }", "func (z *Rat) SetFloat64(f float64) *Rat {}", "func NewFloat(x float64) *big.Float", "func (val *Float) writeTo(buf *bytes.Buffer) error {\n\tif val == nil {\n\t\t_, err := buf.WriteString(\"null\")\n\t\treturn err\n\t}\n\t_, err := fmt.Fprint(buf, float64(*val))\n\treturn err\n}", "func (out *OutBuffer) WriteFloat64LE(v float64) {\n\tout.WriteUint64LE(math.Float64bits(v))\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteFloat64BE(v float64) {\n\tout.WriteUint64BE(math.Float64bits(v))\n}", "func (m *Message) putFloat64(v float64) {\n\tb := m.bufferForPut(8)\n\tdefer b.Advance(8)\n\n\tbinary.LittleEndian.PutUint64(b.Bytes[b.Offset:], math.Float64bits(v))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func main() {\n\t// Writing binary values\n\tbuf := bytes.NewBuffer([]byte{})\n\tif err := binary.Write(buf, binary.BigEndian, 1.004); err != nil {\n\t\tpanic(err)\n\t}\n\tif err := binary.Write(buf, binary.BigEndian, []byte(\"Hello\")); err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Reading the written values\n\tvar num float64\n\tif err := binary.Read(buf, binary.BigEndian, &num); err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"float64: %.3f\\n\", num)\n\tgreeting := make([]byte, 5)\n\tif err := binary.Read(buf, binary.BigEndian, &greeting); err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Printf(\"string: %s\\n\", greeting)\n}", "func (instance *Instance) SetFloat64(fieldName string, value float64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func FloatGobEncode(x *big.Float,) ([]byte, error)", "func (z *Float) SetFloat64(x float64) *Float {}", "func AppendJSONFloat(buf []byte, v float64, bitSize int) []byte {\n\treturn strconv.AppendFloat(buf, v, 'g', -1, bitSize)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func FormatFloat(f float64, bits int) string {\n\tb := make([]byte, 0, 64)\n\tb = AppendFloat(b, f, bits)\n\treturn string(b)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func appendFloat(dst []byte, f float64, fmt byte, prec int, bitSize int) []byte {\n\treturn genericFtoa(dst, f, fmt, prec, bitSize)\n}", "func (b *Buffer) AppendFloat64(v float64) {\n\tb.buf = strconv.AppendFloat(b.buf, v, 'f', -1, 64)\n}", "func FloatMul(z *big.Float, x, y *big.Float,) *big.Float", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64);\n\tFloat64Var(p, name, value, usage);\n\treturn p;\n}", "func WriteFloat64(src []float64, dst Floating) int {\n\tlength := min(dst.Len(), len(src))\n\tfor i := 0; i < length; i++ {\n\t\tdst.SetSample(i, float64(src[i]))\n\t}\n\treturn ChannelLength(length, dst.Channels())\n}", "func (m *MockSession) Float64(arg0 string) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", arg0)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (d Decimal) BigFloat() (*big.Float, error) {\n\tf, _, err := new(big.Float).Parse(string(d), 0)\n\treturn f, err\n}", "func (z *Big) SetFloat(x *big.Float) *Big {\n\tif x.IsInf() {\n\t\tif x.Signbit() {\n\t\t\tz.form = ninf\n\t\t} else {\n\t\t\tz.form = pinf\n\t\t}\n\t\treturn z\n\t}\n\n\tneg := x.Signbit()\n\tif x.Sign() == 0 {\n\t\tif neg {\n\t\t\tz.form |= signbit\n\t\t}\n\t\tz.compact = 0\n\t\tz.precision = 1\n\t\treturn z\n\t}\n\n\tz.exp = 0\n\tx0 := new(big.Float).Copy(x).SetPrec(big.MaxPrec)\n\tx0.Abs(x0)\n\tif !x.IsInt() {\n\t\tfor !x0.IsInt() {\n\t\t\tx0.Mul(x0, c.TenFloat)\n\t\t\tz.exp--\n\t\t}\n\t}\n\n\tif mant, acc := x0.Uint64(); acc == 
big.Exact {\n\t\tz.compact = mant\n\t\tz.precision = arith.Length(mant)\n\t} else {\n\t\tz.compact = c.Inflated\n\t\tx0.Int(&z.unscaled)\n\t\tz.precision = arith.BigLength(&z.unscaled)\n\t}\n\tz.form = finite\n\tif neg {\n\t\tz.form |= signbit\n\t}\n\treturn z\n}", "func FloatSetString(z *big.Float, s string) (*big.Float, bool)", "func FloatText(x *big.Float, format byte, prec int) string", "func (f *Flagger) Float64(name, shorthand string, value float64, usage string) {\n\tf.cmd.Flags().Float64P(name, shorthand, value, usage)\n\tf.cfg.BindPFlag(name, f.cmd.Flags().Lookup(name))\n}", "func WriteDouble(buffer []byte, offset int, value float64) {\n WriteUInt64(buffer, offset, math.Float64bits(value))\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn Global.Float64(name, value, usage)\n}", "func (w *FormSerializationWriter) WriteFloat64Value(key string, value *float64) error {\n\tif key != \"\" && value != nil {\n\t\tw.writePropertyName(key)\n\t}\n\tif value != nil {\n\t\tw.writeRawValue(strconv.FormatFloat(*value, 'f', -1, 64))\n\t}\n\tif key != \"\" && value != nil {\n\t\tw.writePropertySeparator()\n\t}\n\treturn nil\n}", "func FloatSetMode(z *big.Float, mode big.RoundingMode,) *big.Float", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func stringOfBigFloat(n *big.Float) string {\n\treturn n.Text('g', -1)\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func RegisterFloat64(key string, def float64, description string) onion.Float {\n\tsetDescription(key, description)\n\treturn o.RegisterFloat64(key, def)\n}", "func (m Measurement) AddFloat64(name string, value float64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func NewFloat(f float64) T {\n\treturn big.NewFloat(f)\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn ex.FlagSet.Float64(name, value, 
usage)\n}", "func Float(flag string, value float64, description string) *float64 {\n\tvar v float64\n\tFloatVar(&v, flag, value, description)\n\treturn &v\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (fw *Writer) PutFloat64Field(addr biopb.Coord, v float64) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutFloat64(v)\n}", "func (t Float64) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteFloat64(float64(t))\n\treturn aWriterPool.Put(lw)\n}", "func JSONFloat(f float64, bits int) string {\n\tvar e strings.Builder\n\tvar scratch [64]byte\n\tb := scratch[:0]\n\tif math.IsInf(f, 0) || math.IsNaN(f) {\n\t\treturn \"NaN\"\n\t}\n\tabs := math.Abs(f)\n\tfmt := byte('f')\n\tif abs != 0 {\n\t\tif bits == 64 && (abs < 1e-6 || abs >= 1e21) || bits == 32 && (float32(abs) < 1e-6 || float32(abs) >= 1e21) {\n\t\t\tfmt = 'e'\n\t\t}\n\t}\n\tb = strconv.AppendFloat(b, f, fmt, -1, bits)\n\tif fmt == 'e' {\n\t\t// clean up e-09 to e-9\n\t\tn := len(b)\n\t\tif n >= 4 && b[n-4] == 'e' && b[n-3] == '-' && b[n-2] == '0' {\n\t\t\tb[n-2] = b[n-1]\n\t\t\tb = b[:n-1]\n\t\t}\n\t}\n\te.Write(b)\n\treturn e.String()\n}", "func (z *Float) SetUint64(x uint64) *Float {}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func ParseFloat(s string, base int, prec uint, mode big.RoundingMode,) (*big.Float, int, error)", "func AppendFloat64(dst []byte, val float64) []byte {\n\tswitch {\n\tcase math.IsNaN(val):\n\t\treturn append(dst, \"\\xfb\\x7f\\xf8\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\tcase math.IsInf(val, 1):\n\t\treturn append(dst, \"\\xfb\\x7f\\xf0\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\tcase math.IsInf(val, -1):\n\t\treturn append(dst, \"\\xfb\\xff\\xf0\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\t}\n\tmajor := majorTypeSimpleAndFloat\n\tsubType := additionalTypeFloat64\n\tn := math.Float64bits(val)\n\tdst = append(dst, byte(major|subType))\n\tfor i := uint(1); i <= 8; i++ {\n\t\tb := byte(n >> ((8 - i) * 8))\n\t\tdst = append(dst, b)\n\t}\n\treturn dst\n}", "func (m *MockValue) Float64(def float64) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", def)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn 
&MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (f Float) Big() (x *big.Float, nan bool) {\n\tsignbit := f.Signbit()\n\texp := f.Exp()\n\tfrac := f.Frac()\n\tx = big.NewFloat(0)\n\tx.SetPrec(precision)\n\tx.SetMode(big.ToNearestEven)\n\n\t// ref: https://en.wikipedia.org/wiki/Half-precision_floating-point_format#Exponent_encoding\n\t//\n\t// 0b00001 - 0b11110\n\t// Normalized number.\n\t//\n\t// (-1)^signbit * 2^(exp-15) * 1.mant_2\n\tlead := 1\n\texponent := exp - bias\n\n\tswitch exp {\n\t// 0b11111\n\tcase 0x1F:\n\t\t// Inf or NaN\n\t\tif frac == 0 {\n\t\t\t// +-Inf\n\t\t\tx.SetInf(signbit)\n\t\t\treturn x, false\n\t\t}\n\t\t// +-NaN\n\t\tif signbit {\n\t\t\tx.Neg(x)\n\t\t}\n\t\treturn x, true\n\t// 0b00000\n\tcase 0x00:\n\t\tif frac == 0 {\n\t\t\t// +-Zero\n\t\t\tif signbit {\n\t\t\t\tx.Neg(x)\n\t\t\t}\n\t\t\treturn x, false\n\t\t}\n\t\t// Denormalized number.\n\t\t//\n\t\t// (-1)^signbit * 2^(-14) * 0.mant_2\n\t\tlead = 0\n\t\texponent = -14\n\t}\n\n\t// number = [ sign ] [ prefix ] mantissa [ exponent ] | infinity .\n\tsign := \"+\"\n\tif signbit {\n\t\tsign = \"-\"\n\t}\n\ts := fmt.Sprintf(\"%s0b%d.%010bp%d\", sign, lead, frac, exponent)\n\tif _, _, err := x.Parse(s, 0); err != nil {\n\t\tpanic(err)\n\t}\n\treturn x, false\n}", "func (p *numberPacker) packFloat(val float64) packedNumber {\n\tn := Number{Float: val}\n\treturn p.pack(&n)\n}", "func (f *Float) bigFtoa(buf []byte, fmt byte, prec int) []byte {\n\tif debugFloat && f.IsInf() {\n\t\tpanic(\"non-finite float\")\n\t}\n\n\t// 1) convert Float to multiprecision decimal\n\tvar mant nat\n\tif f.form == finite {\n\t\tmant = f.mant\n\t}\n\tvar d decimal\n\td.init(mant, int(f.exp)-f.mant.bitLen())\n\n\t// 2) round to desired precision\n\tshortest := false\n\tif prec < 0 {\n\t\tshortest = true\n\t\tpanic(\"unimplemented\")\n\t\t// TODO(gri) complete this\n\t\t// roundShortest(&d, f.mant, int(f.exp))\n\t\t// Precision for shortest representation mode.\n\t\tswitch fmt {\n\t\tcase 'e', 'E':\n\t\t\tprec = len(d.mant) - 1\n\t\tcase 'f':\n\t\t\tprec = max(len(d.mant)-d.exp, 0)\n\t\tcase 'g', 'G':\n\t\t\tprec = len(d.mant)\n\t\t}\n\t} else {\n\t\t// round appropriately\n\t\tswitch fmt {\n\t\tcase 'e', 'E':\n\t\t\t// one digit before and number of digits after decimal point\n\t\t\td.round(1 + prec)\n\t\tcase 'f':\n\t\t\t// number of digits before and after decimal point\n\t\t\td.round(d.exp + prec)\n\t\tcase 'g', 'G':\n\t\t\tif prec == 0 {\n\t\t\t\tprec = 1\n\t\t\t}\n\t\t\td.round(prec)\n\t\t}\n\t}\n\n\t// 3) read digits out and format\n\tswitch fmt {\n\tcase 'e', 'E':\n\t\treturn fmtE(buf, fmt, prec, f.neg, d)\n\tcase 'f':\n\t\treturn fmtF(buf, prec, f.neg, d)\n\tcase 'g', 'G':\n\t\t// trim trailing fractional zeros in %e format\n\t\teprec := prec\n\t\tif eprec > len(d.mant) && len(d.mant) >= d.exp {\n\t\t\teprec = len(d.mant)\n\t\t}\n\t\t// %e is used if the exponent from the conversion\n\t\t// is less than -4 or greater than or equal to the precision.\n\t\t// If precision was the shortest possible, use eprec = 6 for\n\t\t// this decision.\n\t\tif shortest {\n\t\t\teprec = 6\n\t\t}\n\t\texp := d.exp - 1\n\t\tif exp < -4 || exp >= eprec {\n\t\t\tif prec > len(d.mant) {\n\t\t\t\tprec = len(d.mant)\n\t\t\t}\n\t\t\treturn fmtE(buf, fmt+'e'-'g', prec-1, f.neg, d)\n\t\t}\n\t\tif prec > d.exp {\n\t\t\tprec = len(d.mant)\n\t\t}\n\t\treturn fmtF(buf, max(prec-d.exp, 0), f.neg, d)\n\t}\n\n\t// unknown format\n\treturn append(buf, 
'%', fmt)\n}", "func printFloat(w io.Writer, val float64, precision int, typeElided bool) {\n\tw.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision)))\n\tif typeElided && !math.IsInf(val, 0) && val == math.Floor(val) {\n\t\tw.Write(pointZeroBytes)\n\t}\n}", "func (c *Configurator) Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64)\n\n\tc.Float64Var(p, name, value, usage)\n\n\treturn p\n}" ]
[ "0.8524457", "0.82616055", "0.8254902", "0.75553405", "0.74119896", "0.7197534", "0.7081183", "0.69427246", "0.62936157", "0.62330365", "0.61646247", "0.6144696", "0.606474", "0.60556525", "0.59745574", "0.589862", "0.58421576", "0.5780743", "0.5731301", "0.57255864", "0.57085514", "0.55924857", "0.5583835", "0.5557176", "0.5496271", "0.54955566", "0.5494179", "0.54840344", "0.5483434", "0.5457297", "0.54491824", "0.5446008", "0.5445791", "0.54336387", "0.5396556", "0.53889096", "0.537571", "0.5371208", "0.53579587", "0.53356117", "0.5291221", "0.5284692", "0.5214845", "0.51962173", "0.51787657", "0.5178741", "0.51785797", "0.5169183", "0.5168694", "0.51646674", "0.51518494", "0.5140277", "0.5134594", "0.5130743", "0.5118828", "0.5113681", "0.5111751", "0.5102927", "0.5102726", "0.5100955", "0.50857604", "0.5083916", "0.5076872", "0.5074437", "0.5072213", "0.5062998", "0.50625896", "0.5060405", "0.50563383", "0.5055061", "0.50501615", "0.5038632", "0.5032067", "0.5013638", "0.4998251", "0.4987879", "0.49863923", "0.49828884", "0.49773714", "0.49562865", "0.49377123", "0.49330047", "0.49320093", "0.4915554", "0.48990986", "0.48942718", "0.48873147", "0.48814827", "0.4876707", "0.48659188", "0.48614103", "0.48584008", "0.48540944", "0.48523873", "0.48473582", "0.4844825", "0.48380566", "0.48246646", "0.48161575", "0.48038307" ]
0.86179817
0
WriteBigInt provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
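A minimal usage sketch of how a mockery-generated mock like the one above is typically exercised in a test. It assumes the mock embeds testify's mock.Mock (implied by the _m.Called call), that the test lives in the same package as the mock, and that the argument values and test name are placeholders rather than anything taken from the generated code itself.

package utils

import (
	"math/big"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

func TestWriteBigIntExpectation(t *testing.T) {
	// Assumed: MockWriteBufferJsonBased embeds mock.Mock, so On/AssertExpectations are available.
	wb := &MockWriteBufferJsonBased{}

	// Expect exactly one WriteBigInt call with this logical name, a 64-bit length,
	// and any *big.Int value; no variadic writer args are passed here.
	wb.On("WriteBigInt", "value", uint8(64), mock.AnythingOfType("*big.Int")).
		Return(nil).
		Once()

	err := wb.WriteBigInt("value", 64, big.NewInt(42))

	assert.NoError(t, err)
	wb.AssertExpectations(t)
}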
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, 
bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, 
uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = 
append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := 
_m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *Writer) WriteBigInt(bi *big.Int) {\n\tw.writeByte(TagLong)\n\tw.writeString(bi.String())\n\tw.writeByte(TagSemicolon)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, 
\"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) 
*MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteInt64(buffer []byte, offset int, value int64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName 
interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, 
writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (n *Node) SetBigInt(x *big.Int)", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := 
bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 
01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func (sb *Builder) WriteInt(i int64, base int) {\n\t// if 0 <= i && i < nSmalls && base == 10 {\n\t// \tsb.WriteString(smallInt(int(i)))\n\t// }\n\tsb.formatBits(uint64(i), base, i < 0)\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (c *fakeRedisConn) WriteInt(num int) { c.rsp = append(c.rsp, num) }", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func TestBigIntFillBytes(t *testing.T) {\n\tcheckResult := func(t *testing.T, buf []byte, want *BigInt) {\n\t\tt.Helper()\n\t\tgot := new(BigInt).SetBytes(buf)\n\t\tif got.CmpAbs(want) != 0 {\n\t\t\tt.Errorf(\"got 0x%x, want 0x%x: %x\", got, want, buf)\n\t\t}\n\t}\n\tpanics := func(f func()) (panic bool) {\n\t\tdefer func() { panic = recover() != nil }()\n\t\tf()\n\t\treturn\n\t}\n\n\tfor _, n := range []string{\n\t\t\"0\",\n\t\t\"1000\",\n\t\t\"0xffffffff\",\n\t\t\"-0xffffffff\",\n\t\t\"0xffffffffffffffff\",\n\t\t\"0x10000000000000000\",\n\t\t\"0xabababababababababababababababababababababababababa\",\n\t\t\"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\",\n\t} {\n\t\tt.Run(n, func(t *testing.T) {\n\t\t\tt.Logf(n)\n\t\t\tx, ok := new(BigInt).SetString(n, 0)\n\t\t\tif !ok {\n\t\t\t\tpanic(\"invalid test entry\")\n\t\t\t}\n\n\t\t\t// Perfectly sized buffer.\n\t\t\tbyteLen := (x.BitLen() + 7) / 8\n\t\t\tbuf := make([]byte, byteLen)\n\t\t\tcheckResult(t, x.FillBytes(buf), x)\n\n\t\t\t// Way larger, checking all bytes get zeroed.\n\t\t\tbuf = make([]byte, 100)\n\t\t\tfor i := range buf {\n\t\t\t\tbuf[i] = 0xff\n\t\t\t}\n\t\t\tcheckResult(t, x.FillBytes(buf), x)\n\n\t\t\t// Too small.\n\t\t\tif byteLen > 0 {\n\t\t\t\tbuf = make([]byte, byteLen-1)\n\t\t\t\tif !panics(func() { x.FillBytes(buf) }) {\n\t\t\t\t\tt.Errorf(\"expected panic for small buffer and value %x\", x)\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn 
&MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteInt64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func WriteBigInt(w io.Writer, v *big.Int) error {\n\tif v.Cmp(new(big.Int).SetUint64(0)) < 0 {\n\t\treturn fmt.Errorf(\"cannot write negative big.Int\")\n\t}\n\treturn WriteBytes(w, v.Bytes())\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, 
err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func TestUint64B(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, 
[]byte{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01}}, // Min single byte\n\t\t{255, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff}}, // Max single byte\n\t\t{256, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0x00,0x00,0x00,0x00,0xff,0xff,0xff,0xff}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64B %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64B(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64B #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64B #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64B(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64B #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64B #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestBigIntMatchesMathBigInt15(t *testing.T) {\n\tt.Run(\"FillBytes\", func(t *testing.T) {\n\t\tapd := func(z number) []byte {\n\t\t\treturn z.toApd(t).FillBytes(make([]byte, len(z)))\n\t\t}\n\t\tmath := func(z number) []byte {\n\t\t\treturn z.toMath(t).FillBytes(make([]byte, len(z)))\n\t\t}\n\t\trequire(t, quick.CheckEqual(apd, math, nil))\n\t})\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockWriter) Write(p []byte) (n int, err error) {\n\tvar start time.Time\n\tvar stats WriterStats\n\tstart = time.Now()\n\tfor _, v := range p {\n\t\tstats.nbytes++ // count bytes\n\t\tif v == '\\n' {\n\t\t\tstats.nlines++ // count newlines (records)\n\t\t}\n\t}\n\n\tstats.duration = time.Since(start)\n\tm.statsChan <- stats\n\treturn stats.nbytes, nil\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn 
&MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockStore) WriteFitbitToken(token *oauth2.Token) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteFitbitToken\", token)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}" ]
[ "0.84534484", "0.8174824", "0.8029277", "0.7017367", "0.68167186", "0.64487153", "0.6428734", "0.6412154", "0.6347939", "0.6293533", "0.6293084", "0.6292098", "0.624689", "0.6234826", "0.62276745", "0.617974", "0.6172028", "0.6128934", "0.60601205", "0.6031964", "0.5986424", "0.5966065", "0.59283763", "0.588795", "0.5873462", "0.5820458", "0.58136094", "0.58075696", "0.57789886", "0.5771945", "0.5759056", "0.57573736", "0.5748683", "0.57456636", "0.5703539", "0.56884015", "0.5652185", "0.56174237", "0.5601872", "0.5564724", "0.5551553", "0.55502486", "0.55372024", "0.55065393", "0.5502121", "0.54875165", "0.54862016", "0.54478484", "0.5446176", "0.5377052", "0.53641045", "0.53453743", "0.53409326", "0.53367364", "0.53338766", "0.5332045", "0.53241295", "0.5311979", "0.52833664", "0.5254776", "0.5249948", "0.52201664", "0.5213372", "0.5210597", "0.51935726", "0.5186925", "0.5185411", "0.5185129", "0.5179953", "0.5170791", "0.5161578", "0.51597106", "0.510353", "0.5096199", "0.5093743", "0.50840753", "0.50630844", "0.505745", "0.504816", "0.504433", "0.50411654", "0.5033174", "0.50257057", "0.5018535", "0.50179005", "0.5013761", "0.5009482", "0.49649975", "0.49400008", "0.49398693", "0.49381733", "0.49224764", "0.49177185", "0.4907368", "0.49069205", "0.49057236", "0.49057004", "0.49055445", "0.49051487", "0.4899943" ]
0.85100025
0
WriteBigInt is a helper method to define mock.On call - logicalName string - bitLength uint8 - value big.Int - writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {
	return &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On("WriteBigInt",
		append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}
}
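For context, a minimal sketch of how a test might use this generated expecter helper; the NewMockWriteBufferJsonBased constructor, the EXPECT() accessor, and the Return method on the returned call object are assumed from mockery's usual codegen and are not shown in this row.

package example

import (
	"math/big"
	"testing"
)

func TestWriteChargeSum(t *testing.T) {
	// Assumed mockery-generated constructor and EXPECT() accessor.
	wb := NewMockWriteBufferJsonBased(t)

	// Register the expectation: one WriteBigInt call with these exact arguments.
	wb.EXPECT().WriteBigInt("chargeSum", uint8(64), big.NewInt(1234)).Return(nil)

	// The code under test would then issue the matching call.
	if err := wb.WriteBigInt("chargeSum", 64, big.NewInt(1234)); err != nil {
		t.Fatal(err)
	}
}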
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteBigInt(bi *big.Int) {\n\tw.writeByte(TagLong)\n\tw.writeString(bi.String())\n\tw.writeByte(TagSemicolon)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn 
&MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (sb *Builder) WriteInt(i int64, base int) {\n\t// if 0 <= i && i < nSmalls && base == 10 {\n\t// \tsb.WriteString(smallInt(int(i)))\n\t// }\n\tsb.formatBits(uint64(i), base, i < 0)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func 
(n *Node) SetBigInt(x *big.Int)", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func SetBigInt(gauge prometheus.Gauge, arg *big.Int) {\n\tgauge.Set(float64(arg.Int64()))\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, 
encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteBigInt(w io.Writer, v *big.Int) error {\n\tif v.Cmp(new(big.Int).SetUint64(0)) < 0 {\n\t\treturn fmt.Errorf(\"cannot write negative big.Int\")\n\t}\n\treturn WriteBytes(w, v.Bytes())\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) 
WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func IntMarshalText(x *big.Int,) ([]byte, error)", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteInt64(buffer []byte, offset int, value int64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteInt64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *fakeRedisConn) WriteInt(num int) { c.rsp = append(c.rsp, num) }", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, 
writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func EncodeBig(bigint *big.Int) string {\n\tnbits := bigint.BitLen()\n\tif nbits == 0 {\n\t\treturn \"0x0\"\n\t}\n\treturn fmt.Sprintf(\"%#x\", bigint)\n}", "func WriteInteger(n int64, buf *goetty.ByteBuf) {\r\n\tbuf.WriteByte(':')\r\n\tbuf.Write(goetty.FormatInt64ToBytes(n))\r\n\tbuf.Write(Delims)\r\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *exportWriter) mpint(x *big.Int, typ types.Type) {\n\tbasic, ok := typ.Underlying().(*types.Basic)\n\tif !ok {\n\t\tpanic(internalErrorf(\"unexpected type %v (%T)\", typ.Underlying(), typ.Underlying()))\n\t}\n\n\tsigned, maxBytes := intSize(basic)\n\n\tnegative := x.Sign() < 0\n\tif !signed && negative {\n\t\tpanic(internalErrorf(\"negative unsigned integer; type %v, value %v\", typ, x))\n\t}\n\n\tb := x.Bytes()\n\tif len(b) > 0 && b[0] == 0 {\n\t\tpanic(internalErrorf(\"leading zeros\"))\n\t}\n\tif uint(len(b)) > maxBytes {\n\t\tpanic(internalErrorf(\"bad mpint length: %d > %d (type %v, value %v)\", len(b), maxBytes, typ, x))\n\t}\n\n\tmaxSmall := 256 - maxBytes\n\tif signed {\n\t\tmaxSmall = 256 - 2*maxBytes\n\t}\n\tif maxBytes == 1 {\n\t\tmaxSmall = 256\n\t}\n\n\t// Check if x can use small value encoding.\n\tif len(b) <= 1 {\n\t\tvar ux uint\n\t\tif len(b) == 1 {\n\t\t\tux = uint(b[0])\n\t\t}\n\t\tif signed {\n\t\t\tux <<= 1\n\t\t\tif negative {\n\t\t\t\tux--\n\t\t\t}\n\t\t}\n\t\tif ux < maxSmall {\n\t\t\tw.data.WriteByte(byte(ux))\n\t\t\treturn\n\t\t}\n\t}\n\n\tn := 256 - uint(len(b))\n\tif signed {\n\t\tn = 256 - 2*uint(len(b))\n\t\tif negative {\n\t\t\tn |= 1\n\t\t}\n\t}\n\tif n < maxSmall || n >= 256 {\n\t\tpanic(internalErrorf(\"encoding mistake: %d, %v, %v => %d\", len(b), signed, negative, n))\n\t}\n\n\tw.data.WriteByte(byte(n))\n\tw.data.Write(b)\n}", "func MockWire(hash []byte, round uint64, step 
uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) Emit(typ byte, args ...uint64) {\n\tnargs := byte(len(args)) - 1\n\tif nargs > 3 {\n\t\tnargs = 3\n\t}\n\tbuf := []byte{typ | nargs<<6}\n\tif nargs == 3 {\n\t\tbuf = append(buf, 0)\n\t}\n\tfor _, a := range args {\n\t\tbuf = appendVarint(buf, a)\n\t}\n\tif nargs == 3 {\n\t\tbuf[1] = byte(len(buf) - 2)\n\t}\n\tn, err := w.Write(buf)\n\tif n != len(buf) || err != nil {\n\t\tpanic(\"failed to write\")\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func IntMarshalJSON(x *big.Int,) ([]byte, error)", "func TestWriteBigChunked(t *testing.T) {\n\trandData := make([]byte, 4096)\n\tif _, err := rand.Read(randData); err != nil {\n\t\tt.Errorf(\"cannot get random data: %s\", err)\n\t\treturn\n\t}\n\tmsgData := \"awesomesauce\\n\" + base64.StdEncoding.EncodeToString(randData)\n\n\tfor _, i := range []CompressType{CompressGzip, CompressZlib} {\n\t\tmsg, err := sendAndRecv(msgData, i)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"sendAndRecv: %s\", err)\n\t\t\treturn\n\t\t}\n\n\t\tif msg.Short != \"awesomesauce\" {\n\t\t\tt.Errorf(\"msg.Short: expected %s, got %s\", msgData, msg.Full)\n\t\t\treturn\n\t\t}\n\n\t\tif msg.Full != msgData {\n\t\t\tt.Errorf(\"msg.Full: expected %s, got %s\", msgData, msg.Full)\n\t\t\treturn\n\t\t}\n\t}\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (e *encoder) writeInt(val int64, size int) {\n\te.writeUint(uint64(val), size)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, 
writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func sendMagicNumber(writer io.Writer, magicNumber magicNumberBytes) error {\n\t_, err := writer.Write(magicNumber[:])\n\treturn err\n}", "func (t *Target) WriteRunParamStub(projectDir string, haddockDir string) (string, error) {\n\n\tvar runParamString string\n\tnMol := 1\n\n\tif haddockDir == \"\" {\n\t\terr := errors.New(\"haddock directory not defined\")\n\t\treturn \"\", err\n\t}\n\n\tif projectDir == \"\" {\n\t\terr := errors.New(\"project directory not defined\")\n\t\treturn \"\", err\n\t}\n\n\tif len(t.Receptor) == 0 {\n\t\terr := errors.New(\"receptor not defined\")\n\t\treturn \"\", err\n\t}\n\n\trunParamString += \"RUN_NUMBER=1\\n\"\n\trunParamString += \"PROJECT_DIR=./\\n\"\n\trunParamString += \"HADDOCK_DIR=\" + haddockDir + \"\\n\"\n\n\t// Write receptor files\n\trunParamString += \"PDB_FILE1=../data/\" + filepath.Base(t.Receptor[0]) + \"\\n\"\n\n\t// Write receptor list file\n\tif t.ReceptorList != \"\" {\n\t\trunParamString += \"PDB_LIST1=../data\" + filepath.Base(t.ReceptorList) + \"\\n\"\n\t}\n\n\t// Write ligand files\n\tif len(t.Ligand) >= 1 {\n\t\trunParamString += \"PDB_FILE2=../data/\" + filepath.Base(t.Ligand[0]) + \"\\n\"\n\t\tnMol++\n\n\t\t// write ligand list files\n\t\tif t.LigandList != \"\" {\n\t\t\trunParamString += \"PDB_LIST2=../data/\" + filepath.Base(t.LigandList) + \"\\n\"\n\t\t}\n\t}\n\n\trunParamString += \"N_COMP=\" + strconv.Itoa(nMol) + \"\\n\"\n\n\trunParamF := filepath.Join(projectDir, \"/run.param\")\n\terr := os.WriteFile(runParamF, []byte(runParamString), 0644)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn runParamF, nil\n\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) Uint64(n uint64) {\n\tw.buf = strconv.AppendUint(w.buf, uint64(n), 10)\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (a *api) SetInt(raw bool) {\n\ta.Commentf(\"%s constructs a field element from a big integer.\", rawname(\"SetInt\", raw))\n\ta.rawcomment(raw)\n\ta.Printf(\"func (x %s) %s(y *big.Int) %s\", a.PointerType(), rawname(\"SetInt\", raw), a.PointerType())\n\ta.EnterBlock()\n\n\ta.Comment(\"Reduce if outside range.\")\n\ta.Linef(\"if y.Sign() < 0 || y.Cmp(%s) >= 0 {\", a.Name(\"p\"))\n\ta.Linef(\"y = new(big.Int).Mod(y, %s)\", a.Name(\"p\"))\n\ta.Linef(\"}\")\n\n\ta.Comment(\"Copy bytes into field element.\")\n\ta.Linef(\"b := y.Bytes()\")\n\ta.Linef(\"i := 0\")\n\ta.Linef(\"for ; i < len(b); i++ {\")\n\ta.Linef(\"x[i] = b[len(b)-1-i]\")\n\ta.Linef(\"}\")\n\ta.Linef(\"for ; i < %s; i++ {\", a.Size())\n\ta.Linef(\"x[i] = 0\")\n\ta.Linef(\"}\")\n\n\tif !raw && a.Montgomery() {\n\t\ta.Comment(\"Encode into the Montgomery domain.\")\n\t\ta.Call(\"Encode\", \"x\", \"x\")\n\t}\n\n\ta.Linef(\"return x\")\n\ta.LeaveBlock()\n}", "func serializeBigInt(x *big.Int) []byte {\n\txb := x.Bytes()\n\treverse(xb)\n\tfor len(xb) < EC_COORD_SIZE {\n\t\txb = append(xb, 0)\n\t}\n\treturn xb\n}", "func (c Channel) WriteInt64(name string, value int64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_longlong(\n\t\tc.handle,\n\t\tcName,\n\t\tC.longlong(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (_e 
*MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func Int64(i64 int64) Val { return Val{t: bsontype.Int64}.writei64(i64) }", "func (w *Writer) Int64(n int64) {\n\tw.buf = strconv.AppendInt(w.buf, int64(n), 10)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (dm *dataManager) writeInt(address uint, i int) (err ProcessException) {\n\tdata := make([]byte, 4)\n\tbinary.LittleEndian.PutUint32(data, uint32(i))\n\n\terr = dm.process.WriteBytes(address, data)\n\n\treturn\n}", "func (f *framer) writeInt(n int32) {\n\tf.buf = appendInt(f.buf, n)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar receipt Receipt\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args[0]) == 0 {\n\t\treturn shim.Error(\"wrong arguments\")\n\t}\n\n\treceiptJson := []byte(args[0])\n\terr = json.Unmarshal(receiptJson, &receipt)\n\tif err != nil {\n\t\tfmt.Println(\"json is wrong,json is: \" + args[0])\n\t\treturn shim.Error(err.Error())\n\t}\n\n\t//check if new owner exists\n\towner, err := getOwner(stub, receipt.OwnerRelation.Id)\n\tif err != nil {\n\t\tfmt.Println(\"Failed to find owner - \" + owner.Id)\n\t\treturn shim.Error(err.Error())\n\t}\n\n\t//check if new receipt exists\n\terr = checkReceipt(stub, receipt.Id)\n\tif err != nil {\n\t\tfmt.Println(\"Failed to create receipt - \" + receipt.Id)\n\t\treturn shim.Error(err.Error())\n\t}\n\n\terr = stub.PutState(receipt.Id, receiptJson) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 
4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (stream *Stream) WriteInt64(nval int64) {\n\tstream.ensure(20)\n\tn := stream.n\n\tvar val uint64\n\tif nval < 0 {\n\t\tval = uint64(-nval)\n\t\tstream.buf[n] = '-'\n\t\tn++\n\t} else {\n\t\tval = uint64(nval)\n\t}\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tq2 := q1 / 1000\n\tif q2 == 0 {\n\t\tn := writeFirstBuf(stream.buf, digits[q1], n)\n\t\twriteBuf(stream.buf, digits[r1], n)\n\t\tstream.n = n + 3\n\t\treturn\n\t}\n\tr2 := q1 - q2*1000\n\tq3 := q2 / 1000\n\tif q3 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q2], n)\n\t\twriteBuf(stream.buf, digits[r2], n)\n\t\twriteBuf(stream.buf, digits[r1], n+3)\n\t\tstream.n = n + 6\n\t\treturn\n\t}\n\tr3 := q2 - q3*1000\n\tq4 := q3 / 1000\n\tif q4 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q3], n)\n\t\twriteBuf(stream.buf, digits[r3], n)\n\t\twriteBuf(stream.buf, digits[r2], n+3)\n\t\twriteBuf(stream.buf, digits[r1], n+6)\n\t\tstream.n = n + 9\n\t\treturn\n\t}\n\tr4 := q3 - q4*1000\n\tq5 := q4 / 1000\n\tif q5 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q4], n)\n\t\twriteBuf(stream.buf, digits[r4], n)\n\t\twriteBuf(stream.buf, digits[r3], n+3)\n\t\twriteBuf(stream.buf, digits[r2], n+6)\n\t\twriteBuf(stream.buf, digits[r1], n+9)\n\t\tstream.n = n + 12\n\t\treturn\n\t}\n\tr5 := q4 - q5*1000\n\tq6 := q5 / 1000\n\tif q6 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q5], n)\n\t} else {\n\t\tstream.buf[n] = byte(q6 + '0')\n\t\tn++\n\t\tr6 := q5 - q6*1000\n\t\twriteBuf(stream.buf, digits[r6], n)\n\t\tn += 3\n\t}\n\twriteBuf(stream.buf, digits[r5], n)\n\twriteBuf(stream.buf, digits[r4], n+3)\n\twriteBuf(stream.buf, digits[r3], n+6)\n\twriteBuf(stream.buf, digits[r2], n+9)\n\twriteBuf(stream.buf, digits[r1], n+12)\n\tstream.n = n + 15\n}" ]
[ "0.8055526", "0.7974968", "0.79611856", "0.64306176", "0.6311685", "0.6293441", "0.6197871", "0.61559606", "0.59665734", "0.583904", "0.57971793", "0.57261574", "0.56822324", "0.5677682", "0.5670915", "0.5643562", "0.5639665", "0.5617584", "0.56104374", "0.560792", "0.5563293", "0.55162716", "0.5500985", "0.5493887", "0.54816914", "0.5470784", "0.5470597", "0.5380177", "0.53536206", "0.5352995", "0.5351638", "0.5336163", "0.52835876", "0.5271559", "0.5262216", "0.5258628", "0.5254162", "0.52369386", "0.5236509", "0.5236364", "0.52182174", "0.51994455", "0.519859", "0.51889783", "0.51796365", "0.5148566", "0.51408505", "0.5129405", "0.51104724", "0.5095321", "0.5073662", "0.50691575", "0.5027026", "0.5022797", "0.50145596", "0.49855745", "0.49713477", "0.496743", "0.49499077", "0.49208194", "0.4919953", "0.49148408", "0.4903114", "0.48994854", "0.48843968", "0.48818773", "0.48716867", "0.48710698", "0.48490423", "0.48452258", "0.48425606", "0.48378217", "0.4822073", "0.48007834", "0.4793962", "0.47901034", "0.4777692", "0.47719952", "0.4759377", "0.47580492", "0.47578913", "0.4747509", "0.47413206", "0.47408518", "0.473632", "0.47337294", "0.47252697", "0.47121546", "0.47085375", "0.4690222", "0.4687891", "0.46868533", "0.46830603", "0.46779227", "0.46563017", "0.46559107", "0.4651425", "0.4645843", "0.46444035", "0.46416137" ]
0.8163736
0
WriteBit provides a mock function with given fields: logicalName, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, 
len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = 
b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs 
...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_mr *MockOStreamMockRecorder) WriteBit(arg0 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteBit\", reflect.TypeOf((*MockOStream)(nil).WriteBit), arg0)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar 
r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca 
[]interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) Bit(b byte) (err error) {\n\tw.bits <<= 1\n\tw.bits |= (b & 1)\n\tw.free--\n\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\treturn\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockIStream) ReadBit() (Bit, error) {\n\tret := _m.ctrl.Call(_m, \"ReadBit\")\n\tret0, _ := ret[0].(Bit)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, 
_va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_mr *MockOStreamMockRecorder) WriteBits(arg0, arg1 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteBits\", reflect.TypeOf((*MockOStream)(nil).WriteBits), arg0, arg1)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (this *DefaultOutputBitStream) WriteBit(bit int) {\n\tif this.bitIndex <= 0 { // bitIndex = -1 if stream is closed => force pushCurrent() => panic\n\t\tthis.current |= uint64(bit & 1)\n\t\tthis.pushCurrent()\n\t} else {\n\t\tthis.current |= (uint64(bit&1) << uint(this.bitIndex))\n\t\tthis.bitIndex--\n\t}\n\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) 
*MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (bm BitMap) SetBit(ctx context.Context, offset int64, value int) (int64, error) {\n\treq := newRequest(\"*4\\r\\n$6\\r\\nSETBIT\\r\\n$\")\n\treq.addStringInt2(bm.name, offset, int64(value))\n\treturn bm.c.cmdInt(ctx, req)\n}", "func (w *Writer) WriteBit(bit Bit) error {\n\tif bit {\n\t\tw.b[0] |= 1 << (w.count - 1)\n\t}\n\n\tw.count--\n\n\tif w.count == 0 {\n\t\t// fill byte to io.Writer\n\t\tif n, err := w.w.Write(w.b[:]); n != 1 || err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.b[0] = 0\n\t\tw.count = 8\n\t}\n\n\treturn nil\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *HadolintPiperFileUtils) FileWrite(filename string, data []byte, perm os.FileMode) error 
{\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockStore) WriteFitbitToken(token *oauth2.Token) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteFitbitToken\", token)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, 
w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\t\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\t\n\tfmt.Println(\"saving state for key: \" + key);\n\t\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n}", "func (_m *Output) WriteOne(ctx context.Context, msg stream.WritableMessage) error {\n\tret := _m.Called(ctx, msg)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, msg)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\r\n\tvar key, value string\r\n\tvar err error\r\n\t\r\n\r\n\tfmt.Println(\"running write()\")\r\n\r\n\tif len(args) != 2 {\r\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\r\n\t}\r\n\r\n\tkey = args[0] //rename for funsies\r\n\tvalue = args[1]\r\n\t\r\n\tv,err := stub.GetState(key)\r\n\tif v!=nil {\r\n\t\treturn nil, errors.New(\"Key already exists\")\r\n\t} else {\r\n\t\r\n\t\r\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\t\r\n\t}\r\n\treturn nil, nil\r\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (_m *RedsyncConn) Set(name string, value string) (bool, error) {\n\tret := _m.Called(name, value)\n\n\tvar r0 bool\n\tif rf, ok := ret.Get(0).(func(string, string) bool); ok {\n\t\tr0 = rf(name, value)\n\t} else {\n\t\tr0 = ret.Get(0).(bool)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, string) error); ok {\n\t\tr1 = rf(name, value)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (obj *BitFieldWriter) Write(p []byte) (int, error) {\n\treturn obj.w.Write(p)\n}", "func (t *Target) Bit(b bool) usm.Value {\n\tif b {\n\t\treturn \"true\"\n\t}\n\treturn \"false\"\n}", "func WriteBool(buffer []byte, offset int, value bool) {\n if value {\n buffer[offset] = 1\n } else {\n buffer[offset] = 0\n }\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p 
*user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}" ]
[ "0.86602896", "0.81716317", "0.7927377", "0.7811452", "0.72616553", "0.69053876", "0.67568", "0.6756787", "0.6655242", "0.66078085", "0.65118873", "0.64812976", "0.6439382", "0.6409402", "0.6407872", "0.6377052", "0.63667095", "0.63367677", "0.6309389", "0.6305326", "0.6250164", "0.62255645", "0.6205874", "0.6163778", "0.61330336", "0.61247444", "0.6103288", "0.60753363", "0.60263306", "0.6025251", "0.5989666", "0.59818256", "0.59584093", "0.59583795", "0.5923081", "0.5908367", "0.58729357", "0.5862505", "0.5816559", "0.581551", "0.57620996", "0.57574505", "0.57386434", "0.5731173", "0.57179344", "0.56851226", "0.5677507", "0.56732506", "0.5672469", "0.5664541", "0.5656453", "0.5654149", "0.5637329", "0.56277794", "0.5621279", "0.55684936", "0.5532177", "0.55142415", "0.55083674", "0.5495221", "0.54946166", "0.54699", "0.54691446", "0.5465933", "0.545771", "0.54260504", "0.541185", "0.5399939", "0.5385316", "0.53836906", "0.5381743", "0.5377034", "0.5371954", "0.5371498", "0.5370927", "0.5357437", "0.535062", "0.5324633", "0.53159636", "0.5304701", "0.53046733", "0.53046733", "0.53046733", "0.5301835", "0.5287431", "0.5273373", "0.5257891", "0.52408016", "0.52408016", "0.52344316", "0.5199493", "0.51953", "0.51940393", "0.51801723", "0.5175556", "0.5175502", "0.5167326", "0.51543105", "0.5135783", "0.51348424" ]
0.8794198
0
WriteBit is a helper method to define mock.On call logicalName string value bool writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {
	return &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On("WriteBit",
		append([]interface{}{logicalName, value}, writerArgs...)...)}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_mr *MockOStreamMockRecorder) WriteBit(arg0 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteBit\", reflect.TypeOf((*MockOStream)(nil).WriteBit), arg0)\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) 
{\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := 
BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) Bit(b byte) (err error) {\n\tw.bits <<= 1\n\tw.bits |= (b & 1)\n\tw.free--\n\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\treturn\n}", "func (t *Target) Bit(b bool) usm.Value {\n\tif b {\n\t\treturn \"true\"\n\t}\n\treturn \"false\"\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteBit(bit Bit) error {\n\tif bit {\n\t\tw.b[0] |= 1 << (w.count - 1)\n\t}\n\n\tw.count--\n\n\tif w.count == 0 {\n\t\t// fill byte to io.Writer\n\t\tif n, err := w.w.Write(w.b[:]); n != 1 || err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.b[0] = 0\n\t\tw.count = 8\n\t}\n\n\treturn nil\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (this *DefaultOutputBitStream) WriteBit(bit int) {\n\tif this.bitIndex <= 0 { // bitIndex = -1 if stream is closed => force pushCurrent() => panic\n\t\tthis.current |= uint64(bit & 1)\n\t\tthis.pushCurrent()\n\t} else {\n\t\tthis.current |= (uint64(bit&1) << uint(this.bitIndex))\n\t\tthis.bitIndex--\n\t}\n\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (bm BitMap) SetBit(ctx context.Context, offset int64, value int) (int64, error) {\n\treq := newRequest(\"*4\\r\\n$6\\r\\nSETBIT\\r\\n$\")\n\treq.addStringInt2(bm.name, offset, int64(value))\n\treturn bm.c.cmdInt(ctx, req)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteBool(buffer []byte, offset int, value bool) {\n if value {\n buffer[offset] = 1\n } else {\n buffer[offset] = 0\n }\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), 
b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e 
*MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func WriteBool(p thrift.TProtocol, value bool, name string, field int16) error {\n\treturn WriteBoolWithContext(context.Background(), p, value, name, field)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func SetBit(b byte, idx uint, flag bool) byte {\n\tif idx < 0 || idx > 7 {\n\t\tlog.Panic(\"the idx must be from 0 to 7\")\n\t}\n\tif flag {\n\t\treturn b | (1 << idx)\n\t}\n\treturn b &^ (1 << idx)\n}", "func bit(cpu *CPU, r, b byte) {\n\tbit := (r>>b)&1 == 0\n\tcpu.SetZero(bit)\n\tcpu.SetNegative(false)\n\tcpu.SetHalfCarry(true)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func OnBit(num int, nth int) int {\n\treturn num | (1 << uint(nth))\n}", "func OnBit(num, nth int) int {\n\treturn num | (1 << uint(nth))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_mr *MockOStreamMockRecorder) WriteBits(arg0, arg1 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteBits\", reflect.TypeOf((*MockOStream)(nil).WriteBits), arg0, arg1)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func setBit(num int, i int) int {\n\treturn num | (1 << i)\n}", "func setBit(addr uint32, bit uint, val uint) uint32 {\n\tif bit < 0 {\n\t\tpanic(\"negative bit index\")\n\t}\n\n\tif val == 0 {\n\t\treturn addr & ^(1 << (32 - bit))\n\t} else if val == 1 {\n\t\treturn addr | (1 << (32 - bit))\n\t} else {\n\t\tpanic(\"set bit is not 0 or 1\")\n\t}\n}", "func setBit(bitboard uint64, square int) uint64 {\n\treturn bitboard | (1 << square)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (r *R1_eg) setBit(idx1 int, value int) {\n\tr.Values[idx1] = value\n}", "func (z *Int) SetBit(x *Int, i int, b uint) *Int {}", "func (b *BitStream) WriteBit(bit Bit) {\n\n\tif b.count == 0 {\n\t\tb.stream = append(b.stream, 0)\n\t\tb.count = 8\n\t}\n\n\ti := len(b.stream) - 1\n\n\tif bit {\n\t\tb.stream[i] |= 1 << (b.count - 1)\n\t}\n\tb.bitsWritten++\n\tb.count--\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. 
Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_m *MockIStream) ReadBit() (Bit, error) {\n\tret := _m.ctrl.Call(_m, \"ReadBit\")\n\tret0, _ := ret[0].(Bit)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\t\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\t\n\tfmt.Println(\"saving state for key: \" + key);\n\t\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteBool(b bool) {\n\tif b {\n\t\tw.cache |= 1 << (w.available - 1)\n\t}\n\n\tw.available--\n\n\tif w.available == 0 {\n\t\t// WriteByte never returns error\n\t\t_ = w.out.WriteByte(w.cache)\n\t\tw.cache = 0\n\t\tw.available = 8\n\t}\n}", "func setWriteFlag(tx *bolt.Tx) {\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) BitIndex() int64", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, 
bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *Output_Expecter) WriteOne(ctx interface{}, msg interface{}) *Output_WriteOne_Call {\n\treturn &Output_WriteOne_Call{Call: _e.mock.On(\"WriteOne\", ctx, msg)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\r\n\tvar key, value string\r\n\tvar err error\r\n\t\r\n\r\n\tfmt.Println(\"running write()\")\r\n\r\n\tif len(args) != 2 {\r\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\r\n\t}\r\n\r\n\tkey = args[0] //rename for funsies\r\n\tvalue = args[1]\r\n\t\r\n\tv,err := stub.GetState(key)\r\n\tif v!=nil {\r\n\t\treturn nil, errors.New(\"Key already exists\")\r\n\t} else {\r\n\t\r\n\t\r\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\t\r\n\t}\r\n\treturn nil, nil\r\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func SetBit(n int, pos uint, val int) int {\n\tn |= (val << pos)\n\treturn n\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\t\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (c *CPU) Bit(r Register, bitnum byte) {\n\tc.MaybeFlagSetter(c.reg[r]&(1<<bitnum) == 0, ZFlag)\n\tc.ResetFlag(NFlag)\n\tc.SetFlag(HFlag)\n\n}", "func setBit(bitmap []byte, idx int) {\n\tbitmap[idx/8] = bitmap[idx/8] | (1 << uint(idx%8))\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (m *Value) Bool() bool { return m.BoolMock() }", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func pinName(name string, bit int) string {\n\treturn name + \"[\" + strconv.Itoa(bit) + \"]\"\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", "func WriteBool(w io.Writer, v bool) error {\n\td := make([]byte, 1, 1)\n\tswitch v {\n\tcase true:\n\t\td[0] = 
1\n\tcase false:\n\t\td[0] = 0\n\t}\n\t_, err := w.Write(d)\n\treturn err\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteBool(b bool) {\n\tif b {\n\t\tw.writeByte(TagTrue)\n\t} else {\n\t\tw.writeByte(TagFalse)\n\t}\n}" ]
[ "0.8424163", "0.83194363", "0.8061882", "0.73231965", "0.6247864", "0.62236214", "0.6153121", "0.61043453", "0.6038922", "0.6028842", "0.59809315", "0.59227896", "0.58578914", "0.5830575", "0.5801824", "0.5787644", "0.57507336", "0.57506317", "0.57121617", "0.5708353", "0.5707085", "0.5682893", "0.56769884", "0.5672135", "0.56315494", "0.55884534", "0.55537015", "0.55458534", "0.55314136", "0.55259085", "0.55083936", "0.54884136", "0.5487009", "0.5436773", "0.5428234", "0.54202807", "0.53993684", "0.5392845", "0.53896576", "0.53745896", "0.5371009", "0.5364307", "0.5358244", "0.53564787", "0.53497475", "0.53401107", "0.53375256", "0.53291243", "0.5327832", "0.5320359", "0.5316753", "0.5315748", "0.5291817", "0.5291439", "0.5286923", "0.52804327", "0.5279588", "0.5279588", "0.5279588", "0.5275581", "0.52634674", "0.5245457", "0.5245457", "0.52400696", "0.52398753", "0.52379465", "0.52302074", "0.522037", "0.5210313", "0.51990294", "0.51958776", "0.5190837", "0.51762015", "0.5174247", "0.51596045", "0.51556915", "0.5128927", "0.5112089", "0.50798273", "0.5076103", "0.5071218", "0.50670147", "0.50607353", "0.50592816", "0.5049526", "0.504676", "0.50423455", "0.5035068", "0.5033394", "0.5032613", "0.50298655", "0.5025625", "0.5024585", "0.50215983", "0.50109595", "0.5001426", "0.5000772", "0.49996525", "0.49914756", "0.4988419" ]
0.82706326
2
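As a side note, the following is a minimal usage sketch of the WriteBit expecter shown in the row above; it is not part of the dataset. It assumes the rest of the mockery-generated file exists in the same package — in particular a NewMockWriteBufferJsonBased(t) constructor, an EXPECT() accessor returning *MockWriteBufferJsonBased_Expecter, and a Return(error) method on the generated _Call type. Those names are assumptions and are not shown in the row itself.

// Hypothetical test using the WriteBit expecter above (mockery-style sketch).
// Assumes the generated constructor, EXPECT() accessor and Return helper exist.
func TestWriteBitExpectation(t *testing.T) {
	wb := NewMockWriteBufferJsonBased(t) // assumed generated constructor

	// Register the expectation: WriteBit("flagField", true) should return nil.
	wb.EXPECT().WriteBit("flagField", true).Return(nil)

	// The code under test would call WriteBit through the write-buffer interface.
	if err := wb.WriteBit("flagField", true); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}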
WriteByte provides a mock function with given fields: logicalName, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e 
*WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m *MockSerial) Write(arg0 
[]byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWrite(t *testing.T) {\n\tconst input = \"OK\"\n\n\tvar buf bytes.Buffer\n\tc := NewCoder(&buf)\n\n\tio.WriteString(c, input)\n\tif got := buf.String(); got != input {\n\t\tt.Errorf(\"Write(c, %q): got %q, want %q\", input, got, input)\n\t}\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) 
{\n\ttr := NewTFramedTransport(new(mockTTransport))\n\tbuff := make([]byte, 10)\n\n\tn, err := tr.Write(buff)\n\n\tassert.Equal(t, 10, n)\n\tassert.Nil(t, err)\n\tassert.Equal(t, buff, tr.buf.Bytes())\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (m *mSignatureKeyHolderMockWriteTo) Set(f func(p io.Writer) (r int64, r1 error)) *SignatureKeyHolderMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.WriteToFunc = f\n\treturn m.mock\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) 
{\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func (m *MockSeriesRef) Write(ctx context.Context, timestamp time.UnixNano, value float64, unit time.Unit, annotation []byte, wOpts series.WriteOptions) (bool, series.WriteType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, timestamp, value, unit, annotation, wOpts)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(series.WriteType)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (m *MockConn) Write(arg0 core.WriteableFrame) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, 
n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *SignatureKeyHolderMock) WriteTo(p io.Writer) (r int64, r1 error) {\n\tcounter := atomic.AddUint64(&m.WriteToPreCounter, 1)\n\tdefer atomic.AddUint64(&m.WriteToCounter, 1)\n\n\tif len(m.WriteToMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.WriteToMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. 
%v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.WriteToMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\n\t\tresult := m.WriteToMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToMock.mainExpectation != nil {\n\n\t\tinput := m.WriteToMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.WriteToMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. %v\", p)\n\t\treturn\n\t}\n\n\treturn m.WriteToFunc(p)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFileWriter) Write(arg0 string) error {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value 
int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFile) WriteString(arg0 string) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteString\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestBufferWriteByte(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\tinput := []byte{0x4f, 0xff, 0x13, 0x2}\n\n\t\tfor _, b := range input {\n\t\t\terr := w.WriteByte(b)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, 4, len(w.data))\n\t\tfor i := 0; i < len(w.data); i++ {\n\t\t\tassert.Equal(t, input[i], w.data[i])\n\t\t}\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\terr := w.WriteByte(0xff)\n\t\trequire.NoError(t, err)\n\t\terr = w.WriteByte(0x00)\n\t\trequire.NoError(t, err)\n\n\t\terr = w.WriteByte(0x23)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, byte(0xff), w.data[0])\n\t\tassert.Equal(t, byte(0x00), w.data[1])\n\t\tassert.Equal(t, byte(0x23), w.data[2])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\t// insert empty byte and reset the byte index to 0\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume there are 5 bits already set\n\t\tw.bitIndex = 5\n\n\t\tinput := []byte{0x4f, 0xff, 0x13}\n\n\t\tfor _, b := range input {\n\t\t\terr := w.WriteByte(b)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// \t00000000\n\t\t// ^\n\t\t//\n\t\t// 0x4f - 01001111\n\t\t// 01001111 << 5 = 11100000\n\t\t//\t\t\t\t | 11100000\n\t\t// 11100000 - 0xE0\n\t\texpected := byte(0xe0)\n\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is %08b\", expected, w.data[0])\n\n\t\t// 0xff - 11111111\n\t\t// 01001111 >> 3 = 00001001\n\t\t// 11111111 << 5 = 11100000\n\t\t// \t\t\t\t | 11101001\n\t\t// 11101001 = 0xe9\n\t\texpected = byte(0xe9)\n\t\tassert.Equal(t, expected, w.data[1], \"expected %08b is %08b\", expected, w.data[1])\n\n\t\t// 0x13 - 00010011\n\t\t// 11111111 >> 3 = 00011111\n\t\t// 00010011 << 5 = 01100000\n\t\t//\t\t\t | 01111111\n\t\t// 01111111 = 0x7F\n\t\texpected = byte(0x7F)\n\t\tassert.Equal(t, expected, w.data[2], \"expected %08b is %08b\", expected, w.data[2])\n\n\t\t// 00010011 >> 3 = 
00000010\n\t\t// 00000010 = 0x02\n\t\texpected = byte(0x02)\n\t\tassert.Equal(t, expected, w.data[3])\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\tinput := []byte{0x4f, 0xff, 0x13, 0x2}\n\n\t\t\tfor _, b := range input {\n\t\t\t\terr := w.WriteByte(b)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Len(t, w.data, 4)\n\t\t\tfor i := 0; i < len(w.data); i++ {\n\t\t\t\tassert.Equal(t, input[i], w.data[i])\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte and reset it's byte index to 0.\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\tw.byteIndex = 0\n\t\t\t// assume that 5 empty bits are already written\n\t\t\tw.bitIndex = 5\n\n\t\t\tinput := []byte{0x4f, 0xff, 0x13}\n\n\t\t\tfor _, b := range input {\n\t\t\t\terr := w.WriteByte(b)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// 0x4f - 01001111\n\t\t\t// 01001111 >> 5 = 00000010\n\t\t\t// \t\t\t\t 0x02\n\t\t\texpected := byte(0x02)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is %08b\", expected, w.data[0])\n\n\t\t\t// 0xff - 11111111\n\t\t\t// 01001111 << 3 = 01111000\n\t\t\t// 11111111 >> 5 = 00000111\n\t\t\t//\t\t\t\t | 01111111\n\t\t\t// 01111111 = 0x7F\n\t\t\texpected = byte(0x7F)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected %08b is %08b\", expected, w.data[1])\n\n\t\t\t// 0x13 - 00010011\n\t\t\t// 11111111 << 3 = 11111000\n\t\t\t// 00010011 >> 5 = 00000000\n\t\t\t// \t\t\t\t | 11111000\n\t\t\t// 11111000 = 0xF8\n\t\t\texpected = byte(0xf8)\n\t\t\tassert.Equal(t, expected, w.data[2], \"expected %08b is %08b\", expected, w.data[2])\n\n\t\t\t// 00010011 << 3 = 10011000\n\t\t\t// 10011000 - 0x98\n\t\t\texpected = byte(0x98)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (m *MockResponseWriter) WriteHeader(arg0 int) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"WriteHeader\", arg0)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockInternalServer) CryptoAsymKeyWrite(arg0 context.Context, arg1 *CryptoAsymKeyWriteRequestMsg) (*CryptoAsymKeyWriteResponseMsg, error) {\n\tret := m.ctrl.Call(m, \"CryptoAsymKeyWrite\", arg0, arg1)\n\tret0, _ := ret[0].(*CryptoAsymKeyWriteResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *GoMockDiskStorage) Write(chunk []byte) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", chunk)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *mockResponseWriter) Write(content []byte) (int, error) {\n\treturn m.body.Write(content)\n}", "func (m *MockInternalClient) CryptoAsymKeyWrite(ctx context.Context, in *CryptoAsymKeyWriteRequestMsg, opts ...grpc.CallOption) (*CryptoAsymKeyWriteResponseMsg, error) {\n\tvarargs := []interface{}{ctx, in}\n\tfor _, a := range opts {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"CryptoAsymKeyWrite\", varargs...)\n\tret0, _ := ret[0].(*CryptoAsymKeyWriteResponseMsg)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *MockWriter) Write(p []byte) (int, error) {\n\tw.Entries = append(w.Entries, string(p))\n\treturn len(p), nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_m *Output) WriteOne(ctx context.Context, msg stream.WritableMessage) error {\n\tret := _m.Called(ctx, msg)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, msg)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *cacheableStoreMock) Write() error {\n\treturn c.err\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (p *WriterStub) Write(buf []byte) (int, error) {\n\tp.Body = append(p.Body, buf...)\n\treturn len(p.Body), nil\n}", "func (_m *HadolintPiperFileUtils) FileWrite(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mmWriteTo *mDigestHolderMockWriteTo) Set(f func(w io.Writer) (n int64, err error)) *DigestHolderMock {\n\tif mmWriteTo.defaultExpectation != nil {\n\t\tmmWriteTo.mock.t.Fatalf(\"Default expectation is already set for the DigestHolder.WriteTo method\")\n\t}\n\n\tif 
len(mmWriteTo.expectations) > 0 {\n\t\tmmWriteTo.mock.t.Fatalf(\"Some expectations are already set for the DigestHolder.WriteTo method\")\n\t}\n\n\tmmWriteTo.mock.funcWriteTo = f\n\treturn mmWriteTo.mock\n}", "func (m *MockTChanNode) Write(ctx thrift.Context, req *WriteRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, req)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}" ]
[ "0.67002857", "0.6636245", "0.65939057", "0.648245", "0.64470804", "0.6290206", "0.6226514", "0.617506", "0.6166279", "0.6156794", "0.6124814", "0.6121386", "0.6108336", "0.6086509", "0.607821", "0.60465515", "0.60111666", "0.6007855", "0.6005255", "0.5979612", "0.5977461", "0.59634435", "0.5936804", "0.5904282", "0.589715", "0.5893065", "0.5857352", "0.58546805", "0.584457", "0.5844098", "0.5826795", "0.5820111", "0.58170676", "0.58142644", "0.5812465", "0.57961595", "0.57898647", "0.5781413", "0.57378626", "0.5720255", "0.5713031", "0.5713031", "0.571164", "0.570684", "0.5692718", "0.56898034", "0.566784", "0.5657327", "0.5650172", "0.5649642", "0.56456107", "0.5639882", "0.5629152", "0.56020725", "0.5598988", "0.5594355", "0.55905336", "0.5576098", "0.555211", "0.55361384", "0.5524049", "0.5522995", "0.55212635", "0.5521036", "0.55201626", "0.55173725", "0.5511685", "0.5506536", "0.5500695", "0.5494674", "0.5481503", "0.5480499", "0.5472801", "0.5457492", "0.5441563", "0.5433162", "0.5407296", "0.540486", "0.53945", "0.53865117", "0.5375631", "0.53717494", "0.5367421", "0.5340823", "0.53285617", "0.5323165", "0.5320679", "0.53117293", "0.53117293", "0.53117293", "0.52992094", "0.5290412", "0.5287516", "0.5276763", "0.5276684", "0.5273302", "0.5267219", "0.5261081", "0.5261081", "0.5245717" ]
0.6813392
0
WriteByte is a helper method to define mock.On call logicalName string value byte writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call { return &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On("WriteByte", append([]interface{}{logicalName, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, 
bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) 
WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func TestWrite(t *testing.T) {\n\tconst input = \"OK\"\n\n\tvar buf bytes.Buffer\n\tc := NewCoder(&buf)\n\n\tio.WriteString(c, input)\n\tif got := buf.String(); got != input {\n\t\tt.Errorf(\"Write(c, %q): got %q, want %q\", input, got, input)\n\t}\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockWriteBufferXmlBased) 
WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *Output_Expecter) WriteOne(ctx interface{}, msg interface{}) *Output_WriteOne_Call {\n\treturn &Output_WriteOne_Call{Call: _e.mock.On(\"WriteOne\", ctx, msg)}\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (p *WriterStub) Write(buf []byte) (int, error) {\n\tp.Body = append(p.Body, buf...)\n\treturn len(p.Body), nil\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 
11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func writeExpected(t *testing.T, streamName string, w io.Writer, data string) {\n\tn, err := io.WriteString(w, data)\n\tassert.NoError(t, err, \"stream %s\", streamName)\n\tassert.Equal(t, len(data), n, \"stream %s\", streamName)\n}", "func TestWrite(t *testing.T) {\n\ttr := NewTFramedTransport(new(mockTTransport))\n\tbuff := make([]byte, 10)\n\n\tn, err := tr.Write(buff)\n\n\tassert.Equal(t, 10, n)\n\tassert.Nil(t, err)\n\tassert.Equal(t, buff, tr.buf.Bytes())\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *mSignatureKeyHolderMockWriteTo) Set(f func(p io.Writer) (r int64, r1 error)) *SignatureKeyHolderMock {\n\tm.mainExpectation = nil\n\tm.expectationSeries = nil\n\n\tm.mock.WriteToFunc = f\n\treturn m.mock\n}", "func TestBufferWriteByte(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\tinput := []byte{0x4f, 0xff, 0x13, 0x2}\n\n\t\tfor _, b := range input {\n\t\t\terr := w.WriteByte(b)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, 4, len(w.data))\n\t\tfor i := 0; i < len(w.data); i++ {\n\t\t\tassert.Equal(t, input[i], w.data[i])\n\t\t}\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\terr := w.WriteByte(0xff)\n\t\trequire.NoError(t, err)\n\t\terr = w.WriteByte(0x00)\n\t\trequire.NoError(t, err)\n\n\t\terr = w.WriteByte(0x23)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, byte(0xff), w.data[0])\n\t\tassert.Equal(t, byte(0x00), w.data[1])\n\t\tassert.Equal(t, byte(0x23), w.data[2])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\t// insert empty byte and reset the byte index to 0\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume there are 5 bits already set\n\t\tw.bitIndex = 5\n\n\t\tinput := []byte{0x4f, 0xff, 0x13}\n\n\t\tfor _, b := range input {\n\t\t\terr := w.WriteByte(b)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// \t00000000\n\t\t// ^\n\t\t//\n\t\t// 0x4f - 01001111\n\t\t// 01001111 << 5 = 11100000\n\t\t//\t\t\t\t | 11100000\n\t\t// 11100000 - 0xE0\n\t\texpected := byte(0xe0)\n\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is %08b\", expected, w.data[0])\n\n\t\t// 0xff - 11111111\n\t\t// 01001111 >> 3 = 00001001\n\t\t// 11111111 << 5 = 11100000\n\t\t// \t\t\t\t | 11101001\n\t\t// 11101001 = 0xe9\n\t\texpected = byte(0xe9)\n\t\tassert.Equal(t, expected, w.data[1], \"expected %08b is %08b\", expected, w.data[1])\n\n\t\t// 0x13 - 
00010011\n\t\t// 11111111 >> 3 = 00011111\n\t\t// 00010011 << 5 = 01100000\n\t\t//\t\t\t | 01111111\n\t\t// 01111111 = 0x7F\n\t\texpected = byte(0x7F)\n\t\tassert.Equal(t, expected, w.data[2], \"expected %08b is %08b\", expected, w.data[2])\n\n\t\t// 00010011 >> 3 = 00000010\n\t\t// 00000010 = 0x02\n\t\texpected = byte(0x02)\n\t\tassert.Equal(t, expected, w.data[3])\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\tinput := []byte{0x4f, 0xff, 0x13, 0x2}\n\n\t\t\tfor _, b := range input {\n\t\t\t\terr := w.WriteByte(b)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Len(t, w.data, 4)\n\t\t\tfor i := 0; i < len(w.data); i++ {\n\t\t\t\tassert.Equal(t, input[i], w.data[i])\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte and reset it's byte index to 0.\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\tw.byteIndex = 0\n\t\t\t// assume that 5 empty bits are already written\n\t\t\tw.bitIndex = 5\n\n\t\t\tinput := []byte{0x4f, 0xff, 0x13}\n\n\t\t\tfor _, b := range input {\n\t\t\t\terr := w.WriteByte(b)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// 0x4f - 01001111\n\t\t\t// 01001111 >> 5 = 00000010\n\t\t\t// \t\t\t\t 0x02\n\t\t\texpected := byte(0x02)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is %08b\", expected, w.data[0])\n\n\t\t\t// 0xff - 11111111\n\t\t\t// 01001111 << 3 = 01111000\n\t\t\t// 11111111 >> 5 = 00000111\n\t\t\t//\t\t\t\t | 01111111\n\t\t\t// 01111111 = 0x7F\n\t\t\texpected = byte(0x7F)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected %08b is %08b\", expected, w.data[1])\n\n\t\t\t// 0x13 - 00010011\n\t\t\t// 11111111 << 3 = 11111000\n\t\t\t// 00010011 >> 5 = 00000000\n\t\t\t// \t\t\t\t | 11111000\n\t\t\t// 11111000 = 0xF8\n\t\t\texpected = byte(0xf8)\n\t\t\tassert.Equal(t, expected, w.data[2], \"expected %08b is %08b\", expected, w.data[2])\n\n\t\t\t// 00010011 << 3 = 10011000\n\t\t\t// 10011000 - 0x98\n\t\t\texpected = byte(0x98)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_mr *MockOStreamMockRecorder) 
WriteByte(arg0 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteByte\", reflect.TypeOf((*MockOStream)(nil).WriteByte), arg0)\n}", "func (m *MockConn) Write(arg0 core.WriteableFrame) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockResponseWriter) WriteHeader(arg0 int) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"WriteHeader\", arg0)\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\t\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\t\n\tfmt.Println(\"saving state for key: \" + key);\n\t\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n}", "func (c *MockRemoteWriteClient) Name() string { return \"\" }", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (w *MockWriter) Write(p []byte) (int, error) {\n\tw.Entries = append(w.Entries, string(p))\n\treturn len(p), nil\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func writeCommand(w io.Writer, op string, args ...interface{}) error {\n\tfor _, arg := range args {\n\t\t// TODO: Use the same buffer for all arguments\n\t\tif m, err := marshal(nil, arg); err == nil {\n\t\t\tif _, err := w.Write(append(m, ' ')); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\treturn err\n\t\t}\n\t}\n\tif _, err := io.WriteString(w, op); err != nil {\n\t\treturn err\n\t}\n\tif _, err := w.Write([]byte{'\\n'}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func WriteByte(buffer []byte, offset int, value byte) {\n buffer[offset] = value\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\r\n\tvar key, value string\r\n\tvar err error\r\n\t\r\n\r\n\tfmt.Println(\"running write()\")\r\n\r\n\tif len(args) != 2 {\r\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\r\n\t}\r\n\r\n\tkey = args[0] //rename for funsies\r\n\tvalue = args[1]\r\n\t\r\n\tv,err := stub.GetState(key)\r\n\tif v!=nil {\r\n\t\treturn nil, errors.New(\"Key already exists\")\r\n\t} else {\r\n\t\r\n\t\r\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\t\r\n\t}\r\n\treturn nil, nil\r\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\t\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockFile) WriteString(arg0 string) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteString\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockPlcWriteResponse_Expecter) String() *MockPlcWriteResponse_String_Call {\n\treturn &MockPlcWriteResponse_String_Call{Call: _e.mock.On(\"String\")}\n}", "func sendMagicNumber(writer io.Writer, magicNumber magicNumberBytes) error {\n\t_, err := writer.Write(magicNumber[:])\n\treturn err\n}", "func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func writer(coord string) {\n\tbroadcast <- coord\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, 
uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func (i *Injector) Send(t *testing.T, b []byte) {\n\tt.Helper()\n\n\tn, err := unix.Write(i.fd, b)\n\tif err != nil {\n\t\tt.Fatalf(\"can't write bytes of len %d: %s\", len(b), err)\n\t}\n\tif n != len(b) {\n\t\tt.Fatalf(\"got %d bytes written, want %d\", n, len(b))\n\t}\n}", "func (mmWriteTo *mDigestHolderMockWriteTo) Set(f func(w io.Writer) (n int64, err error)) *DigestHolderMock {\n\tif mmWriteTo.defaultExpectation != nil {\n\t\tmmWriteTo.mock.t.Fatalf(\"Default expectation is already set for the DigestHolder.WriteTo method\")\n\t}\n\n\tif len(mmWriteTo.expectations) > 0 {\n\t\tmmWriteTo.mock.t.Fatalf(\"Some expectations are already set for the DigestHolder.WriteTo method\")\n\t}\n\n\tmmWriteTo.mock.funcWriteTo = f\n\treturn mmWriteTo.mock\n}", "func WriteString(buffer []byte, offset int, value string) {\n WriteBytes(buffer, offset, []byte(value))\n}", "func (c *Call) Write(code int, r io.Reader) error {\n\tc.code = code\n\tc.writer.WriteHeader(c.code)\n\t_, err := io.Copy(c.writer, r)\n\tc.done = true\n\treturn err\n}", "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (rtc *RTC) Write(target, value byte) {\n\trtc.Ctr[target-0x08] = value\n}", "func (cpu *Mos6502) write(address word, data byte) {\n\tcpu.bus.Write(uint16(address), data)\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteByte() {\n\tfmt.Println(\"----------------> WriteByte\")\n\tvar b byte = '!'\n\tbuf := bytes.NewBufferString(\"hello\")\n\tfmt.Println(buf.String())\n\n\t//write byte at then end of buffer\n\tbuf.WriteByte(b)\n\n\tfmt.Println(buf.String())\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (d *Driver) write(data []byte) error {\n\t// d.log(\"write\", time.Now(), data)\n\tn, err := d.device.Write(data)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif n != len(data) {\n\t\treturn errors.New(\"unexpected write size\")\n\t}\n\treturn nil\n}", "func (w *Writer) WriteByte(data interface{}) {\n\tvar t = w.getType(data, 1)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.index++\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func testWrite(c *testContext, flow testFlow, checkers ...checker.NetworkChecker) uint16 {\n\tc.t.Helper()\n\treturn testWriteInternal(c, flow, true, checkers...)\n}" ]
[ "0.6638451", "0.642947", "0.6239703", "0.6239353", "0.6178896", "0.61765397", "0.60629034", "0.60372734", "0.6028312", "0.597386", "0.5955224", "0.5878804", "0.5844925", "0.5805151", "0.5804814", "0.58004725", "0.57872075", "0.56363416", "0.56325155", "0.5610619", "0.5540477", "0.5536064", "0.55119187", "0.5483463", "0.54763114", "0.54613775", "0.54573494", "0.5404257", "0.5397804", "0.5323106", "0.53152347", "0.52880263", "0.5281167", "0.5276997", "0.5251946", "0.51995784", "0.5189675", "0.5184659", "0.51726127", "0.5167047", "0.51602", "0.51546645", "0.5149028", "0.5149028", "0.5149028", "0.514721", "0.5139284", "0.51372933", "0.5130844", "0.5130844", "0.51302254", "0.50859797", "0.5060954", "0.50534695", "0.50439674", "0.5027895", "0.5026835", "0.5020015", "0.5005605", "0.49839854", "0.49753273", "0.4972085", "0.49656045", "0.496419", "0.49609882", "0.49531183", "0.49419713", "0.49404812", "0.49403825", "0.4932556", "0.49271592", "0.49207315", "0.49114543", "0.49106526", "0.48886776", "0.48727918", "0.4872103", "0.48631036", "0.48558685", "0.4854708", "0.48545778", "0.4849567", "0.48491108", "0.48482582", "0.48476905", "0.48461267", "0.4845749", "0.48445717", "0.48430735", "0.48384944", "0.48347846", "0.48304188", "0.48292464", "0.48280993", "0.4820976", "0.4820976", "0.4812208", "0.4811174", "0.48094496", "0.4805785" ]
0.68581784
0
WriteByteArray provides a mock function with given fields: logicalName, data, writerArgs
func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, data) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok { r0 = rf(logicalName, data, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca 
[]interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockQueue) PublishBytes(arg0 ...[]byte) bool {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range arg0 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"PublishBytes\", varargs...)\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, 
...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockedAcknowledgementStorage) writeDataToFile() error {\n\targs := m.Called()\n\treturn args.Error(0)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", "func (_m *HadolintPiperFileUtils) FileWrite(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, 
\"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, 
bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockPostForkBlock) Bytes() []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bytes\")\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockManager) SerializeShipMetadata(arg0 api.ShipAppMetadata, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeShipMetadata\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *GoMockDiskStorage) Write(chunk []byte) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", chunk)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, 
value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *SignatureKeyHolderMock) AsBytes() (r []byte) {\n\tcounter := atomic.AddUint64(&m.AsBytesPreCounter, 1)\n\tdefer atomic.AddUint64(&m.AsBytesCounter, 1)\n\n\tif len(m.AsBytesMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.AsBytesMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsBytes.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.AsBytesMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsBytes\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsBytesMock.mainExpectation != nil {\n\n\t\tresult := m.AsBytesMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsBytes\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsBytesFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsBytes.\")\n\t\treturn\n\t}\n\n\treturn m.AsBytesFunc()\n}", "func (m *MockManager) SerializeContentSHA(arg0 string) error {\n\tret := m.ctrl.Call(m, \"SerializeContentSHA\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockTestTransportInstance) DrainWriteBuffer(numBytes uint32) []byte {\n\tret := _m.Called(numBytes)\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func(uint32) []byte); ok {\n\t\tr0 = rf(numBytes)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockSession) WriteMapData(name string, data map[string]interface{}) error {\n\targs := m.Mock.Called(name, data)\n\treturn args.Error(0)\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockSpaceStorage) WriteSpaceHash(arg0 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteSpaceHash\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWrite(t *testing.T) {\n\ttr := NewTFramedTransport(new(mockTTransport))\n\tbuff := make([]byte, 10)\n\n\tn, err := tr.Write(buff)\n\n\tassert.Equal(t, 10, n)\n\tassert.Nil(t, err)\n\tassert.Equal(t, buff, tr.buf.Bytes())\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (m *MockManager) WriteFileMetaData(arg0 string, arg1 *storage.FileMetaData) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteFileMetaData\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (m *MockEncoder) Bytes() []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bytes\")\n\tret0, _ := 
ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (m *MockManager) WriteDownloadFile(arg0 string, arg1, arg2 int64, arg3 io.Reader) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteDownloadFile\", arg0, arg1, arg2, arg3)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *GCSUploader) UploadBytes(ctx context.Context, data []byte, fallbackSrc string, dst string) error {\n\tret := _m.Called(ctx, data, fallbackSrc, dst)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []byte, string, string) error); ok {\n\t\tr0 = rf(ctx, data, fallbackSrc, dst)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := 
make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (p *WriterStub) Write(buf []byte) (int, error) {\n\tp.Body = append(p.Body, buf...)\n\treturn len(p.Body), nil\n}", "func (mwc *MockWriteCloser) Bytes() []byte {\n\treturn mwc.b.Bytes()\n}", "func (m *MockValues) Bytes() []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bytes\")\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (_m *DynamoDBAPIMock) BatchWriteItem(_a0 *dynamodb.BatchWriteItemInput) (*dynamodb.BatchWriteItemOutput, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 *dynamodb.BatchWriteItemOutput\n\tif rf, ok := ret.Get(0).(func(*dynamodb.BatchWriteItemInput) *dynamodb.BatchWriteItemOutput); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*dynamodb.BatchWriteItemOutput)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*dynamodb.BatchWriteItemInput) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *SignatureKeyHolderMock) AsByteString() (r string) {\n\tcounter := atomic.AddUint64(&m.AsByteStringPreCounter, 1)\n\tdefer atomic.AddUint64(&m.AsByteStringCounter, 1)\n\n\tif len(m.AsByteStringMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.AsByteStringMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsByteString.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.AsByteStringMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsByteString\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringMock.mainExpectation != nil {\n\n\t\tresult := m.AsByteStringMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsByteString\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsByteString.\")\n\t\treturn\n\t}\n\n\treturn m.AsByteStringFunc()\n}", "func (_m *AppFunctionContext) ResponseData() []byte {\n\tret := _m.Called()\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func() []byte); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (m *MockConn) Write(arg0 core.WriteableFrame) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, 
...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFileWriter) Write(arg0 string) error {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (res *ServerHTTPResponse) writeBytes(bytes []byte) {\n\t_, err := res.responseWriter.Write(bytes)\n\tif err != nil {\n\t\t/* coverage ignore next line */\n\t\tres.contextLogger.Error(res.Request.Context(),\n\t\t\t\"Could not write string to resp body\",\n\t\t\tzap.Error(err),\n\t\t\tzap.String(\"bytesLength\", strconv.Itoa(len(bytes))),\n\t\t)\n\t}\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *GoMockDiskStorage) WriteFile(filePath string, perm os.FileMode) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteFile\", filePath, perm)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}" ]
[ "0.8165671", "0.7775948", "0.75337565", "0.6698076", "0.6436688", "0.6303499", "0.6229756", "0.6213682", "0.6141898", "0.60687166", "0.59944326", "0.5961416", "0.5864685", "0.58582264", "0.57701546", "0.5765581", "0.57435733", "0.57435733", "0.57435733", "0.57435733", "0.57435733", "0.57328755", "0.57213867", "0.57044077", "0.5594058", "0.5583197", "0.5577007", "0.5575721", "0.5563489", "0.555548", "0.55347604", "0.5524994", "0.55185074", "0.5507157", "0.5503958", "0.5470005", "0.544035", "0.54012436", "0.5400395", "0.53738815", "0.53483343", "0.5342768", "0.5341693", "0.5339284", "0.533612", "0.5332899", "0.5332035", "0.53139925", "0.5302512", "0.5301373", "0.5301324", "0.5274847", "0.5254008", "0.5226414", "0.52255774", "0.5222305", "0.52143323", "0.5207058", "0.5205985", "0.5201722", "0.5201722", "0.5179056", "0.5168025", "0.51606715", "0.5155456", "0.51442677", "0.5122302", "0.5086221", "0.50827146", "0.50782025", "0.50741535", "0.5070016", "0.50640124", "0.50588244", "0.50430465", "0.502522", "0.50211036", "0.50205725", "0.50177824", "0.5004492", "0.4989085", "0.49864322", "0.49802646", "0.49726915", "0.49630874", "0.4961579", "0.49588275", "0.49586034", "0.49032503", "0.4889934", "0.48851824", "0.48819873", "0.48602632", "0.4857601", "0.48493618", "0.48463315", "0.4846256", "0.48408517", "0.48351774", "0.4833987" ]
0.83881277
0
WriteByteArray is a helper method to define mock.On call logicalName string data []byte writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call { return &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On("WriteByteArray", append([]interface{}{logicalName, data}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func CreateMockByteArray(mockData string) *[]byte {\n\tba := make([]byte, 0)\n\tba = append(ba, mockData...)\n\treturn &ba\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, 
writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m *MockQueue) PublishBytes(arg0 ...[]byte) bool {\n\tm.ctrl.T.Helper()\n\tvarargs := []interface{}{}\n\tfor _, a := range arg0 {\n\t\tvarargs = append(varargs, a)\n\t}\n\tret := m.ctrl.Call(m, \"PublishBytes\", varargs...)\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, 
writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockedAcknowledgementStorage) writeDataToFile() error {\n\targs := m.Called()\n\treturn args.Error(0)\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (p *WriterStub) Write(buf []byte) (int, error) {\n\tp.Body = append(p.Body, buf...)\n\treturn len(p.Body), nil\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *Client) WriteBytes(ctx context.Context, name string, data []byte) error {\n\treturn c.WriteChunked(ctx, name, chunker.NewFromBlob(data, int(c.ChunkMaxSize)))\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteBytes(ctx context.Context, data []byte, filename string) error {\n\tif strings.HasPrefix(filename, \"gs://\") {\n\t\treturn writeGCSObject(ctx, data, filename)\n\t}\n\treturn ioutil.WriteFile(filename, data, os.ModePerm)\n}", "func (res *ServerHTTPResponse) writeBytes(bytes []byte) {\n\t_, err := res.responseWriter.Write(bytes)\n\tif err != nil {\n\t\t/* coverage ignore next line */\n\t\tres.contextLogger.Error(res.Request.Context(),\n\t\t\t\"Could not write string to resp body\",\n\t\t\tzap.Error(err),\n\t\t\tzap.String(\"bytesLength\", strconv.Itoa(len(bytes))),\n\t\t)\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteBytes(buffer []byte, offset int, value []byte) {\n copy(buffer[offset:offset + len(value)], value)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, 
bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (w *Writer) WriteBytes(data []byte) {\n\t// check length\n\tw.checkLength(len(data))\n\n\tcopy(w.buffer[w.index:], data)\n\tw.index += len(data)\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func WriteBytesStr(b []byte, w io.Writer, array bool) error {\n\tvar prefix string\n\tif array {\n\t\tprefix = \"[\" + strconv.Itoa(len(b)) + \"]byte{\"\n\t} else {\n\t\tprefix = \"[]byte{\"\n\t}\n\treturn writeBytesStr(b, prefix, w)\n}", "func WriteBytes(file *os.File, bytes []byte, particularOffset bool, addr int64) {\n\tfmt.Printf(\"%04X\\n\", addr)\n\tvar jmpFileLoc int64\n\tif particularOffset {\n\t\toriginalOffset, _ := file.Seek(0, 1)\n\t\tjmpFileLoc = originalOffset\n\t\tfile.Seek(addr, 0)\n\t}\n\tbytesWritten, err := file.Write(bytes)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Printf(\"Wrote %d bytes.\\n\", bytesWritten)\n\tif particularOffset {\n\t\tfile.Seek(jmpFileLoc, 0)\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) WriteMapData(name string, data map[string]interface{}) error {\n\targs := m.Mock.Called(name, data)\n\treturn args.Error(0)\n}", "func (_mr *MockOStreamMockRecorder) WriteBytes(arg0 interface{}) *gomock.Call {\n\treturn _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, \"WriteBytes\", reflect.TypeOf((*MockOStream)(nil).WriteBytes), arg0)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_Mapping *MappingCallerSession) BytesTest(arg0 [32]byte) (bool, error) {\n\treturn _Mapping.Contract.BytesTest(&_Mapping.CallOpts, arg0)\n}", "func Bytes(val *[]byte) http.Arrow {\n\treturn func(cat *assay.IOCat) *assay.IOCat {\n\t\t*val, cat.Fail = ioutil.ReadAll(cat.HTTP.Recv.Body)\n\t\tcat.Fail = cat.HTTP.Recv.Body.Close()\n\t\tcat.HTTP.Recv.Response = nil\n\t\tcat.HTTP.Recv.Payload = string(*val)\n\t\treturn cat\n\t}\n}", "func (dm *dataManager) writeByte(address uint, b byte) (err ProcessException) {\n\tdata := []byte{b}\n\n\terr = dm.process.WriteBytes(address, data)\n\n\treturn\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := 
ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_Mapping *MappingCaller) BytesTest(opts *bind.CallOpts, arg0 [32]byte) (bool, error) {\n\tvar (\n\t\tret0 = new(bool)\n\t)\n\tout := ret0\n\terr := _Mapping.contract.Call(opts, out, \"bytesTest\", arg0)\n\treturn *ret0, err\n}", "func TestWrite(t *testing.T) {\n\ttr := NewTFramedTransport(new(mockTTransport))\n\tbuff := make([]byte, 10)\n\n\tn, err := tr.Write(buff)\n\n\tassert.Equal(t, 10, n)\n\tassert.Nil(t, err)\n\tassert.Equal(t, buff, tr.buf.Bytes())\n}", "func (m *MockPostForkBlock) Bytes() []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bytes\")\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteFileByByte(path string, data []byte, fileMod fs.FileMode, coverage bool) error {\n\tif !coverage {\n\t\texists, err := PathExists(path)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif exists {\n\t\t\treturn fmt.Errorf(\"not coverage, which path exist %v\", path)\n\t\t}\n\t}\n\tparentPath := filepath.Dir(path)\n\tif !PathExistsFast(parentPath) {\n\t\terr := os.MkdirAll(parentPath, fileMod)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"can not WriteFileByByte at new dir at mode: %v , at parent path: %v\", fileMod, parentPath)\n\t\t}\n\t}\n\terr := os.WriteFile(path, data, fileMod)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"write data at path: %v, err: %v\", path, err)\n\t}\n\treturn nil\n}", "func (res Responder) WriteBytes(b []byte) int {\n\tn := res.writeInline(binDOLLAR, strconv.Itoa(len(b)))\n\tm, _ := res.b.Write(b)\n\tres.b.Write(binCRLF)\n\treturn n + m + 2\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (c 
*Client) WriteMemory(addr uint64, data []byte) error {\n\tdataInHex := \"\"\n\tfor _, b := range data {\n\t\tdataInHex += fmt.Sprintf(\"%02x\", b)\n\t}\n\tcommand := fmt.Sprintf(\"M%x,%x:%s\", addr, len(data), dataInHex)\n\tif err := c.send(command); err != nil {\n\t\treturn err\n\t}\n\n\treturn c.receiveAndCheck()\n}", "func (gps *Device) WriteBytes(bytes []byte) {\n\tif gps.uart != nil {\n\t\tgps.uart.Write(bytes)\n\t} else {\n\t\tgps.bus.Tx(gps.address, []byte{}, bytes)\n\t}\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func WriteBytes(obj any) []byte {\n\tvar b bytes.Buffer\n\tenc := toml.NewEncoder(&b)\n\tenc.Encode(obj)\n\treturn b.Bytes()\n}", "func (m *SignatureKeyHolderMock) AsBytes() (r []byte) {\n\tcounter := atomic.AddUint64(&m.AsBytesPreCounter, 1)\n\tdefer atomic.AddUint64(&m.AsBytesCounter, 1)\n\n\tif len(m.AsBytesMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.AsBytesMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsBytes.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.AsBytesMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsBytes\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsBytesMock.mainExpectation != nil {\n\n\t\tresult := m.AsBytesMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.AsBytes\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsBytesFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.AsBytes.\")\n\t\treturn\n\t}\n\n\treturn m.AsBytesFunc()\n}", "func (w *Writer) WriteBytes(bytes []byte) {\n\tsetWriterRef(w, nil, nil)\n\twriteBytes(w, bytes)\n}", "func (mbc *MBCRom) WriteRamBank(address uint16, data byte) {\n\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func WriteBytes(w http.ResponseWriter, status int, text []byte) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tw.Write(text)\n}", "func (a *api) SetBytes(raw bool) {\n\ta.Commentf(\"%s constructs a field element from bytes in big-endian order.\", rawname(\"SetBytes\", raw))\n\ta.rawcomment(raw)\n\ta.Printf(\"func (x %s) %s(b []byte) %s\", a.PointerType(), rawname(\"SetBytes\", raw), a.PointerType())\n\ta.EnterBlock()\n\ta.Linef(\"x.%s(new(big.Int).SetBytes(b))\", rawname(\"SetInt\", raw))\n\ta.Linef(\"return x\")\n\ta.LeaveBlock()\n}", "func (mwc *MockWriteCloser) Bytes() []byte {\n\treturn mwc.b.Bytes()\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteByte(buffer []byte, offset int, value byte) {\n buffer[offset] = value\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func putBytes(log log.T, 
byteArray []byte, offsetStart int, offsetEnd int, inputBytes []byte) (err error) {\n\tbyteArrayLength := len(byteArray)\n\tif offsetStart > byteArrayLength-1 || offsetEnd > byteArrayLength-1 || offsetStart > offsetEnd || offsetStart < 0 {\n\t\tlog.Error(\"putBytes failed: Offset is invalid.\")\n\t\treturn errors.New(\"Offset is outside the byte array.\")\n\t}\n\n\tif offsetEnd-offsetStart+1 != len(inputBytes) {\n\t\tlog.Error(\"putBytes failed: Not enough space to save the bytes.\")\n\t\treturn errors.New(\"Not enough space to save the bytes.\")\n\t}\n\n\tcopy(byteArray[offsetStart:offsetEnd+1], inputBytes)\n\treturn nil\n}", "func (out *OutBuffer) WriteBytes(d []byte) {\n\tout.Append(d...)\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (c *Action) WriteBytes(bytes []byte) error {\n\t_, err := c.ResponseWriter.Write(bytes)\n\tif err != nil {\n\t\tc.App.Server.Logger.Println(\"Error during write: \", err)\n\t}\n\treturn err\n}", "func encodeByteSlice(w io.Writer, bz []byte) (err error) {\n\terr = encodeVarint(w, int64(len(bz)))\n\tif err != nil {\n\t\treturn\n\t}\n\t_, err = w.Write(bz)\n\treturn\n}", "func(this*Window)Write(p[]byte)(int,error){\nf,err:=this.File(\"body\")\nif err!=nil{\nreturn 0,err\n}\n\n\n/*71:*/\n\n\n//line goacme.w:1016\n\nf= &wrapper{f:f}\n\n\n\n\n\n/*:71*/\n\n\n//line goacme.w:244\n\nreturn f.Write(p)\n}", "func (c *context) ArgBytes(name string) []byte {\n\treturn c.ParamBytes(name)\n}", "func (w *MockWriter) Write(p []byte) (int, error) {\n\tw.Entries = append(w.Entries, string(p))\n\treturn len(p), nil\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (fw *FileWrapper) WriteBytes(b []byte) (int, error) {\n\t_, err := fw.SeekRel(0)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\tn, err := fw.Write(b)\n\tif err == nil {\n\t\tfw.pos += int64(n)\n\t}\n\n\treturn n, err\n}", "func mockTest0101(w http.ResponseWriter, r *http.Request) {\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\ts := fmt.Sprintf(\"mockTest01: from Host %s\\n\", hostname)\n\n\t// get query args\n\tsize, err := common.GetIntArgFromQuery(r, \"size\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\n\t// mock bytes\n\tif size > 0 {\n\t\tlog.Printf(\"create mock bytes of length %d.\\n\", size)\n\t\ts += common.CreateMockString(size)\n\t}\n\n\tw.Header().Set(common.TextContentLength, strconv.Itoa(len(s)))\n\t// mockMD5 := \"f900b997e6f8a772994876dff023801e\"\n\t// w.Header().Set(\"Content-Md5\", mockMD5)\n\tw.WriteHeader(http.StatusOK)\n\n\tif _, err := io.Copy(w, bufio.NewReader(strings.NewReader(s))); err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t}\n}", "func (_ValueTester *ValueTesterCaller) BytesToBytestackHash(opts *bind.CallOpts, data []byte, startOffset *big.Int, dataLength *big.Int) ([32]byte, error) {\n\tvar (\n\t\tret0 = new([32]byte)\n\t)\n\tout := ret0\n\terr := _ValueTester.contract.Call(opts, out, \"bytesToBytestackHash\", data, startOffset, dataLength)\n\treturn *ret0, err\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (ap *APNGModel) WriteBytes(w io.Writer) error {\n\n\t_, err := w.Write(ap.buffer)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\treturn err\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_Mapping *MappingSession) BytesTest(arg0 [32]byte) (bool, error) {\n\treturn _Mapping.Contract.BytesTest(&_Mapping.CallOpts, arg0)\n}", "func (p *Proxy) SetupHandlerOnProxyStructBytes(handler OnProxyStructBytes) { p.HandlerOnProxyStructBytes = handler }", "func (_m *HadolintPiperFileUtils) FileWrite(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Buffer) AttachBytes(buffer []byte, offset int, size int) {\n if len(buffer) < size {\n panic(\"invalid buffer\")\n }\n if size <= 0 {\n panic(\"invalid size\")\n }\n if offset > size {\n panic(\"invalid offset\")\n }\n\n b.data = buffer\n b.size = size\n b.offset = 
offset\n}", "func (writer *Writer) WriteBytes(bytes []byte) {\n\twriter.buf.Write(bytes)\n}", "func (m *OutboundMock) AsByteString() (r string) {\n\tcounter := atomic.AddUint64(&m.AsByteStringPreCounter, 1)\n\tdefer atomic.AddUint64(&m.AsByteStringCounter, 1)\n\n\tif len(m.AsByteStringMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.AsByteStringMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to OutboundMock.AsByteString.\")\n\t\t\treturn\n\t\t}\n\n\t\tresult := m.AsByteStringMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the OutboundMock.AsByteString\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringMock.mainExpectation != nil {\n\n\t\tresult := m.AsByteStringMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the OutboundMock.AsByteString\")\n\t\t}\n\n\t\tr = result.r\n\n\t\treturn\n\t}\n\n\tif m.AsByteStringFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to OutboundMock.AsByteString.\")\n\t\treturn\n\t}\n\n\treturn m.AsByteStringFunc()\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}" ]
[ "0.8080626", "0.80016965", "0.78044194", "0.62943184", "0.6067234", "0.60475725", "0.60475725", "0.60475725", "0.60475725", "0.60475725", "0.604", "0.5924456", "0.57793844", "0.5709523", "0.55990165", "0.54844356", "0.54584134", "0.5386788", "0.53769726", "0.5373169", "0.5347739", "0.53156", "0.53098565", "0.52644444", "0.5245396", "0.5219083", "0.5207578", "0.5206709", "0.5187145", "0.5184287", "0.51751214", "0.5160332", "0.5154916", "0.5151011", "0.51469254", "0.5125552", "0.5110066", "0.50879365", "0.5040241", "0.4991164", "0.49888638", "0.4976275", "0.49654788", "0.49644893", "0.49520585", "0.4945397", "0.4940888", "0.48935288", "0.48512352", "0.4834102", "0.47981364", "0.47914463", "0.47649333", "0.4762368", "0.47496575", "0.4749415", "0.4746892", "0.47455734", "0.47335464", "0.4723125", "0.4706513", "0.47036034", "0.46978882", "0.46977845", "0.46906883", "0.46902853", "0.468744", "0.4685736", "0.4685246", "0.46849057", "0.46846095", "0.46690196", "0.46668032", "0.46601337", "0.46594724", "0.46545616", "0.46537447", "0.46536037", "0.46491313", "0.46435246", "0.4639001", "0.4637843", "0.462988", "0.46276155", "0.4625449", "0.46154538", "0.45952982", "0.4592121", "0.45877072", "0.45875463", "0.45687023", "0.45645583", "0.45553824", "0.45511717", "0.45487487", "0.45451513", "0.4540762", "0.4530625", "0.45303577", "0.45270035" ]
0.8280528
0
WriteFloat32 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Float32(arg0 string) float32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float32\", arg0)\n\tret0, _ := ret[0].(float32)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor 
_i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteFloat(buffer []byte, offset int, value float32) {\n WriteUInt32(buffer, offset, math.Float32bits(value))\n}", "func (w *ByteWriter) WriteFloat32(val float32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (w *Writer) WriteFloat32(v float32) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn 
&MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockReader) Float() (float32, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float\")\n\tret0, _ := ret[0].(float32)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteFloat32LE(v float32) {\n\tout.WriteUint32LE(math.Float32bits(v))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) Float32(n float32) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 32)\n}", "func (_m *ValueConverter) ToFloat(_a0 interface{}) float64 {\n\tret := _m.Called(_a0)\n\n\tvar r0 float64\n\tif rf, ok := ret.Get(0).(func(interface{}) float64); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(float64)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *ByteWriter) MustWriteFloat32(val float32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (instance *Instance) SetFloat32(fieldName string, value float32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], 
math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func (mr *MockSessionMockRecorder) Float32(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Float32\", reflect.TypeOf((*MockSession)(nil).Float32), arg0)\n}", "func (x *Rat) Float32() (f float32, exact bool) {}", "func ExpectFloat32(t *testing.T, field string, expected float32, found float32) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%f], found '%f'\", field, expected, found)\n\t}\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestFloat(t *testing.T) {\n\ttype Test struct {\n\t\tvalue interface{}\n\t\texpected float64\n\t}\n\ttests := []Test{\n\t\t{float32(3), 3},\n\t\t{float32(3.4), 3.4},\n\t\t{float32(0), 0},\n\t\t{float32(0.4), 0.4},\n\t\t{float32(-10.1), -10.1},\n\t\t{float64(4), 4},\n\t\t{float64(4.1), 4.1},\n\t\t{float64(0), 0},\n\t\t{float64(0.3), 0.3},\n\t\t{float64(-12.03), -12.03},\n\t}\n\n\tfor _, test := range tests {\n\t\t// Encode it\n\t\tdata, err := Marshal(test.value)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Marshal failed: %v\", err)\n\t\t}\n\t\t// Decode it\n\t\tvar value float64\n\t\terr = Unmarshal(data, &value)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"Unmarshal failed: %v\", err)\n\t\t}\n\t\t// Check equality.\n\t\tif !reflect.DeepEqual(value, test.expected) {\n\t\t\tt.Fatalf(\"not equal %v/%v\", value, test.expected)\n\t\t}\n\t\tt.Logf(\"Unmarshal() = %+v\\n\", value)\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) Float64(arg0 string) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", arg0)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func float32bits(f float32) uint32 { return *(*uint32)(unsafe.Pointer(&f)) }", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, 
int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m Measurement) AddFloat32(name string, value float32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (f Float) Float32() float32 {\n\tpanic(\"not yet implemented\")\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t Float32) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteFloat32(float32(t))\n\treturn aWriterPool.Put(lw)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *FormSerializationWriter) WriteFloat32Value(key string, value *float32) error {\n\tif value != nil {\n\t\tcast := float64(*value)\n\t\treturn w.WriteFloat64Value(key, &cast)\n\t}\n\treturn nil\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteFloat32BE(v float32) {\n\tout.WriteUint32BE(math.Float32bits(v))\n}", "func (m *MockIOPackage) AppendUint32(arg0 []byte, arg1 uint32) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint32\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (e *Encoder) WriteFloat(n float64, bitSize int) {\n\te.prepareNext(Number)\n\te.out = appendFloat(e.out, n, bitSize)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := 
_m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *BlobStore) Put(path string, reader io.Reader, objectSize int64) error {\n\tret := _m.Called(path, reader, objectSize)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, io.Reader, int64) error); ok {\n\t\tr0 = rf(path, reader, objectSize)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func TestFloat32NamedArguments(t *testing.T) {\n\tt.Skip(\"Bug(sickyoon): float32 as an argument throws a panic: https://github.com/gocelery/gocelery/issues/75\")\n\ttestCases := []struct {\n\t\tname string\n\t\tbroker CeleryBroker\n\t\tbackend CeleryBackend\n\t\ttaskName string\n\t\ttaskFunc interface{}\n\t\tinA float32\n\t\tinB float32\n\t\texpected float32\n\t}{\n\t\t{\n\t\t\tname: \"float32 addition with redis broker/backend\",\n\t\t\tbroker: redisBroker,\n\t\t\tbackend: redisBackend,\n\t\t\ttaskName: uuid.Must(uuid.NewV4(), nil).String(),\n\t\t\ttaskFunc: &addFloat32Task{},\n\t\t\tinA: 3.4580,\n\t\t\tinB: 5.3688,\n\t\t\texpected: float32(8.8268),\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tcli, _ := NewCeleryClient(tc.broker, tc.backend, 1)\n\t\tcli.Register(tc.taskName, tc.taskFunc)\n\t\tcli.StartWorker()\n\t\tasyncResult, err := cli.DelayKwargs(\n\t\t\ttc.taskName,\n\t\t\tmap[string]interface{}{\n\t\t\t\t\"a\": tc.inA,\n\t\t\t\t\"b\": tc.inB,\n\t\t\t},\n\t\t)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tres, err := asyncResult.Get(TIMEOUT)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tif tc.expected != float32(res.(float64)) {\n\t\t\tt.Errorf(\"test '%s': returned result %+v is different from expected result %+v\", tc.name, res, tc.expected)\n\t\t}\n\t\tcli.StopWorker()\n\t}\n}", "func (m *MockSeriesRef) Write(ctx context.Context, timestamp time.UnixNano, value float64, unit time.Unit, annotation []byte, wOpts series.WriteOptions) (bool, series.WriteType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, timestamp, value, unit, annotation, wOpts)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(series.WriteType)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := 
make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (t *Type) Float32(defaultValue ...float32) Float32Accessor {\n\tnv := &NullFloat32{}\n\tif nv.Error = t.err; t.err != nil {\n\t\treturn nv\n\t}\n\tvalueTo := t.toFloat(reflect.Float32)\n\tnv = &NullFloat32{Float32Common{Error: valueTo.Err()}}\n\tif defaultFloat32(nv, defaultValue...) {\n\t\treturn nv\n\t}\n\tv := float32(valueTo.V())\n\tnv.P = &v\n\treturn nv\n}", "func Float32(k string, v float32) Field {\n\treturn Field{Key: k, Value: valf.Float32(v)}\n}", "func (m *MockValue) Float64(def float64) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", def)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *OutputState) ApplyFloat32(applier interface{}) Float32Output {\n\treturn o.ApplyT(applier).(Float32Output)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Float32(v *Value, def float32) float32 {\n\tf, err := v.Float32()\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn f\n}", "func PtrFloat(v float32) *float32 { return &v }", "func (rw *RW) ToFloat32() float32 {\n\treturn *(*float32)(unsafe.Pointer(&rw.Value))\n}", "func (m *MockBigInterface) Foo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo32 != nil {\n\t\treturn m.FnFoo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo32\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "func PtrFloat32(v float32) *float32 { return &v }", "func (m *MockSpaceStorage) WriteSpaceHash(arg0 string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteSpaceHash\", arg0)\n\tret0, _ := 
ret[0].(error)\n\treturn ret0\n}", "func (r *Reader) Float32() float32 {\n\treturn math.Float32frombits(r.Uint32())\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (tv *TypedFloat) Float32() float32 {\n\tif len(tv.Bytes) == 0 {\n\t\treturn 0.0\n\t}\n\tvar value big.Float\n\t_ = value.GobDecode(tv.Bytes)\n\tflt32, _ := value.Float32()\n\treturn flt32\n}", "func Float32(key string, val float32) Field {\n\treturn Field{Key: key, Type: core.Float32Type, Integer: int64(math.Float32bits(val))}\n}", "func MarshalF32(x float32, buf []byte, rem int) ([]byte, int, error) {\n\tif len(buf) < SizeHintF32 || rem < SizeHintF32 {\n\t\treturn buf, rem, ErrUnexpectedEndOfBuffer\n\t}\n\tbinary.BigEndian.PutUint32(buf, math.Float32bits(x))\n\treturn buf[SizeHintF32:], rem - SizeHintF32, nil\n}", "func saveReturnFloat32(v *float32, def interface{}) interface{} {\n\tif v == nil {\n\t\treturn def\n\t} else {\n\t\treturn *v\n\t}\n}", "func (x *Float) Float32() (float32, Accuracy) {\n\t// possible: panic(\"unreachable\")\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestFloat32(t *testing.T) {\n\tt.Skip(\"Bug(sickyoon): float32 as an argument throws a panic: https://github.com/gocelery/gocelery/issues/75\")\n\ttestCases := []struct {\n\t\tname string\n\t\tbroker CeleryBroker\n\t\tbackend CeleryBackend\n\t\ttaskName string\n\t\ttaskFunc interface{}\n\t\tinA float32\n\t\tinB float32\n\t\texpected float32\n\t}{\n\t\t{\n\t\t\tname: \"float32 addition with redis broker/backend\",\n\t\t\tbroker: redisBroker,\n\t\t\tbackend: redisBackend,\n\t\t\ttaskName: uuid.Must(uuid.NewV4(), nil).String(),\n\t\t\ttaskFunc: addFloat32,\n\t\t\tinA: 3.4580,\n\t\t\tinB: 5.3688,\n\t\t\texpected: float32(8.8268),\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tcli, _ := NewCeleryClient(tc.broker, tc.backend, 1)\n\t\tcli.Register(tc.taskName, tc.taskFunc)\n\t\tcli.StartWorker()\n\t\tasyncResult, err := 
cli.Delay(tc.taskName, tc.inA, tc.inB)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tres, err := asyncResult.Get(TIMEOUT)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tif tc.expected != float32(res.(float64)) {\n\t\t\tt.Errorf(\"test '%s': returned result %+v is different from expected result %+v\", tc.name, res, tc.expected)\n\t\t}\n\t\tcli.StopWorker()\n\t}\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func Float32Ptr(v float32) *float32 { return &v }", "func (o *FakeObject) Float() float64 { return o.Value.(float64) }", "func (this *channelStruct) WriteFloats(samples []float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func MeasureFloat32(name string, field string, value float32) Measurement {\n\treturn NewMeasurement(name).AddFloat32(field, value)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func 
Float32Arg(register Register, name string, options ...ArgOptionApplyer) *float32 {\n\tp := new(float32)\n\t_ = Float32ArgVar(register, p, name, options...)\n\treturn p\n}", "func Float32(key string, val float32) Tag {\n\treturn Tag{key: key, tType: float32Type, floatVal: float64(val)}\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mr *MockIOPackageMockRecorder) WriteUint32(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint32\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint32), arg0, arg1, arg2)\n}" ]
[ "0.83793396", "0.8179838", "0.8037956", "0.68830144", "0.67320764", "0.66668016", "0.6630418", "0.6474508", "0.64441586", "0.6406249", "0.6315927", "0.6209198", "0.6159037", "0.61152226", "0.60467637", "0.60296416", "0.60099757", "0.58896196", "0.5873409", "0.58431864", "0.58313423", "0.5784861", "0.56446385", "0.5635043", "0.56292826", "0.5602476", "0.55751544", "0.5553194", "0.5531333", "0.55148715", "0.54897404", "0.54805475", "0.54698575", "0.5453637", "0.5398555", "0.5378294", "0.5373862", "0.53562844", "0.5327836", "0.5294496", "0.5258089", "0.52546656", "0.52515423", "0.5232402", "0.5227431", "0.52256024", "0.522135", "0.5216681", "0.52141845", "0.5204342", "0.51994777", "0.5188137", "0.5181831", "0.5173843", "0.5138293", "0.51343656", "0.51321393", "0.51321393", "0.51163435", "0.51124364", "0.50976706", "0.5080159", "0.5074765", "0.5073633", "0.5063025", "0.504617", "0.50436884", "0.5030335", "0.5030003", "0.5014387", "0.50133395", "0.50098765", "0.50075877", "0.50059664", "0.49953133", "0.49934816", "0.4989276", "0.4983591", "0.49830198", "0.49644732", "0.49604928", "0.49541423", "0.49342677", "0.49317738", "0.49245113", "0.49245113", "0.49245113", "0.49079484", "0.49064466", "0.4897133", "0.4895571", "0.4894088", "0.48924315", "0.4891398", "0.48902726", "0.48751712", "0.4867878", "0.48673207", "0.48588854", "0.48575053" ]
0.84569436
0
WriteFloat32 is a helper method to define mock.On call logicalName string bitLength uint8 value float32 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call { return &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On("WriteFloat32", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *ByteWriter) WriteFloat32(val float32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (w *Writer) Float32(n float32) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 32)\n}", "func (w *Writer) WriteFloat32(v float32) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func WriteFloat(buffer []byte, offset int, value float32) {\n WriteUInt32(buffer, offset, math.Float32bits(value))\n}", "func (m *MockSession) Float32(arg0 string) float32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float32\", arg0)\n\tret0, _ := ret[0].(float32)\n\treturn ret0\n}", "func (out *OutBuffer) WriteFloat32LE(v float32) {\n\tout.WriteUint32LE(math.Float32bits(v))\n}", "func (instance *Instance) SetFloat32(fieldName string, value float32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = 
rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (b *Buffer) AppendFloat32(v float32) {\n\tb.AppendFloat64(float64(v))\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockSessionMockRecorder) Float32(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Float32\", reflect.TypeOf((*MockSession)(nil).Float32), arg0)\n}", "func (m Measurement) AddFloat32(name string, value float32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (o *OutputState) ApplyFloat32(applier interface{}) Float32Output {\n\treturn o.ApplyT(applier).(Float32Output)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *ByteWriter) MustWriteFloat32(val float32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (x *Rat) Float32() (f float32, exact bool) {}", "func (f Float) Float32() float32 {\n\tpanic(\"not yet implemented\")\n}", "func Float32Arg(register Register, name string, options ...ArgOptionApplyer) *float32 {\n\tp := new(float32)\n\t_ = Float32ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (f *FunctionCall) CallFloat32() float32 {\n\tif f.NumArgs > len(f.Words) {\n\t\tpanic(\"bad NumArgs\")\n\t}\n\tif f.addr == nil {\n\t\tpanic(\"variadic: CallFloat32 called with nil function addr\")\n\t}\n\treturn float32(C.VariadicCallFloat(unsafe.Pointer(f)))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float32(v *Value, def float32) float32 {\n\tf, err := v.Float32()\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn f\n}", "func ExpectFloat32(t *testing.T, field string, expected float32, found float32) {\n\tif expected != 
found {\n\t\tt.Errorf(\"%s [%f], found '%f'\", field, expected, found)\n\t}\n}", "func Float32(k string, v float32) Field {\n\treturn Field{Key: k, Value: valf.Float32(v)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func MarshalF32(x float32, buf []byte, rem int) ([]byte, int, error) {\n\tif len(buf) < SizeHintF32 || rem < SizeHintF32 {\n\t\treturn buf, rem, ErrUnexpectedEndOfBuffer\n\t}\n\tbinary.BigEndian.PutUint32(buf, math.Float32bits(x))\n\treturn buf[SizeHintF32:], rem - SizeHintF32, nil\n}", "func Float32(key string, val float32) Tag {\n\treturn Tag{key: key, tType: float32Type, floatVal: float64(val)}\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func (s *Streamer) Float32(f float32) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\tf64 := float64(f)\n\tif math.IsInf(f64, 0) {\n\t\ts.Error = ErrFloatIsInfinity\n\t\treturn s\n\t}\n\tif math.IsNaN(f64) {\n\t\ts.Error = ErrFloatIsNan\n\t\treturn s\n\t}\n\ts.onVal()\n\tfmt := byte('f')\n\tabs := math.Abs(f64)\n\tif abs != 0 {\n\t\tif f > 0 {\n\t\t\tif f < 1e-6 || f >= 1e21 {\n\t\t\t\tfmt = 'e'\n\t\t\t}\n\t\t} else {\n\t\t\tif f > -1e-6 || f <= -1e21 {\n\t\t\t\tfmt = 'e'\n\t\t\t}\n\t\t}\n\t}\n\ts.buffer = strconv.AppendFloat(s.buffer, f64, fmt, -1, 32)\n\tif fmt == 'e' {\n\t\tn := len(s.buffer)\n\t\tif n > 4 && s.buffer[n-4] == 'e' &&\n\t\t\ts.buffer[n-3] == '-' &&\n\t\t\ts.buffer[n-2] == '0' {\n\t\t\ts.buffer[n-2] = s.buffer[n-1]\n\t\t\ts.buffer = s.buffer[:n-1]\n\t\t}\n\t}\n\treturn s\n}", "func (t *Type) Float32(defaultValue ...float32) Float32Accessor {\n\tnv := &NullFloat32{}\n\tif nv.Error = t.err; t.err != nil {\n\t\treturn nv\n\t}\n\tvalueTo := t.toFloat(reflect.Float32)\n\tnv = &NullFloat32{Float32Common{Error: valueTo.Err()}}\n\tif defaultFloat32(nv, defaultValue...) 
{\n\t\treturn nv\n\t}\n\tv := float32(valueTo.V())\n\tnv.P = &v\n\treturn nv\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t Float32) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteFloat32(float32(t))\n\treturn aWriterPool.Put(lw)\n}", "func (c Context) Float32(key string, f float32) Context {\n\tc.l.context = appendFloat32(c.l.context, key, f)\n\treturn c\n}", "func Float32Tag(name interface{}, value float32) Tag {\n\treturn &tag{\n\t\ttagType: TagFloat32,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (tv *TypedFloat) Float32() float32 {\n\tif len(tv.Bytes) == 0 {\n\t\treturn 0.0\n\t}\n\tvar value big.Float\n\t_ = value.GobDecode(tv.Bytes)\n\tflt32, _ := value.Float32()\n\treturn flt32\n}", "func (e *Encoder) WriteFloat(n float64, bitSize int) {\n\te.prepareNext(Number)\n\te.out = appendFloat(e.out, n, bitSize)\n}", "func (x *Float) Float32() (float32, Accuracy) {\n\t// possible: panic(\"unreachable\")\n}", "func (w *FormSerializationWriter) WriteFloat32Value(key string, value *float32) error {\n\tif value != nil {\n\t\tcast := float64(*value)\n\t\treturn w.WriteFloat64Value(key, &cast)\n\t}\n\treturn nil\n}", "func Float32(key string, val float32) Field {\n\treturn Field{Key: key, Type: core.Float32Type, Integer: int64(math.Float32bits(val))}\n}", "func MeasureFloat32(name string, field string, value float32) Measurement {\n\treturn NewMeasurement(name).AddFloat32(field, value)\n}", "func (out *OutBuffer) WriteFloat32BE(v float32) {\n\tout.WriteUint32BE(math.Float32bits(v))\n}", "func Float32(colName string) sif.GenericColumnAccessor[float32] {\n\treturn sif.CreateColumnAccessor[float32](&float32Type{}, colName)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (e Entry) Float32(key string, value float32) (entry Entry) {\n\te.Float64(key, float64(value))\n\treturn e\n}", "func Float32(f *float32) float32 {\n\tif f == nil {\n\t\treturn 0\n\t}\n\treturn *f\n}", "func (r *Reader) Float32() float32 {\n\treturn math.Float32frombits(r.Uint32())\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (w *Writer) WriteUint32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func TestFloat32NamedArguments(t *testing.T) {\n\tt.Skip(\"Bug(sickyoon): float32 as an argument throws a panic: https://github.com/gocelery/gocelery/issues/75\")\n\ttestCases := []struct {\n\t\tname string\n\t\tbroker CeleryBroker\n\t\tbackend CeleryBackend\n\t\ttaskName string\n\t\ttaskFunc interface{}\n\t\tinA float32\n\t\tinB float32\n\t\texpected float32\n\t}{\n\t\t{\n\t\t\tname: \"float32 addition with redis 
broker/backend\",\n\t\t\tbroker: redisBroker,\n\t\t\tbackend: redisBackend,\n\t\t\ttaskName: uuid.Must(uuid.NewV4(), nil).String(),\n\t\t\ttaskFunc: &addFloat32Task{},\n\t\t\tinA: 3.4580,\n\t\t\tinB: 5.3688,\n\t\t\texpected: float32(8.8268),\n\t\t},\n\t}\n\tfor _, tc := range testCases {\n\t\tcli, _ := NewCeleryClient(tc.broker, tc.backend, 1)\n\t\tcli.Register(tc.taskName, tc.taskFunc)\n\t\tcli.StartWorker()\n\t\tasyncResult, err := cli.DelayKwargs(\n\t\t\ttc.taskName,\n\t\t\tmap[string]interface{}{\n\t\t\t\t\"a\": tc.inA,\n\t\t\t\t\"b\": tc.inB,\n\t\t\t},\n\t\t)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tres, err := asyncResult.Get(TIMEOUT)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"test '%s': failed to get result for task %s: %+v\", tc.name, tc.taskName, err)\n\t\t\tcli.StopWorker()\n\t\t\tcontinue\n\t\t}\n\t\tif tc.expected != float32(res.(float64)) {\n\t\t\tt.Errorf(\"test '%s': returned result %+v is different from expected result %+v\", tc.name, res, tc.expected)\n\t\t}\n\t\tcli.StopWorker()\n\t}\n}", "func (this *channelStruct) WriteFloats(samples []float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func (this *JSONObject) Float32(key string) float32 {\n\treturn float32(this.innerMap[key].(float64))\n}", "func Float32(v *float32) float32 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func Float32(v float32) *float32 {\n\treturn &v\n}", "func (m *MockReader) Float() (float32, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float\")\n\tret0, _ := ret[0].(float32)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (o *OutputState) ApplyFloat32Ptr(applier interface{}) Float32PtrOutput {\n\treturn o.ApplyT(applier).(Float32PtrOutput)\n}", "func AppendFloat32(dst []byte, val float32) []byte {\n\tswitch {\n\tcase math.IsNaN(float64(val)):\n\t\treturn append(dst, \"\\xfa\\x7f\\xc0\\x00\\x00\"...)\n\tcase math.IsInf(float64(val), 1):\n\t\treturn append(dst, \"\\xfa\\x7f\\x80\\x00\\x00\"...)\n\tcase math.IsInf(float64(val), -1):\n\t\treturn append(dst, \"\\xfa\\xff\\x80\\x00\\x00\"...)\n\t}\n\tmajor := majorTypeSimpleAndFloat\n\tsubType := additionalTypeFloat32\n\tn := math.Float32bits(val)\n\tvar buf [4]byte\n\tfor i := uint(0); i < 4; i++ {\n\t\tbuf[i] = byte(n >> ((3 - i) * 8))\n\t}\n\treturn append(append(dst, byte(major|subType)), buf[0], buf[1], buf[2], buf[3])\n}", "func float32bits(f float32) uint32 { return *(*uint32)(unsafe.Pointer(&f)) }", "func Float32(val interface{}) (float32, error) {\n\tswitch ret := val.(type) {\n\tcase float32:\n\t\treturn ret, nil\n\tcase int:\n\t\treturn float32(ret), nil\n\tcase int8:\n\t\treturn float32(ret), nil\n\tcase int16:\n\t\treturn float32(ret), nil\n\tcase int32:\n\t\treturn float32(ret), nil\n\tcase int64:\n\t\treturn float32(ret), nil\n\tcase uint:\n\t\treturn float32(ret), nil\n\tcase uint8:\n\t\treturn float32(ret), nil\n\tcase uint16:\n\t\treturn float32(ret), nil\n\tcase uint32:\n\t\treturn float32(ret), nil\n\tcase uint64:\n\t\treturn float32(ret), nil\n\tcase float64:\n\t\treturn float32(ret), nil\n\tcase bool:\n\t\tif ret {\n\t\t\treturn 1.0, nil\n\t\t}\n\t\treturn 0.0, nil\n\tdefault:\n\t\tstr := strings.Replace(strings.TrimSpace(StringMust(val)), \" \", \"\", -1)\n\t\tf, err := strconv.ParseFloat(str, 32)\n\t\treturn float32(f), err\n\t}\n}", "func Float32(from 
float64, defaultValue ...float32) Float32Accessor {\n\tnv := &NullFloat32{}\n\tif safe := isSafeFloat(from, 32); !safe {\n\t\tnv.Error = ErrConvert\n\t\tif defaultFloat32(nv, defaultValue...) {\n\t\t\treturn nv\n\t\t}\n\t}\n\tv := float32(from)\n\tnv.P = &v\n\treturn nv\n}", "func NewFloat32(data arrow.ArrayData, shape, strides []int64, names []string) *Float32 {\n\ttsr := &Float32{tensorBase: *newTensor(arrow.PrimitiveTypes.Float32, data, shape, strides, names)}\n\tvals := tsr.data.Buffers()[1]\n\tif vals != nil {\n\t\ttsr.values = arrow.Float32Traits.CastFromBytes(vals.Bytes())\n\t\tbeg := tsr.data.Offset()\n\t\tend := beg + tsr.data.Len()\n\t\ttsr.values = tsr.values[beg:end]\n\t}\n\treturn tsr\n}", "func IsFloat32(v interface{}) bool {\n\tr := elconv.AsValueRef(reflect.ValueOf(v))\n\treturn r.Kind() == reflect.Float32\n}", "func (r *Random) Float32() float32 {\n\tr.m.Lock()\n\tdefer r.m.Unlock()\n\treturn r.rnd().Float32()\n}", "func FloatTag(name interface{}, value float32) Tag {\n\treturn &tag{\n\t\ttagType: TagFloat32,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (rw *RW) ToFloat32() float32 {\n\treturn *(*float32)(unsafe.Pointer(&rw.Value))\n}", "func Float32(a, b interface{}) int {\n\tf1, _ := a.(float32)\n\tf2, _ := b.(float32)\n\tswitch {\n\tcase f1 < f2:\n\t\treturn -1\n\tcase f1 > f2:\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}", "func StringFloat32(from string, defaultValue ...float32) Float32Accessor {\n\tnv := &NullFloat32{}\n\tpv, err := strconv.ParseFloat(from, 32)\n\tnv.Error = err\n\tif defaultFloat32(nv, defaultValue...) {\n\t\treturn nv\n\t}\n\tv := float32(pv)\n\tnv.P = &v\n\treturn nv\n}", "func (buff *Bytes) ToFloat32() float32 {\r\n\treturn *(*float32)(unsafe.Pointer(&(*buff)[0]))\r\n}", "func anyToFloat32(i interface{}, def ...float32) float32 {\n\tvar defV float32 = 0\n\tif len(def) > 0 {\n\t\tdefV = def[0]\n\t}\n\tif i == nil {\n\t\treturn defV\n\t}\n\tswitch value := i.(type) {\n\tcase float32:\n\t\treturn value\n\tcase float64:\n\t\treturn float32(value)\n\tcase []byte:\n\t\treturn decodeToFloat32(value)\n\tdefault:\n\t\tv, _ := strconv.ParseFloat(anyToString(i), 64)\n\t\treturn float32(v)\n\t}\n}", "func PtrFloat32(v float32) *float32 { return &v }", "func Float32(f *frm.Field, inp ...string) {\n\tf64, err := strconv.ParseFloat(strings.TrimSpace(inp[0]), 32)\n\tif err != nil {\n\t\t//Return error if input string failed to convert.\n\t\tf.Err = err.Error()\n\t\treturn\n\t}\n\tnum := float32(f64)\n\tf.Value = num\n\n\tif !f.Required && num == 0 {\n\t\t//f.ValueFloat32 is zero by default so assigning zero isn't required\n\t\treturn\n\t}\n\n\tif f.Min != nil && num < f.Min.(float32) || f.Max != nil && num > f.Max.(float32) {\n\t\tf.Err = fmt.Sprintf(\"Must be between %v and %v.\", f.Min, f.Max)\n\t\treturn\n\t}\n\n\tif rem := toFixed32(math.Mod(f64, float64(f.Step)), 6); rem != 0 {\n\t\tf.Err = fmt.Sprintf(\"Please enter a valid value. 
The two nearest values are %v and %v.\", num-rem, num-rem+f.Step)\n\t}\n}", "func PtrFloat(v float32) *float32 { return &v }", "func Float32() float32 { return globalRand.Float32() }", "func Float32() float32 { return globalRand.Float32() }", "func Float32() float32 { return globalRand.Float32() }", "func FloatMarshalText(x *big.Float,) ([]byte, error)", "func Floats32(k string, v []float32) Field {\n\treturn Field{Key: k, Value: valf.Floats32(v)}\n}", "func (client PrimitiveClient) PutFloat(complexBody FloatWrapper) (result autorest.Response, err error) {\n req, err := client.PutFloatPreparer(complexBody)\n if err != nil {\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", nil , \"Failure preparing request\")\n }\n\n resp, err := client.PutFloatSender(req)\n if err != nil {\n result.Response = resp\n return result, autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure sending request\")\n }\n\n result, err = client.PutFloatResponder(resp)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"complexgroup.PrimitiveClient\", \"PutFloat\", resp, \"Failure responding to request\")\n }\n\n return\n}", "func (p Pointer) Float32LE(offset int) float32 {\n\treturn *(*float32)(unsafe.Pointer(uintptr(int(p) + offset)))\n}", "func (f Float32) Serialize() ([]byte, error) {\n\tvar buf bytes.Buffer\n\tbinary.Write(&buf, binary.LittleEndian, float32(f))\n\treturn buf.Bytes(), nil\n}", "func EqualFloat32(reporter interface{}, want, got float32) {\n\tif want != got {\n\t\treportError(reporter, &failedFloatCompMsg{float64(want), float64(got)})\n\t}\n}", "func FloatMod32(val, mod float32) float32 {\n\treturn float32(int(math.Round(float64(val/mod)))) * mod\n}", "func saveReturnFloat32(v *float32, def interface{}) interface{} {\n\tif v == nil {\n\t\treturn def\n\t} else {\n\t\treturn *v\n\t}\n}", "func (w *Writer) WriteFloat(f float64, bitSize int) {\n\tif f != f {\n\t\tw.writeByte(TagNaN)\n\t\treturn\n\t}\n\tif f > math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagPos})\n\t\treturn\n\t}\n\tif f < -math.MaxFloat64 {\n\t\tw.write([]byte{TagInfinity, TagNeg})\n\t\treturn\n\t}\n\tw.writeByte(TagDouble)\n\tvar buf [64]byte\n\tw.write(strconv.AppendFloat(buf[:0], f, 'g', -1, bitSize))\n\tw.writeByte(TagSemicolon)\n}", "func (f Float) Encode(w io.Writer) error {\n\treturn util.WriteFloat32(w, float32(f))\n}", "func Float32Ptr(v float32) *float32 { return &v }", "func (r *Rand) Float32() float32 {\n\tif x, err := r.cryptoRand.Float32(); err == nil {\n\t\treturn x\n\t}\n\treturn r.mathRand.Float32()\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}" ]
[ "0.845533", "0.81141347", "0.809199", "0.67059284", "0.6647784", "0.6629288", "0.66124403", "0.64545256", "0.63951033", "0.624393", "0.62078434", "0.6200449", "0.6177897", "0.6116901", "0.6070945", "0.60492617", "0.600736", "0.5990098", "0.5899771", "0.58664477", "0.583457", "0.58262473", "0.57673776", "0.57588583", "0.57551676", "0.57534873", "0.5739928", "0.57313275", "0.5707767", "0.56791943", "0.5673462", "0.5655044", "0.5629874", "0.5618364", "0.5611376", "0.5608863", "0.56056476", "0.55968827", "0.5561382", "0.5557079", "0.5555343", "0.55349344", "0.5512056", "0.5511798", "0.5511436", "0.5492691", "0.54913855", "0.5486217", "0.5484828", "0.54630256", "0.546136", "0.54511386", "0.5425575", "0.53924555", "0.5380327", "0.53761864", "0.5375043", "0.5373478", "0.5362062", "0.5315737", "0.53139615", "0.5299859", "0.52962726", "0.52844465", "0.52844465", "0.52844465", "0.5279909", "0.52705956", "0.52458864", "0.5231485", "0.5226796", "0.5208452", "0.5206439", "0.51992375", "0.5177876", "0.51632565", "0.51621675", "0.51599497", "0.5143027", "0.5141568", "0.5134036", "0.51277137", "0.5105975", "0.5102911", "0.50967926", "0.50967926", "0.50967926", "0.50962657", "0.50935155", "0.5085915", "0.5083127", "0.5081256", "0.5071346", "0.5064607", "0.50377005", "0.5037477", "0.5035399", "0.5018437", "0.5008138", "0.49971104" ]
0.8554434
0
WriteFloat64 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, 
value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *ByteWriter) WriteFloat64(val float64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (w *Writer) WriteFloat64(v float64) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (bw *BufWriter) Float64(f float64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\tbw.stringBuf, bw.Error = Float64(f, bw.stringBuf[:0])\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.Write(bw.stringBuf)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) Float64(arg0 string) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", arg0)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, 
value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockValue) Float64(def float64) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", def)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (c Channel) WriteFloat64(name string, value float64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_double(\n\t\tc.handle,\n\t\tcName,\n\t\tC.double(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (p *Stream) WriteFloat64(value float64) {\n\tif value == 0 {\n\t\tp.writeFrame[p.writeIndex] = 4\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else {\n\t\tv := math.Float64bits(value)\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 5\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t} else {\n\t\t\tp.PutBytes([]byte{\n\t\t\t\t5,\n\t\t\t\tbyte(v),\n\t\t\t\tbyte(v >> 8),\n\t\t\t\tbyte(v >> 16),\n\t\t\t\tbyte(v >> 24),\n\t\t\t\tbyte(v >> 32),\n\t\t\t\tbyte(v >> 40),\n\t\t\t\tbyte(v >> 48),\n\t\t\t\tbyte(v >> 56),\n\t\t\t})\n\t\t}\n\t}\n}", "func WriteFloat64(buf io.Writer, v float64) (n int, err error) {\n\tbits := math.Float64bits(v)\n\tbits = bits ^ (-(bits >> 63) | (1 << 63))\n\tdata := make([]byte, 8)\n\tbinary.BigEndian.PutUint64(data, bits)\n\treturn buf.Write(data)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, 
writerArgs...)...)}\n}", "func WriteFloat64(src []float64, dst Floating) int {\n\tlength := min(dst.Len(), len(src))\n\tfor i := 0; i < length; i++ {\n\t\tdst.SetSample(i, float64(src[i]))\n\t}\n\treturn ChannelLength(length, dst.Channels())\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (w *FormSerializationWriter) WriteFloat64Value(key string, value *float64) error {\n\tif key != \"\" && value != nil {\n\t\tw.writePropertyName(key)\n\t}\n\tif value != nil {\n\t\tw.writeRawValue(strconv.FormatFloat(*value, 'f', -1, 64))\n\t}\n\tif key != \"\" && value != nil {\n\t\tw.writePropertySeparator()\n\t}\n\treturn nil\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn ex.FlagSet.Float64(name, value, usage)\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn Global.Float64(name, value, usage)\n}", "func (instance *Instance) SetFloat64(fieldName string, value float64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64);\n\tFloat64Var(p, name, value, usage);\n\treturn p;\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (f *Flagger) Float64(name, shorthand string, value float64, usage string) {\n\tf.cmd.Flags().Float64P(name, shorthand, value, usage)\n\tf.cfg.BindPFlag(name, f.cmd.Flags().Lookup(name))\n}", "func TestFloat64(t *testing.T) {\n\tvalor := 12.34661\n\tt.Logf(\"valor:[%f]\", valor)\n\tvalor = utl.RoundFloat64(valor, 2)\n\tt.Logf(\"valor:[%f]\", valor)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) Float64(n float64) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 64)\n}", "func (w *ByteWriter) MustWriteFloat64(val float64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (m Measurement) AddFloat64(name string, value float64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (out *OutBuffer) WriteFloat64LE(v float64) {\n\tout.WriteUint64LE(math.Float64bits(v))\n}", "func Float64(name, description, unit string) *Float64Measure {\n\tmi := registerMeasureHandle(name, description, unit)\n\treturn &Float64Measure{mi}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, 
bitLength, value}, writerArgs...)...)}\n}", "func (v *Value) Float64() float64 {\n\tswitch {\n\tcase v.fvalOk:\n\tcase v.ivalOk:\n\t\tv.fval = float64(v.ival)\n\t\tv.fvalOk = true\n\tcase v.svalOk:\n\t\t// Perform a best-effort conversion from string to float64.\n\t\tv.fval = 0.0\n\t\tstrs := matchFloat.FindStringSubmatch(v.sval)\n\t\tif len(strs) >= 2 {\n\t\t\tv.fval, _ = strconv.ParseFloat(strs[1], 64)\n\t\t}\n\t\tv.fvalOk = true\n\t}\n\treturn v.fval\n}", "func (p *PoolAllocator) Float64() Floating {\n\ts := p.f64.Get().(*f64)\n\ts.channels = channels(p.Channels)\n\ts.buffer = s.buffer[:p.Length*p.Channels]\n\treturn s\n}", "func RegisterFloat64(key string, def float64, description string) onion.Float {\n\tsetDescription(key, description)\n\treturn o.RegisterFloat64(key, def)\n}", "func (c *Configurator) Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64)\n\n\tc.Float64Var(p, name, value, usage)\n\n\treturn p\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Float) SetFloat64(x float64) *Float {}", "func (s *StressFlag) Float64(name string, def float64, usage string) *float64 {\n\tv := def\n\treturn &v\n}", "func (fw *Writer) PutFloat64Field(addr biopb.Coord, v float64) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutFloat64(v)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64(r interface{}, err error) (float64, error) {\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tswitch r := r.(type) {\n\tcase float64:\n\t\treturn r, err\n\tcase []byte:\n\t\tn, err := strconv.ParseFloat(string(r), 64)\n\t\treturn n, err\n\tcase string:\n\t\tn, err := strconv.ParseFloat(r, 64)\n\t\treturn n, err\n\tcase nil:\n\t\treturn 0, simplesessions.ErrNil\n\t}\n\treturn 0, simplesessions.ErrAssertType\n}", "func (nvp *NameValues) PtrFloat64(name string) (*float64, bool) 
{\n\tvalue, exists := nvp.Float64(name)\n\treturn &value, exists\n}", "func (s *Structure) Float64(isMaster bool, cmd string, params ...interface{}) (reply float64, err error) {\n\tconn := s.getConn(isMaster)\n\tif conn == nil {\n\t\treturn constant.ZeroFLOAT64, configNotExistsOrLoad(s.InstanceName, isMaster)\n\t}\n\n\treply, err = redis.Float64(conn.Do(cmd, params...))\n\tconn.Close()\n\n\treturn reply, err\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockValueMockRecorder) Float64(def interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Float64\", reflect.TypeOf((*MockValue)(nil).Float64), def)\n}", "func (e *Encoder) Float64(v float64) (int, error) {\n\treturn e.uint64(math.Float64bits(v))\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func Float64(k string, v float64) Field {\n\treturn Field{Key: k, Value: valf.Float64(v)}\n}", "func MeasureFloat64(name string, field string, value float64) Measurement {\n\treturn NewMeasurement(name).AddFloat64(field, value)\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (z *Rat) SetFloat64(f float64) *Rat {}", "func (nvp *NameValues) Float64(name string) (float64, bool) {\n\n\tif !nvp.prepared {\n\t\tnvp.prepare()\n\t}\n\n\tvar value float64\n\n\tname = strings.ToLower(name)\n\ttmp, exists := nvp.Pair[name]\n\tif exists {\n\t\tvalue, _ = strconv.ParseFloat(tmp.(string), 64)\n\t}\n\n\treturn value, exists\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64() float64 {\n\tmu.Lock()\n\tres := r.Float64()\n\tmu.Unlock()\n\treturn res\n}", "func (feature Feature) SetFieldFloat64(index int, value float64) 
{\n\tC.OGR_F_SetFieldDouble(feature.cval, C.int(index), C.double(value))\n}", "func WriteDouble(buffer []byte, offset int, value float64) {\n WriteUInt64(buffer, offset, math.Float64bits(value))\n}", "func (wfgb *WithFieldsGroupBy) Float64(ctx context.Context) (_ float64, err error) {\n\tvar v []float64\n\tif v, err = wfgb.Float64s(ctx); err != nil {\n\t\treturn\n\t}\n\tswitch len(v) {\n\tcase 1:\n\t\treturn v[0], nil\n\tcase 0:\n\t\terr = &NotFoundError{withfields.Label}\n\tdefault:\n\t\terr = fmt.Errorf(\"ent: WithFieldsGroupBy.Float64s returned %d results when one was expected\", len(v))\n\t}\n\treturn\n}", "func FloatFloat64(val float64) (out *big.Float, err error) {\n\tout = new(big.Float).SetFloat64(val)\n\treturn\n}", "func (m *Message) putFloat64(v float64) {\n\tb := m.bufferForPut(8)\n\tdefer b.Advance(8)\n\n\tbinary.LittleEndian.PutUint64(b.Bytes[b.Offset:], math.Float64bits(v))\n}", "func Float64(name string) (float64, error) {\n\tf, err := strconv.ParseFloat(String(name), 64)\n\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to decode input %q as float64: %w\", name, err)\n\t}\n\n\treturn f, nil\n}", "func Float64(v *Value, def float64) float64 {\n\tf, err := v.Float64()\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn f\n}", "func (form *FormData) Float64(key string, target *float64, defaultValue float64) *FormData {\n\treturn form.mustValue(key, target, defaultValue)\n}", "func (f Float) Float64() float64 {\n\tpanic(\"not yet implemented\")\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Float) SetUint64(x uint64) *Float {}", "func (j *JSONData) Float64(path ...interface{}) (float64, error) {\n\tjson, err := j.get(path...)\n\treturn json.MustFloat64(), err\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, 
len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64F(name string, value float64, usage string) *float64 {\n\treturn Global.Float64F(name, value, usage)\n}", "func (i *InsertFactBuilder) OFloat64(f float64, unitID uint64) *InsertFactBuilder {\n\ti.fact.Object = AFloat64(f, unitID)\n\treturn i\n}", "func NewFloat64Field(name string, value float64) *Float64Field {\n\tvar checks []Check\n\treturn &Float64Field{name, value, checks}\n}", "func NewFloat64Metric(name string, description string, unit string, aggregation Aggregation, tagNames []string) (*Float64Metric, error) {\n\tif name == \"\" {\n\t\treturn nil, nil\n\t}\n\n\ttagKeys, err := getTagKeysFromNames(tagNames)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create metric %q because of tag creation failure: %v\", name, err)\n\t}\n\n\tvar aggregationMethod *view.Aggregation\n\tswitch aggregation {\n\tcase LastValue:\n\t\taggregationMethod = view.LastValue()\n\tcase Sum:\n\t\taggregationMethod = view.Sum()\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unknown aggregation option %q\", aggregation)\n\t}\n\n\tmeasure := stats.Float64(name, description, unit)\n\tnewView := &view.View{\n\t\tName: name,\n\t\tMeasure: measure,\n\t\tDescription: description,\n\t\tAggregation: aggregationMethod,\n\t\tTagKeys: tagKeys,\n\t}\n\tview.Register(newView)\n\n\tmetric := Float64Metric{name, measure}\n\treturn &metric, nil\n}", "func Float64(f *float64) float64 {\n\tif f == nil {\n\t\treturn 0\n\t}\n\treturn *f\n}", "func (o *OutputState) ApplyFloat64(applier interface{}) Float64Output {\n\treturn o.ApplyT(applier).(Float64Output)\n}", "func (x *Rat) Float64() (f float64, exact bool) {}", "func NewFloat64Gauge(name string, mos ...GaugeOptionApplier) (g Float64Gauge) {\n\tg.commonMetric = newGauge(name, Float64ValueKind, mos...)\n\treturn\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64(key string, val float64) Field {\n\treturn Field{Key: key, Type: core.Float64Type, Integer: int64(math.Float64bits(val))}\n}", "func (out *OutBuffer) WriteFloat64BE(v float64) {\n\tout.WriteUint64BE(math.Float64bits(v))\n}", "func (mr *MockIOPackageMockRecorder) WriteUint64(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint64\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint64), arg0, arg1, arg2)\n}", "func (wfs *WithFieldsSelect) Float64(ctx context.Context) (_ float64, err error) {\n\tvar v []float64\n\tif v, err = wfs.Float64s(ctx); err != nil {\n\t\treturn\n\t}\n\tswitch len(v) {\n\tcase 1:\n\t\treturn v[0], nil\n\tcase 
0:\n\t\terr = &NotFoundError{withfields.Label}\n\tdefault:\n\t\terr = fmt.Errorf(\"ent: WithFieldsSelect.Float64s returned %d results when one was expected\", len(v))\n\t}\n\treturn\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (r *Reader) Float64() float64 {\n\treturn math.Float64frombits(r.Uint64())\n}", "func Float64(key string, val float64) Tag {\n\treturn Tag{key: key, tType: float64Type, floatVal: val}\n}", "func (num Number) Float64() (float64, bool) {\n\tf, err := json.Number(num).Float64()\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\treturn f, true\n}", "func (m *MockSeriesRef) Write(ctx context.Context, timestamp time.UnixNano, value float64, unit time.Unit, annotation []byte, wOpts series.WriteOptions) (bool, series.WriteType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, timestamp, value, unit, annotation, wOpts)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(series.WriteType)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}" ]
[ "0.8559333", "0.84461194", "0.8298874", "0.74783444", "0.74164194", "0.70990425", "0.70209634", "0.69555604", "0.69520307", "0.68594766", "0.6851309", "0.6647621", "0.66071135", "0.6567019", "0.65221953", "0.649533", "0.64684814", "0.63795465", "0.63250566", "0.6324369", "0.6299148", "0.6298606", "0.6282694", "0.62511504", "0.61588144", "0.61019063", "0.5998652", "0.59985083", "0.5992436", "0.5987567", "0.598755", "0.5983944", "0.5884005", "0.58834", "0.5854347", "0.5853909", "0.5849966", "0.579517", "0.5761145", "0.57540405", "0.57509726", "0.5745495", "0.5724341", "0.56998503", "0.5699832", "0.568329", "0.5665348", "0.5663167", "0.5659026", "0.56489915", "0.5625667", "0.5610147", "0.56056565", "0.56014436", "0.5592139", "0.5578692", "0.5576262", "0.5554756", "0.5551811", "0.55478907", "0.55478525", "0.5540436", "0.5533913", "0.55108786", "0.55092996", "0.55088717", "0.55024767", "0.54997957", "0.5497701", "0.5478833", "0.5478542", "0.54572386", "0.54492456", "0.5445328", "0.54417807", "0.5435696", "0.54353875", "0.5427933", "0.54274094", "0.5425984", "0.54210395", "0.54131997", "0.54074776", "0.53968114", "0.5382159", "0.5376972", "0.5376523", "0.5375961", "0.53733677", "0.5367658", "0.53639215", "0.5351562", "0.5347343", "0.5345322", "0.53440994", "0.5337769", "0.53365105", "0.53340393", "0.5331127", "0.5324651" ]
0.8649441
0
WriteFloat64 is a helper method to define mock.On call logicalName string bitLength uint8 value float64 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call { return &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On("WriteFloat64", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, 
value}, writerArgs...)...)}\n}", "func (bw *BufWriter) Float64(f float64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\tbw.stringBuf, bw.Error = Float64(f, bw.stringBuf[:0])\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.Write(bw.stringBuf)\n}", "func (w *ByteWriter) WriteFloat64(val float64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (w *Writer) WriteFloat64(v float64) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c Channel) WriteFloat64(name string, value float64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_double(\n\t\tc.handle,\n\t\tcName,\n\t\tC.double(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor 
_i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) Float64(n float64) {\n\tw.buf = strconv.AppendFloat(w.buf, float64(n), 'g', -1, 64)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (p *Stream) WriteFloat64(value float64) {\n\tif value == 0 {\n\t\tp.writeFrame[p.writeIndex] = 4\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else {\n\t\tv := math.Float64bits(value)\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 5\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t} else {\n\t\t\tp.PutBytes([]byte{\n\t\t\t\t5,\n\t\t\t\tbyte(v),\n\t\t\t\tbyte(v >> 8),\n\t\t\t\tbyte(v >> 16),\n\t\t\t\tbyte(v >> 24),\n\t\t\t\tbyte(v >> 32),\n\t\t\t\tbyte(v >> 40),\n\t\t\t\tbyte(v >> 48),\n\t\t\t\tbyte(v >> 56),\n\t\t\t})\n\t\t}\n\t}\n}", "func (f *Flagger) Float64(name, shorthand string, value float64, usage string) {\n\tf.cmd.Flags().Float64P(name, shorthand, value, usage)\n\tf.cfg.BindPFlag(name, f.cmd.Flags().Lookup(name))\n}", "func Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64);\n\tFloat64Var(p, name, value, usage);\n\treturn p;\n}", "func WriteFloat64(src []float64, dst Floating) int {\n\tlength := min(dst.Len(), len(src))\n\tfor i := 0; i < length; i++ {\n\t\tdst.SetSample(i, float64(src[i]))\n\t}\n\treturn ChannelLength(length, dst.Channels())\n}", "func WriteFloat64(buf io.Writer, v float64) (n int, err error) {\n\tbits := math.Float64bits(v)\n\tbits = bits ^ (-(bits >> 63) | (1 << 63))\n\tdata := make([]byte, 8)\n\tbinary.BigEndian.PutUint64(data, bits)\n\treturn buf.Write(data)\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn Global.Float64(name, value, usage)\n}", "func (instance *Instance) SetFloat64(fieldName string, value float64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func Float64(name string, value float64, usage string) *float64 {\n\treturn ex.FlagSet.Float64(name, value, usage)\n}", "func (out *OutBuffer) 
WriteFloat64LE(v float64) {\n\tout.WriteUint64LE(math.Float64bits(v))\n}", "func RegisterFloat64(key string, def float64, description string) onion.Float {\n\tsetDescription(key, description)\n\treturn o.RegisterFloat64(key, def)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Float64(name, description, unit string) *Float64Measure {\n\tmi := registerMeasureHandle(name, description, unit)\n\treturn &Float64Measure{mi}\n}", "func (m *MockSession) Float64(arg0 string) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", arg0)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m Measurement) AddFloat64(name string, value float64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (c *Configurator) Float64(name string, value float64, usage string) *float64 {\n\tp := new(float64)\n\n\tc.Float64Var(p, name, value, usage)\n\n\treturn p\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (s *StressFlag) Float64(name string, def float64, usage string) *float64 {\n\tv := def\n\treturn &v\n}", "func (w *FormSerializationWriter) WriteFloat64Value(key string, value *float64) error {\n\tif key != \"\" && value != nil {\n\t\tw.writePropertyName(key)\n\t}\n\tif value != nil {\n\t\tw.writeRawValue(strconv.FormatFloat(*value, 'f', -1, 64))\n\t}\n\tif key != \"\" && value != nil {\n\t\tw.writePropertySeparator()\n\t}\n\treturn nil\n}", "func (m *MockValue) Float64(def float64) float64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Float64\", def)\n\tret0, _ := ret[0].(float64)\n\treturn ret0\n}", "func (b *Buffer) AppendFloat64(v float64) {\n\tb.buf = strconv.AppendFloat(b.buf, v, 'f', -1, 64)\n}", "func (z *Float) SetFloat64(x float64) *Float {}", "func Float64(name string) (float64, error) {\n\tf, err := strconv.ParseFloat(String(name), 64)\n\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to decode input %q as float64: %w\", name, err)\n\t}\n\n\treturn f, nil\n}", "func (nvp *NameValues) Float64(name string) (float64, bool) {\n\n\tif !nvp.prepared {\n\t\tnvp.prepare()\n\t}\n\n\tvar value 
float64\n\n\tname = strings.ToLower(name)\n\ttmp, exists := nvp.Pair[name]\n\tif exists {\n\t\tvalue, _ = strconv.ParseFloat(tmp.(string), 64)\n\t}\n\n\treturn value, exists\n}", "func MeasureFloat64(name string, field string, value float64) Measurement {\n\treturn NewMeasurement(name).AddFloat64(field, value)\n}", "func Float64Arg(register Register, name string, options ...ArgOptionApplyer) *float64 {\n\tp := new(float64)\n\t_ = Float64ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Float64(i float64) string {\n\treturn strconv.FormatFloat(i, 'f', -1, 64)\n}", "func Float64(r interface{}, err error) (float64, error) {\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tswitch r := r.(type) {\n\tcase float64:\n\t\treturn r, err\n\tcase []byte:\n\t\tn, err := strconv.ParseFloat(string(r), 64)\n\t\treturn n, err\n\tcase string:\n\t\tn, err := strconv.ParseFloat(r, 64)\n\t\treturn n, err\n\tcase nil:\n\t\treturn 0, simplesessions.ErrNil\n\t}\n\treturn 0, simplesessions.ErrAssertType\n}", "func NewFloat64Gauge(name string, mos ...GaugeOptionApplier) (g Float64Gauge) {\n\tg.commonMetric = newGauge(name, Float64ValueKind, mos...)\n\treturn\n}", "func WriteDouble(buffer []byte, offset int, value float64) {\n WriteUInt64(buffer, offset, math.Float64bits(value))\n}", "func (mr *MockValueMockRecorder) Float64(def interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Float64\", reflect.TypeOf((*MockValue)(nil).Float64), def)\n}", "func (f *FunctionCall) CallFloat64() float64 {\n\tif f.NumArgs > len(f.Words) {\n\t\tpanic(\"bad NumArgs\")\n\t}\n\tif f.addr == nil {\n\t\tpanic(\"variadic: CallFloat64 called with nil function addr\")\n\t}\n\treturn float64(C.VariadicCallDouble(unsafe.Pointer(f)))\n}", "func TestFloat64(t *testing.T) {\n\tvalor := 12.34661\n\tt.Logf(\"valor:[%f]\", valor)\n\tvalor = utl.RoundFloat64(valor, 2)\n\tt.Logf(\"valor:[%f]\", valor)\n}", "func (z *Rat) SetFloat64(f float64) *Rat {}", "func Float64ToStr(v float64, precision int) (str string) {\n\tvar match []string\n\tstr = strconv.FormatFloat(v, 'f', precision, 64)\n\tmatch = reFloat.FindStringSubmatch(str)\n\t// log.Printf(\"str [%s], reFloat [%s], match %v\", str, reFloat, match)\n\tif match != nil {\n\t\tstr = match[1] + StrDelimit(match[2], \",\", 3) + match[3]\n\t}\n\treturn\n}", "func Float64(key string, val float64) Tag {\n\treturn Tag{key: key, tType: float64Type, floatVal: val}\n}", "func Float64(v *Value, def float64) float64 {\n\tf, err := v.Float64()\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn f\n}", "func (p *PoolAllocator) Float64() Floating {\n\ts := p.f64.Get().(*f64)\n\ts.channels = channels(p.Channels)\n\ts.buffer = s.buffer[:p.Length*p.Channels]\n\treturn s\n}", "func (v *Value) Float64() float64 {\n\tswitch {\n\tcase v.fvalOk:\n\tcase v.ivalOk:\n\t\tv.fval = float64(v.ival)\n\t\tv.fvalOk = true\n\tcase v.svalOk:\n\t\t// Perform a best-effort conversion from string to float64.\n\t\tv.fval = 0.0\n\t\tstrs := matchFloat.FindStringSubmatch(v.sval)\n\t\tif len(strs) >= 2 {\n\t\t\tv.fval, _ = strconv.ParseFloat(strs[1], 64)\n\t\t}\n\t\tv.fvalOk = true\n\t}\n\treturn v.fval\n}", "func (nvp 
*NameValues) PtrFloat64(name string) (*float64, bool) {\n\tvalue, exists := nvp.Float64(name)\n\treturn &value, exists\n}", "func (e *Encoder) Float64(v float64) (int, error) {\n\treturn e.uint64(math.Float64bits(v))\n}", "func Float64(k string, v float64) Field {\n\treturn Field{Key: k, Value: valf.Float64(v)}\n}", "func (o *OutputState) ApplyFloat64(applier interface{}) Float64Output {\n\treturn o.ApplyT(applier).(Float64Output)\n}", "func (w *ByteWriter) MustWriteFloat64(val float64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func Float64(name string, defaultValue float64) float64 {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tif f, err := strconv.ParseFloat(strVal, 64); err == nil {\n\t\t\treturn f\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func packFloat64(Data float64, pad *scratchpad) {\n\t// Allocate the bytes.\n\ta := make([]byte, 9)\n\n\t// Set the header.\n\ta[0] = 'F'\n\n\t// Cast the memory to a uint64.\n\ti := *(*uint64)(unsafe.Pointer(&Data))\n\n\t// Write the integer.\n\tbe64toh(i, a, 1)\n\n\t// Write to the pad.\n\tpad.endAppend(a...)\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (d LegacyDec) Float64() (float64, error) {\n\treturn strconv.ParseFloat(d.String(), 64)\n}", "func FloatFloat64(val float64) (out *big.Float, err error) {\n\tout = new(big.Float).SetFloat64(val)\n\treturn\n}", "func Float64(key string, def float64) float64 {\n\tif s := String(key, \"\"); s != \"\" {\n\t\tif d, err := strconv.ParseFloat(s, 64); err == nil {\n\t\t\treturn d\n\t\t} else {\n\t\t\tLog(key, err)\n\t\t}\n\t}\n\treturn def\n}", "func (f Float) Float64() float64 {\n\tpanic(\"not yet implemented\")\n}", "func (hm *HM) WriteFloat(addr int, val float64) error {\n\tbuf := make([]byte, int(unsafe.Sizeof(float64(0))))\n\tbinary.BigEndian.PutUint64(buf[:], math.Float64bits(val))\n\treturn hm.shm.WriteN(addr, buf)\n}", "func (c *Context) ParamFloat64(name string) float64 {\n\tf, _ := strconv.ParseFloat(c.Param(name), 64)\n\treturn f\n}", "func Float64ToStr(val float64) string {\n\treturn strconv.FormatFloat(val, 'f', -1, 64)\n}", "func Float64(val interface{}) float64 {\r\n\r\n\tswitch t := val.(type) {\r\n\tcase int:\r\n\t\treturn float64(t)\r\n\tcase int8:\r\n\t\treturn float64(t)\r\n\tcase int16:\r\n\t\treturn float64(t)\r\n\tcase int32:\r\n\t\treturn float64(t)\r\n\tcase int64:\r\n\t\treturn float64(t)\r\n\tcase uint:\r\n\t\treturn float64(t)\r\n\tcase uint8:\r\n\t\treturn float64(t)\r\n\tcase uint16:\r\n\t\treturn float64(t)\r\n\tcase uint32:\r\n\t\treturn float64(t)\r\n\tcase uint64:\r\n\t\treturn float64(t)\r\n\tcase float32:\r\n\t\treturn float64(t)\r\n\tcase float64:\r\n\t\treturn float64(t)\r\n\tcase bool:\r\n\t\tif t == true {\r\n\t\t\treturn float64(1)\r\n\t\t}\r\n\t\treturn float64(0)\r\n\tcase string:\r\n\t\tf, _ := strconv.ParseFloat(val.(string), 64)\r\n\t\treturn f\r\n\tdefault:\r\n\t\ts := String(val)\r\n\t\tf, _ := strconv.ParseFloat(s, 64)\r\n\t\treturn f\r\n\t}\r\n\r\n\tpanic(\"Reached\")\r\n}", "func MarshalF64(x float64, buf []byte, rem int) ([]byte, int, error) {\n\tif len(buf) < SizeHintF64 || rem < SizeHintF64 {\n\t\treturn buf, rem, ErrUnexpectedEndOfBuffer\n\t}\n\tbinary.BigEndian.PutUint64(buf, math.Float64bits(x))\n\treturn buf[SizeHintF64:], rem - SizeHintF64, nil\n}", "func (n StringNumber) Float64() float64 {\n\treturn float64(n)\n}", "func Float64F(name string, value float64, usage string) *float64 {\n\treturn 
Global.Float64F(name, value, usage)\n}", "func (feature Feature) SetFieldFloat64(index int, value float64) {\n\tC.OGR_F_SetFieldDouble(feature.cval, C.int(index), C.double(value))\n}", "func (p *Parser) Float64(i int, context string) float64 {\n\treturn p.NullFloat64(i, context).Value\n}", "func AppendFloat64(dst []byte, val float64) []byte {\n\tswitch {\n\tcase math.IsNaN(val):\n\t\treturn append(dst, \"\\xfb\\x7f\\xf8\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\tcase math.IsInf(val, 1):\n\t\treturn append(dst, \"\\xfb\\x7f\\xf0\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\tcase math.IsInf(val, -1):\n\t\treturn append(dst, \"\\xfb\\xff\\xf0\\x00\\x00\\x00\\x00\\x00\\x00\"...)\n\t}\n\tmajor := majorTypeSimpleAndFloat\n\tsubType := additionalTypeFloat64\n\tn := math.Float64bits(val)\n\tdst = append(dst, byte(major|subType))\n\tfor i := uint(1); i <= 8; i++ {\n\t\tb := byte(n >> ((8 - i) * 8))\n\t\tdst = append(dst, b)\n\t}\n\treturn dst\n}", "func (m *Message) putFloat64(v float64) {\n\tb := m.bufferForPut(8)\n\tdefer b.Advance(8)\n\n\tbinary.LittleEndian.PutUint64(b.Bytes[b.Offset:], math.Float64bits(v))\n}", "func (z *Float) SetUint64(x uint64) *Float {}", "func EncodeFloat64(d []byte, v *float64) {\n\thead := (*reflect.SliceHeader)(unsafe.Pointer(&d))\n\tvalue := (*float64)(unsafe.Pointer(head.Data))\n\t*value = *v\n}", "func (c *Configurator) Float64F(name string, value float64, usage string) *float64 {\n\tp := new(float64)\n\n\tc.Float64VarF(p, name, value, usage)\n\n\treturn p\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (fw *Writer) PutFloat64Field(addr biopb.Coord, v float64) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutFloat64(v)\n}", "func (r *Response) Float64() (float64, error) {\n\treturn strconv.ParseFloat(r.String(), 64)\n}", "func (form *FormData) Float64(key string, target *float64, defaultValue float64) *FormData {\n\treturn form.mustValue(key, target, defaultValue)\n}", "func Float64(key string, val float64) Field {\n\treturn Field{Key: key, Type: core.Float64Type, Integer: int64(math.Float64bits(val))}\n}", "func (i *InsertFactBuilder) OFloat64(f float64, unitID uint64) *InsertFactBuilder {\n\ti.fact.Object = AFloat64(f, unitID)\n\treturn i\n}", "func anyToFloat64(i interface{}, def ...float64) float64 {\n\tvar defValue float64 = 0\n\tif len(def) > 0 {\n\t\tdefValue = def[0]\n\t}\n\tif i == nil {\n\t\treturn defValue\n\t}\n\tswitch value := i.(type) {\n\tcase float32:\n\t\treturn float64(value)\n\tcase float64:\n\t\treturn value\n\tcase []byte:\n\t\treturn decodeToFloat64(value)\n\tdefault:\n\t\tv, _ := strconv.ParseFloat(anyToString(i), 64)\n\t\treturn v\n\t}\n}", "func (this *channelStruct) WriteFloats(samples []float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func (s *Structure) Float64(isMaster bool, cmd string, params ...interface{}) (reply float64, err error) {\n\tconn := s.getConn(isMaster)\n\tif conn == nil {\n\t\treturn constant.ZeroFLOAT64, configNotExistsOrLoad(s.InstanceName, isMaster)\n\t}\n\n\treply, err = redis.Float64(conn.Do(cmd, params...))\n\tconn.Close()\n\n\treturn reply, err\n}", "func (j *JSONData) Float64(path ...interface{}) (float64, error) {\n\tjson, err := 
j.get(path...)\n\treturn json.MustFloat64(), err\n}", "func TestFloat64Type(t *testing.T) {\n\n\t// Arrange.\n\n\tvar f float64\n\tf = 19.99\n\n\t// Act.\n\n\tresult := reflect.TypeOf(f).String()\n\n\t// Assert.\n\n\tassert.Equal(t, \"float64\", result)\n}", "func (f F128d16) Float64() (float64, error) {\n\tn := f.AsFloat64()\n\tif strconv.FormatFloat(n, 'g', -1, 64) != f.String() {\n\t\treturn 0, errDoesNotFitInFloat64\n\t}\n\treturn n, nil\n}", "func (mr *MockSessionMockRecorder) Float64(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Float64\", reflect.TypeOf((*MockSession)(nil).Float64), arg0)\n}", "func (wfs *WithFieldsSelect) Float64(ctx context.Context) (_ float64, err error) {\n\tvar v []float64\n\tif v, err = wfs.Float64s(ctx); err != nil {\n\t\treturn\n\t}\n\tswitch len(v) {\n\tcase 1:\n\t\treturn v[0], nil\n\tcase 0:\n\t\terr = &NotFoundError{withfields.Label}\n\tdefault:\n\t\terr = fmt.Errorf(\"ent: WithFieldsSelect.Float64s returned %d results when one was expected\", len(v))\n\t}\n\treturn\n}" ]
[ "0.8370667", "0.81056327", "0.80675614", "0.69926786", "0.6889711", "0.68633646", "0.68429995", "0.67940533", "0.6770056", "0.66764176", "0.6581765", "0.6567619", "0.6555145", "0.6544424", "0.6535084", "0.64763635", "0.64600545", "0.643431", "0.6362032", "0.63282436", "0.62554944", "0.6247578", "0.618334", "0.61744", "0.6162228", "0.61607045", "0.615503", "0.61282027", "0.6070264", "0.60512125", "0.60434", "0.60308295", "0.59329724", "0.5895244", "0.58917964", "0.58843666", "0.5869441", "0.58632946", "0.5845724", "0.58209234", "0.57468414", "0.57407504", "0.5724082", "0.57152474", "0.571467", "0.56955796", "0.56941825", "0.56805116", "0.5670109", "0.56556606", "0.56534016", "0.56478167", "0.5616271", "0.56154954", "0.5596106", "0.55944", "0.55929434", "0.55922127", "0.55834407", "0.55765647", "0.5571865", "0.55640763", "0.5561571", "0.5560252", "0.555769", "0.55431825", "0.55186874", "0.5518134", "0.55037993", "0.5501062", "0.54940766", "0.5483257", "0.5478254", "0.54767305", "0.54628336", "0.54603857", "0.54580176", "0.54547715", "0.54501957", "0.54467297", "0.5439873", "0.5439032", "0.5437833", "0.5435005", "0.542969", "0.5420533", "0.54158527", "0.54151917", "0.541229", "0.54101616", "0.54036885", "0.5402543", "0.54014546", "0.53997654", "0.537839", "0.5371453", "0.53704447", "0.53631806", "0.535935", "0.53559977" ]
0.84707
0
WriteInt16 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) 
[]byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Int16(arg0 string) int16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int16\", arg0)\n\tret0, _ := ret[0].(int16)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint16(arg0 string) uint16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint16\", arg0)\n\tret0, _ := ret[0].(uint16)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, 
numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteI16(p thrift.TProtocol, value int16, name string, field int16) error {\n\treturn WriteI16WithContext(context.Background(), p, value, name, field)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteInt16(buffer []byte, offset int, value int16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range 
writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint16(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint16\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint16), arg0, arg1, arg2)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteInt16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = 
append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestReadUint16FromBytes(t *testing.T) {\n\tvar file, _ = ioutil.TempFile(os.TempDir(), \"\")\n\tvar filePath = file.Name()\n\tdefer file.Close()\n\tdefer os.Remove(filePath)\n\n\tvar writer = bufio.NewWriter(file)\n\t// Little endian pow disposition:\n\t// 16^1 16^0 | 16^3 16^2\n\t// 0 x 0 4 | 0 x 0 1\n\t// 0 4 | 0 256\n\t// -----------------------------\n\t// TOTAL = 260\n\twriter.Write([]byte{0x04, 0x01})\n\twriter.Flush()\n\n\tfile.Seek(0, os.SEEK_SET)\n\n\tvar readerBag = fileReader.ByteReaderBag{File: file}\n\tvar number = fileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err != nil {\n\t\tt.Errorf(\"Expected no errors, got '%v'\", readerBag.Err)\n\t}\n\n\ttest.ExpectUint16(t, \"Number test\", 260, number)\n\n\tfileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err == nil {\n\t\tt.Errorf(\"Expected no bytes to read, but got no error\")\n\t}\n}", "func (m *MockBigInterface) Foo16(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo16 != nil {\n\t\treturn m.FnFoo16(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo16\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteUInt16(buffer []byte, offset int, value uint16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (w *Writer) WriteUint16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (m Measurement) AddInt16(name string, value int16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn 
&MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (instance *Instance) SetInt16(fieldName string, value int16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint16(t *testing.T) {\n\ttests := []struct {\n\t\tin uint16 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff}}, // Max 2-byte\n\t}\n\n\tt.Logf(\"Running uint16 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint16(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint16\n\t\terr = ReadUint16(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, 
arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) AppendUint16(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint16\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint16), arg0, arg1)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m Measurement) AddUInt16(name string, value uint16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (vb *keyWriter) WriteUint16(num uint16) (int, error) {\n\tbinary.BigEndian.PutUint16(vb.buffer[:2], num)\n\tvb.hash.Write(vb.buffer[:2])\n\treturn 2, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (instance *Instance) SetUint16(fieldName string, value uint16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestRead16BitId(t *testing.T) {\n\tsfdpV1_5 := []byte{\n\t\t// 0x00: Magic\n\t\t0x53, 0x46, 0x44, 0x50,\n\t\t// 0x04: Version v1.5\n\t\t0x05, 0x01,\n\t\t// 0x06: Number of headers, 0 means there's 1 header\n\t\t0x00,\n\t\t// 0x07: Unused\n\t\t0x00,\n\t\t// 0x08: Parameter header ID LSB\n\t\t0xcd,\n\t\t// 0x09: Version v1.0\n\t\t0x00, 
0x01,\n\t\t// 0x0b: Number of DWORDS in the table\n\t\t0x01,\n\t\t// 0x0c: Pointer\n\t\t0x10, 0x00, 0x00,\n\t\t// 0x0f: ID MSB\n\t\t0xab,\n\t\t// 0x10: DWORD 0\n\t\t0x78, 0x56, 0x34, 0x12,\n\t}\n\n\tr := bytes.NewReader(sfdpV1_5)\n\tsfdp, err := Read(r)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\ttestParam := Param{\n\t\tTable: 0xabcd,\n\t\tDword: 0,\n\t\tShift: 0,\n\t\tBits: 32,\n\t}\n\n\tval, err := sfdp.Param(testParam)\n\tif err != nil {\n\t\tt.Error(err)\n\t}\n\tvar want int64 = 0x12345678\n\tif val != want {\n\t\tt.Errorf(\"sfdp.Param() = %#x; want %#x\", val, want)\n\t}\n}", "func (vb *keyWriter) WriteInt16(num int16) (int, error) {\n\treturn vb.WriteUint16(uint16(num))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (b *Bus) Write16(addr mirv.Address, v uint16) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write16(addr-blk.s, v)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t Int16) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteInt16(int16(t))\n\treturn aWriterPool.Put(lw)\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockIOPackage) ReadUint16(arg0 []byte, arg1 int) (uint16, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint16\", arg0, arg1)\n\tret0, _ := ret[0].(uint16)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar 
_ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Int16(key string, val int16) Field {\n\treturn Field{Key: key, Type: core.Int16Type, Integer: int64(val)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func TestSet16(t *testing.T) {\n\thm, 
_ := NewHashMap(16)\n\ttestSetN(testN, hm)\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func ExpectUint16(t *testing.T, field string, expected uint16, found uint16) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func Int16(k string, v int16) Field {\n\treturn Field{Key: k, Value: valf.Int16(v)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 2 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint16(out.GetContainer(), v)\n\tout.pos += 2\n\treturn true\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockReader) Short() (int16, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Short\")\n\tret0, _ := ret[0].(int16)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *bitWriter) addBits16Clean(value uint16, bits uint8) {\n\tb.bitContainer |= uint64(value) << (b.nBits & 63)\n\tb.nBits += bits\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret 
:= m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWriteTagHeader(t *testing.T) {\n\tt.Parallel()\n\n\tbuf := new(bytes.Buffer)\n\tbw := bufio.NewWriter(buf)\n\tdst := make([]byte, 4)\n\n\tif err := writeTagHeader(bw, dst, 15351, 4); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := bw.Flush(); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif !bytes.Equal(thb, buf.Bytes()) {\n\t\tt.Fatalf(\"Expected %v, got %v\", thb, buf.Bytes())\n\t}\n}", "func (_m *WriteCloser) Write(p []byte) (int, error) {\n\tret := _m.Called(p)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(p)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(p)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func FastWrite(out io.Writer, x []int16) (int, error) {\n\tconst sizeOfScalar = 2\n\tif len(x) == 0 {\n\t\treturn 0, nil\n\t}\n\tnumBytes := len(x) * sizeOfScalar\n\tvar xb []byte\n\thx := (*reflect.SliceHeader)(unsafe.Pointer(&xb))\n\thx.Cap = numBytes\n\thx.Len = numBytes\n\thx.Data = uintptr(unsafe.Pointer(&x[0]))\n\tn, err := out.Write(xb)\n\truntime.KeepAlive(x)\n\treturn n, err\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func PutBufioWriter16M(w *bufio.Writer) bool {\n\tif w == nil {\n\t\treturn false\n\t}\n\tif l := w.Size(); l < 16777216 || l >= 33554432 {\n\t\treturn PutBufioWriter(w)\n\t}\n\tw.Reset(nil) // to not keep the parent writer alive\n\tputw16M(w)\n\treturn true\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) 
{\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}" ]
[ "0.7762862", "0.7650404", "0.736158", "0.7335879", "0.7232687", "0.7005769", "0.67486763", "0.66867214", "0.6286402", "0.6165782", "0.6029808", "0.6028487", "0.6002215", "0.59917396", "0.5969979", "0.5960438", "0.59112614", "0.591019", "0.5908368", "0.58890295", "0.58355784", "0.5789036", "0.57361174", "0.57266474", "0.570408", "0.5691842", "0.56876963", "0.5664208", "0.565013", "0.5636765", "0.56348634", "0.561385", "0.5609762", "0.5593225", "0.55220896", "0.5515983", "0.54833186", "0.54560333", "0.5452918", "0.541161", "0.5404817", "0.5390313", "0.5379949", "0.53545773", "0.532093", "0.53084666", "0.5271151", "0.5260414", "0.5260053", "0.5254547", "0.52511746", "0.5241643", "0.5240478", "0.5233083", "0.52327305", "0.52293986", "0.5214683", "0.51888126", "0.51884663", "0.5162894", "0.5159825", "0.5158536", "0.51557326", "0.5142935", "0.5132661", "0.5127405", "0.51263833", "0.51175827", "0.51162124", "0.51102376", "0.5103422", "0.5096046", "0.50956994", "0.50873667", "0.5086165", "0.5080179", "0.5069436", "0.5059977", "0.5055799", "0.5050042", "0.504805", "0.5045086", "0.50282055", "0.5022989", "0.5009071", "0.49965146", "0.49952793", "0.49783802", "0.49733505", "0.49686083", "0.49534485", "0.49331108", "0.49170476", "0.49152562", "0.48965663", "0.48965374", "0.48955277", "0.48912326", "0.48897415", "0.48777863" ]
0.7914389
0
WriteInt16 is a helper method to define mock.On call logicalName string bitLength uint8 value int16 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call { return &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On("WriteInt16", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) 
WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (w *Writer) WriteInt16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func WriteI16(p thrift.TProtocol, value int16, name string, field int16) error {\n\treturn WriteI16WithContext(context.Background(), p, value, name, field)\n}", "func (w *Writer) WriteUint16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func WriteInt16(buffer []byte, offset int, value int16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (instance *Instance) SetInt16(fieldName string, value int16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (m Measurement) AddInt16(name string, value int16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func WriteUInt16(buffer []byte, offset int, value uint16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (m *MockSession) Int16(arg0 string) int16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int16\", arg0)\n\tret0, _ := ret[0].(int16)\n\treturn ret0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint16(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint16\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint16), arg0, arg1, arg2)\n}", "func (instance *Instance) SetUint16(fieldName string, value uint16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (m Measurement) AddUInt16(name string, value uint16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (vb *keyWriter) WriteInt16(num int16) (int, error) {\n\treturn vb.WriteUint16(uint16(num))\n}", "func (m *MockSession) Uint16(arg0 string) uint16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint16\", arg0)\n\tret0, _ := ret[0].(uint16)\n\treturn ret0\n}", "func FormatInt16(name string) string {\n\treturn formatIntFunction(name, true)\n}", "func (vb *keyWriter) WriteUint16(num uint16) (int, error) {\n\tbinary.BigEndian.PutUint16(vb.buffer[:2], num)\n\tvb.hash.Write(vb.buffer[:2])\n\treturn 2, nil\n}", "func (mr *MockIOPackageMockRecorder) AppendUint16(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint16\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint16), arg0, arg1)\n}", "func (b *Bus) Write16(addr mirv.Address, v uint16) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write16(addr-blk.s, v)\n}", "func 
Uint16(name string, value uint16, usage string) *uint16 {\n\treturn Environment.Uint16(name, value, usage)\n}", "func (b *Buffer) AppendInt16(v int16) {\n\tb.AppendInt64(int64(v))\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 2 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint16(out.GetContainer(), v)\n\tout.pos += 2\n\treturn true\n}", "func FormatUint16(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) {\n\tout.Append(byte(v), byte(v>>8))\n}", "func Int16(k string, v int16) Field {\n\treturn Field{Key: k, Value: valf.Int16(v)}\n}", "func WriteUint16(data []byte, x uint, v uint16) {\n\tif x+1 >= uint(len(data)) {\n\t\treturn\n\t}\n\tdata[x] = byte(v >> 8)\n\tdata[x+1] = byte(v)\n}", "func (t Int16) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteInt16(int16(t))\n\treturn aWriterPool.Put(lw)\n}", "func Int16Arg(register Register, name string, options ...ArgOptionApplyer) *int16 {\n\tp := new(int16)\n\t_ = Int16ArgVar(register, p, name, options...)\n\treturn p\n}", "func (bio *BinaryIO) WriteUint16(off int64, value uint16) {\n\tvar buf [2]byte\n\tbio.order.PutUint16(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (o *OutputState) ApplyUint16(applier interface{}) Uint16Output {\n\treturn o.ApplyT(applier).(Uint16Output)\n}", "func Int16(key string, val int16) Field {\n\treturn Field{Key: key, Type: core.Int16Type, Integer: int64(val)}\n}", "func (s *Streamer) Int16(v int16) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendInt16(s.buffer, v)\n\treturn s\n}", "func (b *Buffer) AppendInt16(x int16) error {\n\treturn b.appendInteger(x)\n}", "func (stream *Stream) WriteInt16(nval int16) {\n\tstream.ensure(6)\n\tn := stream.n\n\tvar val uint16\n\tif nval < 0 {\n\t\tval = uint16(-nval)\n\t\tstream.buf[n] = '-'\n\t\tn++\n\t} else {\n\t\tval = uint16(nval)\n\t}\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tn = writeFirstBuf(stream.buf, digits[q1], n)\n\twriteBuf(stream.buf, digits[r1], n)\n\tstream.n = n + 3\n\treturn\n}", "func (s *Streamer) Uint16(v uint16) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint16(s.buffer, v)\n\treturn s\n}", "func Int16(key string, val int16) Tag {\n\treturn Tag{key: key, tType: int16Type, integerVal: int64(val)}\n}", "func WriteUint16(w io.Writer, v uint16) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func MeasureInt16(name string, field string, value int16) Measurement {\n\treturn NewMeasurement(name).AddInt16(field, value)\n}", "func (stream *Stream) WriteUint16(val uint16) {\n\tstream.ensure(5)\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tn := writeFirstBuf(stream.buf, digits[q1], stream.n)\n\twriteBuf(stream.buf, digits[r1], n)\n\tstream.n = n + 3\n\treturn\n}", "func (s *EnvVarSet) Uint16(name string, value uint16, usage string) *uint16 {\n\tp := new(uint16)\n\n\ts.Uint16Var(p, name, value, usage)\n\n\treturn p\n}", "func Uint16(k string, v uint16) Field {\n\treturn Field{Key: k, Value: valf.Uint16(v)}\n}", "func ExpectUint16(t *testing.T, field string, expected uint16, found uint16) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (o *OutputState) 
ApplyInt16(applier interface{}) Int16Output {\n\treturn o.ApplyT(applier).(Int16Output)\n}", "func (b *Buffer) AppendUint16(v uint16) {\n\tb.AppendUint64(uint64(v))\n}", "func (t Uint16) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteUint16(uint16(t))\n\treturn aWriterPool.Put(lw)\n}", "func Uint16(key string, val uint16) Tag {\n\treturn Tag{key: key, tType: uint16Type, integerVal: int64(val)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUInt16(v uint16) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func Uint16Tag(name interface{}, value uint16) Tag {\n\treturn &tag{\n\t\ttagType: TagUint16,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c Context) Uint16(key string, i uint16) Context {\n\tc.l.context = appendUint16(c.l.context, key, i)\n\treturn c\n}", "func (e Entry) Uint16(key string, value uint16) (entry Entry) {\n\te.Uint64(key, uint64(value))\n\treturn e\n}", "func opI16ToStr(expr *CXExpression, fp int) {\n\toutB0 := FromStr(strconv.FormatInt(int64(ReadI16(fp, expr.Inputs[0])), 10))\n\tWriteObject(GetOffset_str(fp, expr.Outputs[0]), outB0)\n}", "func Int16(any interface{}) int16 {\n\tif any == nil {\n\t\treturn 0\n\t}\n\tif v, ok := any.(int16); ok {\n\t\treturn v\n\t}\n\treturn int16(Int64(any))\n}", "func (p *Periph) StoreWord16(v uint16) {\n\t(*mmio.U16)(unsafe.Pointer(&p.dr)).Store(v)\n}", "func WriteI16WithContext(ctx context.Context, p thrift.TProtocol, value int16, name string, field int16) error {\n\tif err := p.WriteFieldBegin(ctx, name, thrift.I16, field); err != nil {\n\t\treturn thrift.PrependError(\"write field begin error: \", err)\n\t}\n\tif err := p.WriteI16(ctx, value); err != nil {\n\t\treturn thrift.PrependError(\"field write error: \", err)\n\t}\n\tif err := p.WriteFieldEnd(ctx); err != nil {\n\t\treturn thrift.PrependError(\"write field end error: \", err)\n\t}\n\treturn nil\n}", "func TestUint16(t *testing.T) {\n\ttests := []struct {\n\t\tin uint16 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff}}, // Max 2-byte\n\t}\n\n\tt.Logf(\"Running uint16 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint16(&buf, 
test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint16\n\t\terr = ReadUint16(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint16(key string, val uint16) Field {\n\treturn Field{Key: key, Type: core.Uint16Type, Integer: int64(val)}\n}", "func (b *Buffer) AppendUint16(x uint16) error {\n\treturn b.appendInteger(x)\n}", "func MeasureUInt16(name string, field string, value uint16) Measurement {\n\treturn NewMeasurement(name).AddUInt16(field, value)\n}", "func (z *Numeric) SetUint16(x uint16) *Numeric {\n\tif x == 0 {\n\t\treturn z.SetZero()\n\t}\n\n\tz.sign = numericPositive\n\tz.weight = -1\n\tz.digits = make([]int16, 0, 1) // as x!=0 there is at least 1 1000-base digit\n\tfor x != 0 {\n\t\td := int16(x % numericBase)\n\t\tx /= numericBase\n\t\tif d != 0 || len(z.digits) > 0 { // avoid tailing zero\n\t\t\tz.digits = append([]int16{d}, z.digits...)\n\t\t}\n\t\tz.weight++\n\t}\n\n\treturn z\n}", "func (c Context) Int16(key string, i int16) Context {\n\tc.l.context = appendInt16(c.l.context, key, i)\n\treturn c\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Int16(v int16) *int16 {\n\treturn &v\n}", "func Int16(v int16) *int16 {\n\treturn &v\n}", "func (m *MockBigInterface) Foo16(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo16 != nil {\n\t\treturn m.FnFoo16(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo16\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := 
ret[0].(Bar)\n\treturn ret0\n}", "func Uint16Var(p *uint16, name string, value uint16, usage string) {\n\tEnvironment.Uint16Var(p, name, value, usage)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func AppendInt16(dst []byte, val int16) []byte {\n\treturn AppendInt(dst, int(val))\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mtr *Dppdpp1intspareMetrics) SetSpare_16(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_16\"))\n\treturn nil\n}", "func (e Entry) Int16(key string, value int16) (entry Entry) {\n\te.Int64(key, int64(value))\n\treturn e\n}", "func (mr *MockSessionMockRecorder) Uint16(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint16\", reflect.TypeOf((*MockSession)(nil).Uint16), arg0)\n}", "func PutBufioWriter16M(w *bufio.Writer) bool {\n\tif w == nil {\n\t\treturn false\n\t}\n\tif l := w.Size(); l < 16777216 || l >= 33554432 {\n\t\treturn PutBufioWriter(w)\n\t}\n\tw.Reset(nil) // to not keep the parent writer alive\n\tputw16M(w)\n\treturn true\n}", "func (mtr *Dppdpp0intspareMetrics) SetSpare_16(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_16\"))\n\treturn nil\n}", "func opUI16ToStr(prgrm *CXProgram) {\n\texpr := prgrm.GetExpr()\n\tfp := prgrm.GetFramePointer()\n\n\toutB0 := FromStr(strconv.FormatUint(uint64(ReadUI16(fp, expr.Inputs[0])), 10))\n\tWriteObject(GetFinalOffset(fp, expr.Outputs[0]), outB0)\n}", "func (m *Message) putUint16(v uint16) {\n\tb := m.bufferForPut(2)\n\tdefer b.Advance(2)\n\n\tbinary.LittleEndian.PutUint16(b.Bytes[b.Offset:], v)\n}", "func Uints16(k string, v []uint16) Field {\n\treturn Field{Key: k, Value: valf.Uints16(v)}\n}", "func WW(address uint16, value uint16)", "func writeInt16ToFile(input int16, fp *os.File) {\n\tbuff := new(bytes.Buffer)\n\terr := binary.Write(buff, binary.LittleEndian, input)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\tintByteArray := buff.Bytes()\n\tfp.Write(intByteArray)\n}", "func TestReadUint16FromBytes(t *testing.T) {\n\tvar file, _ = ioutil.TempFile(os.TempDir(), \"\")\n\tvar filePath = file.Name()\n\tdefer file.Close()\n\tdefer os.Remove(filePath)\n\n\tvar writer = bufio.NewWriter(file)\n\t// Little endian pow disposition:\n\t// 16^1 16^0 | 16^3 16^2\n\t// 0 x 0 4 | 0 x 0 1\n\t// 0 4 | 0 256\n\t// -----------------------------\n\t// TOTAL = 
260\n\twriter.Write([]byte{0x04, 0x01})\n\twriter.Flush()\n\n\tfile.Seek(0, os.SEEK_SET)\n\n\tvar readerBag = fileReader.ByteReaderBag{File: file}\n\tvar number = fileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err != nil {\n\t\tt.Errorf(\"Expected no errors, got '%v'\", readerBag.Err)\n\t}\n\n\ttest.ExpectUint16(t, \"Number test\", 260, number)\n\n\tfileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err == nil {\n\t\tt.Errorf(\"Expected no bytes to read, but got no error\")\n\t}\n}", "func (z *Numeric) SetInt16(x int16) *Numeric {\n\tif x == 0 {\n\t\treturn z.SetZero()\n\t}\n\n\tif x < 0 {\n\t\tz.sign = numericNegative\n\t} else {\n\t\tz.sign = numericPositive\n\t}\n\n\tz.weight = -1\n\tz.digits = make([]int16, 0, 1) // as x!=0 there is at least 1 1000-base digit\n\tfor x != 0 {\n\t\td := mathh.AbsInt16(int16(x % numericBase))\n\t\tx /= numericBase\n\t\tif d != 0 || len(z.digits) > 0 { // avoid tailing zero\n\t\t\tz.digits = append([]int16{d}, z.digits...)\n\t\t}\n\t\tz.weight++\n\t}\n\n\treturn z\n}", "func Mux16() gate.Chip {\n\treturn nil\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func setNBitsOfUint16(src, size, startIndex, val uint16) (uint16, error) {\n\tif startIndex+size > 16 {\n\t\treturn 0, errInvalidSizeOrStartIndex\n\t}\n\n\t// truncate val to size bits\n\tval &= (1 << size) - 1\n\n\treturn src | (val << (16 - size - startIndex)), nil\n}", "func (ba *BitArray) Add16(bits uint16) {\n\tn := uint64(bits)\n\tn = n << 48\n\tba.AddVar(n, 16)\n}", "func MarshalUint16(dst []byte, u uint16) []byte {\n\treturn append(dst, byte(u>>8), byte(u))\n}", "func Uint16Arg(register Register, name string, options ...ArgOptionApplyer) *uint16 {\n\tp := new(uint16)\n\t_ = Uint16ArgVar(register, p, name, options...)\n\treturn p\n}", "func MarshalInt16(dst []byte, v int16) []byte {\n\t// Such encoding for negative v must improve compression.\n\tv = (v << 1) ^ (v >> 15) // zig-zag encoding without branching.\n\tu := uint16(v)\n\treturn append(dst, byte(u>>8), byte(u))\n}", "func Uint16(a, b interface{}) int {\n\tu1, _ := a.(uint16)\n\tu2, _ := b.(uint16)\n\tswitch {\n\tcase u1 < u2:\n\t\treturn -1\n\tcase u1 > u2:\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}", "func (mtr *Dprdpr0intspareMetrics) SetSpare_16(val metrics.Counter) error {\n\tmtr.metrics.SetCounter(val, mtr.getOffset(\"Spare_16\"))\n\treturn nil\n}" ]
[ "0.7732717", "0.7541344", "0.74752706", "0.7246873", "0.7016308", "0.6995027", "0.69238025", "0.6432141", "0.63554776", "0.6245883", "0.6170915", "0.6113621", "0.60915047", "0.60676646", "0.6049336", "0.6033905", "0.6004892", "0.59424984", "0.5905912", "0.5879465", "0.58328354", "0.5828363", "0.57011825", "0.5682786", "0.5648454", "0.5642018", "0.5613687", "0.55844605", "0.55422664", "0.55372584", "0.553155", "0.54857475", "0.54776555", "0.5474703", "0.54693484", "0.54674923", "0.5401604", "0.53996605", "0.5399028", "0.5395554", "0.53683466", "0.53671813", "0.5358045", "0.5357119", "0.5346605", "0.532679", "0.5324159", "0.5315117", "0.5303693", "0.53029853", "0.52818185", "0.52769256", "0.52767664", "0.5268852", "0.526373", "0.5262306", "0.5253564", "0.525266", "0.52470165", "0.52362764", "0.5233809", "0.5231198", "0.5229369", "0.5221866", "0.5219587", "0.5219293", "0.5217887", "0.52103484", "0.52018946", "0.5190116", "0.51792324", "0.51786053", "0.51783705", "0.51783705", "0.5174521", "0.516775", "0.5167588", "0.5155149", "0.51446915", "0.51410186", "0.51386046", "0.513454", "0.51343024", "0.51324576", "0.51235676", "0.51167345", "0.51153904", "0.5114246", "0.5113985", "0.51079106", "0.5096718", "0.50940603", "0.5093381", "0.507003", "0.5068837", "0.5068774", "0.505485", "0.50513947", "0.5050518", "0.5040831" ]
0.79537535
0
WriteInt32 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m 
*MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} 
else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, 
value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) AppendUint32(arg0 []byte, arg1 uint32) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint32\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) Int32(arg0 string) int32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int32\", arg0)\n\tret0, _ := ret[0].(int32)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, 
logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *ByteWriter) MustWriteInt32(val int32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteI32(p thrift.TProtocol, value int32, name string, field int16) error {\n\treturn WriteI32WithContext(context.Background(), p, value, name, field)\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func SetUint32ByName(o interface{}, name string, val uint32) {\n\tif fd := reflect.ValueOf(o).Elem().FieldByName(name); fd.IsValid() 
{\n\t\tfd.SetUint(uint64(val))\n\t}\n}", "func (w *Writer) WriteInt32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func WriteInt32(buffer []byte, offset int, value int32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (mr *MockIOPackageMockRecorder) WriteUint32(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint32\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint32), arg0, arg1, arg2)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockBigInterface) Foo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo32 != nil {\n\t\treturn m.FnFoo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo32\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := 
ret[0].(Bar)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *BlobStore) Put(path string, reader io.Reader, objectSize int64) error {\n\tret := _m.Called(path, reader, objectSize)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, io.Reader, int64) error); ok {\n\t\tr0 = rf(path, reader, objectSize)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, 
fuse.OK, stat)\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (_m *blockchainInt64) toLittleEndian() []byte {\n\tret := _m.Called()\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func() []byte); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *WriteCloser) Write(p []byte) (int, error) {\n\tret := _m.Called(p)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(p)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(p)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca 
[]interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 
00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Output) WriteOne(ctx context.Context, msg stream.WritableMessage) error {\n\tret := _m.Called(ctx, msg)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, msg)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *ValueConverter) ToInt(_a0 interface{}) int {\n\tret := _m.Called(_a0)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func(interface{}) int); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, 
len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := 
buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func (instance *Instance) SetInt32(fieldName string, value int32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func mockTest0103(w http.ResponseWriter, r *http.Request) {\n\tretCode, err := common.GetIntArgFromQuery(r, \"code\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\tif retCode < http.StatusOK {\n\t\tretCode = http.StatusOK\n\t}\n\n\tb := []byte(\"mockTest0103, mock return error code.\")\n\tw.Header().Set(common.TextContentLength, strconv.Itoa(len(b)))\n\tw.WriteHeader(retCode)\n\tlog.Println(\"mock return error code:\", retCode)\n\n\tif _, err := w.Write(b); err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t}\n}", "func (m *MockisWRingKeyHandle_KeyOrHandle) MarshalTo(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"MarshalTo\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint32(t *testing.T) {\n\ttests := []struct {\n\t\tin uint32 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff}}, // Max 4-byte\n\t}\n\n\tt.Logf(\"Running uint32 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint32(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint32\n\t\terr = ReadUint32(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (w *Writer) WriteUint32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockRedisLockIface) Set(arg0 *redis.Pool, arg1 string, arg2 uint32) (bool, string, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Set\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(string)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "func (c *fakeRedisConn) WriteInt(num int) { c.rsp = 
append(c.rsp, num) }", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (b *Bus) Write32(addr mirv.Address, v uint32) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write32(addr-blk.s, v)\n}", "func (m *MockIOPackage) IsLittleEndian() bool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IsLittleEndian\")\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}" ]
[ "0.7649489", "0.74640656", "0.7225452", "0.69621444", "0.6961996", "0.6692921", "0.658768", "0.65772974", "0.6544161", "0.6509446", "0.64336514", "0.64254093", "0.6419719", "0.6404061", "0.6371113", "0.6248544", "0.62387335", "0.6236297", "0.6112445", "0.61115235", "0.6067686", "0.5976449", "0.5880014", "0.58768237", "0.58750415", "0.58656454", "0.5856616", "0.57650954", "0.5760272", "0.57306415", "0.5703675", "0.56584126", "0.5646681", "0.55210906", "0.55118024", "0.55117285", "0.54833394", "0.5473413", "0.54549414", "0.54540074", "0.5420572", "0.5417532", "0.5407807", "0.5402897", "0.53959304", "0.53938967", "0.5390723", "0.53860754", "0.5369979", "0.5343531", "0.53247577", "0.53111756", "0.52989095", "0.5297658", "0.52782714", "0.52685344", "0.5264089", "0.5256462", "0.52409554", "0.5228624", "0.521725", "0.521311", "0.521311", "0.5202554", "0.5202554", "0.5160769", "0.51594394", "0.51561207", "0.5133504", "0.5131454", "0.51239294", "0.5100725", "0.5096404", "0.5077123", "0.505996", "0.5049676", "0.5032934", "0.50099766", "0.4993869", "0.4991467", "0.49839318", "0.49807298", "0.49683154", "0.49653316", "0.49620283", "0.49587205", "0.4949044", "0.49372956", "0.4923122", "0.492149", "0.4909081", "0.49067494", "0.4893439", "0.487769", "0.48751947", "0.48743963", "0.48693186", "0.4866953", "0.48380727", "0.4836648" ]
0.7749478
0
WriteInt32 is a helper method to define mock.On call logicalName string bitLength uint8 value int32 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call { return &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On("WriteInt32", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) 
WriteInt32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteI32(p thrift.TProtocol, value int32, name string, field int16) error {\n\treturn WriteI32WithContext(context.Background(), p, value, name, field)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := 
make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func SetUint32ByName(o interface{}, name string, val uint32) {\n\tif fd := reflect.ValueOf(o).Elem().FieldByName(name); fd.IsValid() {\n\t\tfd.SetUint(uint64(val))\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *ByteWriter) MustWriteInt32(val int32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func WriteInt32(buffer []byte, offset int, value int32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = 
rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteUint32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (m *MockSession) Int32(arg0 string) int32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int32\", arg0)\n\tret0, _ := ret[0].(int32)\n\treturn ret0\n}", "func (m *MockIOPackage) AppendUint32(arg0 []byte, arg1 uint32) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint32\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (instance *Instance) SetInt32(fieldName string, value int32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func 
(_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (vb *keyWriter) WriteInt32(num int32) (int, error) {\n\treturn vb.WriteUint32(uint32(num))\n}", "func (mr *MockIOPackageMockRecorder) WriteUint32(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint32\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint32), arg0, arg1, arg2)\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func WriteUInt32(buffer []byte, offset int, value uint32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (m Measurement) AddInt32(name string, value int32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *ByteWriter) WriteInt32(val int32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func BindWriterOnInt32Slice(v *[]int32) io.Writer {\n\tif v == nil {\n\t\treturn nil\n\t}\n\treturn &writerBinderInt32Slice{v: v}\n}", "func StoreInt32(addr *int32, val int32)", "func (bio *BinaryIO) WriteUint32(off int64, value uint32) {\n\tvar buf [4]byte\n\tbio.order.PutUint32(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (b *Bus) Write32(addr mirv.Address, v uint32) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write32(addr-blk.s, v)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func (instance *Instance) SetUint32(fieldName string, value uint32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, 
C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (t Int32) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteInt32(int32(t))\n\treturn aWriterPool.Put(lw)\n}", "func (m Measurement) AddUInt32(name string, value uint32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func SetUint32(gauge prometheus.Gauge, arg uint32) {\n\tgauge.Set(float64(arg))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (mt *MTService) HelloZ32(strArg string, inT64 int64, inT32 int64, nameArr []string) int32 {\n\treturn 2222222\n\t// return 9223372036854775808\n}", "func (c *fakeRedisConn) WriteInt(num int) { c.rsp = append(c.rsp, num) }", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUInt32(v uint32) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func I32(name string, defs ...int32) int32 {\n\tvar def int32\n\tfor _, d := range defs {\n\t\tdef = d\n\t\tbreak\n\t}\n\tv := Raw(name)\n\ti, err := strconv.ParseInt(v, 10, 32)\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn int32(i)\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func mockTest0103(w http.ResponseWriter, r *http.Request) {\n\tretCode, err := common.GetIntArgFromQuery(r, \"code\")\n\tif err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t\treturn\n\t}\n\tif retCode < http.StatusOK {\n\t\tretCode = http.StatusOK\n\t}\n\n\tb := []byte(\"mockTest0103, mock return error code.\")\n\tw.Header().Set(common.TextContentLength, strconv.Itoa(len(b)))\n\tw.WriteHeader(retCode)\n\tlog.Println(\"mock return error code:\", retCode)\n\n\tif _, err := w.Write(b); err != nil {\n\t\tcommon.ErrHandler(w, err)\n\t}\n}", "func (r *HashJsonCodecRedisController) SetSomeInt32(key string, someInt32 int32) (err error) {\n\t// redis conn\n\tconn := r.pool.Get()\n\tdefer conn.Close()\n\n\t// set SomeInt32 field\n\tr.m.SomeInt32 = someInt32\n\t_, err = conn.Do(\"HSET\", key, \"SomeInt32\", someInt32)\n\n\treturn\n}", "func (b *Buffer) AppendInt32(v int32) {\n\tb.AppendInt64(int64(v))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, 
writerArgs...)...)}\n}", "func WriteMemI32(mem []byte, offset int, v int32) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n}", "func iow(t, nr, size uintptr) uintptr {\n\treturn ioc(directionWrite, t, nr, size)\n}", "func (mr *MockIOPackageMockRecorder) AppendUint32(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint32\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint32), arg0, arg1)\n}", "func (m *MockBigInterface) Foo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo32 != nil {\n\t\treturn m.FnFoo32(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo32\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (vb *keyWriter) WriteUint32(num uint32) (int, error) {\n\tbinary.BigEndian.PutUint32(vb.buffer[:4], num)\n\tvb.hash.Write(vb.buffer[:4])\n\treturn 4, nil\n}", "func FormatInt32(name string) string {\n\treturn formatIntFunction(name, true)\n}", "func ExpectInt32(t *testing.T, field string, expected int32, found int32) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (e *Event) AddInt32(key string, value int32) {\n\tvar ckey *C.char = C.CString(key)\n\tC.oboe_event_add_info_int64(&e.event, ckey, C.int64_t(value))\n\tC.free(unsafe.Pointer(ckey))\n}", "func WriteI32WithContext(ctx context.Context, p thrift.TProtocol, value int32, name string, field int16) error {\n\tif err := p.WriteFieldBegin(ctx, name, thrift.I32, field); err != nil {\n\t\treturn thrift.PrependError(\"write field begin error: \", err)\n\t}\n\tif err := p.WriteI32(ctx, value); err != nil {\n\t\treturn thrift.PrependError(\"field write error: \", err)\n\t}\n\tif err := p.WriteFieldEnd(ctx); err != nil {\n\t\treturn thrift.PrependError(\"write field end error: \", err)\n\t}\n\treturn nil\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Buffer) AppendInt32(x int32) error {\n\treturn b.appendInteger(x)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (shader *Shader) SetValueInt32(uniformLoc int, value []int32, uniformType ShaderUniformDataType) {\n\tcshader := *shader.cptr()\n\tcvalue := (*C.int)(unsafe.Pointer((*reflect.SliceHeader)(unsafe.Pointer(&value)).Data))\n\tclen := C.int(1)\n\tC.SetShaderValueV(cshader, C.int(int32(uniformLoc)), unsafe.Pointer(cvalue), C.int(int32(uniformType)), clen)\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func Int32(i32 int32) Val {\n\tv := Val{t: bsontype.Int32}\n\tv.bootstrap[0] = byte(i32)\n\tv.bootstrap[1] = byte(i32 >> 8)\n\tv.bootstrap[2] = byte(i32 >> 16)\n\tv.bootstrap[3] = byte(i32 >> 24)\n\treturn v\n}", "func (dm *dataManager) writeInt(address uint, i int) (err ProcessException) {\n\tdata := make([]byte, 4)\n\tbinary.LittleEndian.PutUint32(data, uint32(i))\n\n\terr = dm.process.WriteBytes(address, data)\n\n\treturn\n}", "func stringToBytes32(delegateName string) [32]byte {\n\tvar name [32]byte\n\tcopy(name[:], delegateName)\n\treturn name\n}", "func (_e *Output_Expecter) WriteOne(ctx interface{}, msg interface{}) *Output_WriteOne_Call {\n\treturn &Output_WriteOne_Call{Call: _e.mock.On(\"WriteOne\", ctx, msg)}\n}", "func Int32Arg(register Register, name string, options ...ArgOptionApplyer) *int32 {\n\tp := new(int32)\n\t_ = Int32ArgVar(register, p, name, options...)\n\treturn p\n}", "func (u *Int32) Set(value int32) {\n\tatomic.StoreInt32(&u.value, value)\n}", "func (s *Streamer) Int32(v int32) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendInt32(s.buffer, v)\n\treturn s\n}", "func (m *Message) putNamedValues32(values NamedValues) {\n\tl := len(values)\n\tif l == 0 {\n\t\treturn\n\t} else if int64(l) > math.MaxUint32 {\n\t\t// safeguard, should have been checked beforehand.\n\t\tpanic(\"too many parameters\")\n\t}\n\tn := uint32(l)\n\n\tm.putUint32(n)\n\tm.putNamedValuesInner(values)\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (c *Configurator) Uint32(name string, value uint32, usage string) *uint32 {\n\tp := new(uint32)\n\n\tc.Uint32Var(p, name, value, usage)\n\n\treturn p\n}", "func writeToFile(file *os.File, data uint32, offset int) {\n\tbuffer := make([]byte, UINT32_LENGTH)\n\tbinary.LittleEndian.PutUint32(buffer, data)\n\tfile.WriteAt(buffer, int64(offset))\n}", "func (w *ByteWriter) WriteUint32(val uint32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (m *MockResponseWriter) Write(arg0 
[]byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}" ]
[ "0.744865", "0.7265903", "0.72464997", "0.67170894", "0.6583917", "0.64238054", "0.6383818", "0.60076624", "0.5881504", "0.58767384", "0.58664405", "0.58551913", "0.5838302", "0.5795263", "0.57854205", "0.57753366", "0.57647866", "0.5738884", "0.57262653", "0.57165986", "0.56893754", "0.5681455", "0.56727314", "0.56496143", "0.5644101", "0.5613943", "0.5578451", "0.55659884", "0.54572916", "0.5425", "0.54234725", "0.5416435", "0.54079443", "0.5382461", "0.5353105", "0.52690315", "0.5256084", "0.5231989", "0.5209138", "0.5187907", "0.5141628", "0.51374435", "0.51202726", "0.5090585", "0.5055318", "0.50220776", "0.49994412", "0.49783018", "0.49713612", "0.49693853", "0.49572873", "0.49530762", "0.49206054", "0.4905029", "0.48995325", "0.48982885", "0.48931915", "0.48863596", "0.48861462", "0.4876608", "0.48514697", "0.48263282", "0.48262635", "0.4821989", "0.48092452", "0.4804111", "0.47928518", "0.4788643", "0.47853372", "0.47709435", "0.47654393", "0.47612652", "0.4753186", "0.47412586", "0.4738151", "0.4736787", "0.47230944", "0.47221777", "0.47020188", "0.470051", "0.46804646", "0.46531764", "0.46527693", "0.4649582", "0.46487883", "0.46483025", "0.4647434", "0.46459502", "0.46431008", "0.46413293", "0.4620193", "0.46182203", "0.46177465", "0.46175924", "0.46125835", "0.4606726", "0.46013078", "0.45901647", "0.45896488", "0.45806327" ]
0.7617576
0
WriteInt64 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName 
string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs 
...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func WriteInt64(buffer []byte, offset int, value int64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) WriteUint64(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint64\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint64), arg0, arg1, arg2)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint64(arg0 string) uint64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint64\", arg0)\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = 
byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *ByteWriter) MustWriteInt64(val int64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Bus) Write64(addr mirv.Address, v uint64) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write64(addr-blk.s, v)\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = 
byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *FakeObject) Uint64() uint64 { return o.Value.(uint64) }", "func (w *Writer) WriteInt64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (a *api) SetInt64(raw bool) {\n\ta.Commentf(\"%s constructs a field element from an integer.\", rawname(\"SetInt64\", raw))\n\ta.rawcomment(raw)\n\ta.Printf(\"func (x %s) %s(y int64) %s\", a.PointerType(), rawname(\"SetInt64\", raw), a.PointerType())\n\ta.EnterBlock()\n\ta.Linef(\"x.%s(big.NewInt(y))\", rawname(\"SetInt\", raw))\n\ta.Linef(\"return x\")\n\ta.LeaveBlock()\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = 
append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteMemI64(mem []byte, offset int, v int64) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n\tmem[offset+4] = byte(v >> 32)\n\tmem[offset+5] = byte(v >> 40)\n\tmem[offset+6] = byte(v >> 48)\n\tmem[offset+7] = byte(v >> 56)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c Channel) WriteInt64(name string, value int64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_longlong(\n\t\tc.handle,\n\t\tcName,\n\t\tC.longlong(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (s *State) Write64(h uint64) (err error) {\n\ts.clen += 8\n\ts.tail = append(s.tail, byte(h>>56), byte(h>>48), byte(h>>40), byte(h>>32), byte(h>>24), byte(h>>16), 
byte(h>>8), byte(h))\n\treturn nil\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Int64(i64 int64) Val { return Val{t: bsontype.Int64}.writei64(i64) }", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (bw *BufWriter) Uint64(val uint64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatUint(val, 10))\n}", "func (m *MockSession) Int64(arg0 string) int64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int64\", arg0)\n\tret0, _ := ret[0].(int64)\n\treturn ret0\n}", "func TestSet64(t *testing.T) {\n\thm, _ := NewHashMap(64)\n\ttestSetN(testN, hm)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Int) SetUint64(x uint64) *Int {}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferJsonBased) 
WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Rat) SetUint64(x uint64) *Rat {}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (m *MockHash64) Sum64() uint64 {\n\tret := m.ctrl.Call(m, \"Sum64\")\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (bio *BinaryIO) WriteUint64(off int64, value uint64) {\n\tvar buf [8]byte\n\tbio.order.PutUint64(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (instance *Instance) SetInt64(fieldName string, value int64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := 
int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (instance *Instance) SetUint64(fieldName string, value uint64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (m Measurement) AddUInt64(name string, value uint64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *FakeObject) Int64() int64 { return o.Value.(int64) }", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (bw *BufWriter) Int64(val int64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatInt(val, 10))\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mr *MockIOPackageMockRecorder) AppendUint64(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint64\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint64), arg0, arg1)\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func StoreInt64(addr *int64, val int64)", "func Uint64(name string, val uint64) Field {\n\treturn Field(zap.Uint64(name, val))\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Int) SetInt64(x int64) *Int {}", "func TestInt64(tst *testing.T) {\n\n\t// Test bool\n\ti, err := StringToInt64(\"187480198367637651\")\n\tbrtesting.AssertEqual(tst, err, nil, \"StringToInt64 failed\")\n\tbrtesting.AssertEqual(tst, i, int64(187480198367637651), \"StringToInt64 failed\")\n\ti, err = StringToInt64(\"go-bedrock\")\n\tbrtesting.AssertNotEqual(tst, err, nil, \"StringToInt64 failed\")\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *Writer) Uint64(n uint64) {\n\tw.buf = strconv.AppendUint(w.buf, uint64(n), 10)\n}", "func (m *Mmap) WriteInt64(start, val int64) error {\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[start:start+8], uint64(val))\n\treturn nil\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) 
{\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (z *Rat) SetInt64(x int64) *Rat {}" ]
[ "0.7606551", "0.7589502", "0.7458165", "0.7214177", "0.71845156", "0.70734125", "0.6946393", "0.66048145", "0.65864784", "0.65779644", "0.65288866", "0.64605117", "0.6458609", "0.62467855", "0.61882997", "0.6172667", "0.6139164", "0.609823", "0.6026624", "0.5967225", "0.5902899", "0.5901353", "0.5889088", "0.5843369", "0.5834754", "0.5810162", "0.5794585", "0.57883894", "0.5777749", "0.57764155", "0.5747503", "0.5694671", "0.5693891", "0.56906474", "0.5682909", "0.56763715", "0.5673819", "0.5668299", "0.5659477", "0.5659103", "0.5657636", "0.56465465", "0.5636817", "0.5617917", "0.5604429", "0.558903", "0.5575127", "0.55736935", "0.557207", "0.5570797", "0.55631596", "0.5549927", "0.55452985", "0.5518191", "0.54732454", "0.5456944", "0.5432546", "0.5431531", "0.5428171", "0.5418763", "0.54079187", "0.54030776", "0.53974015", "0.53910494", "0.53909814", "0.53820115", "0.5371031", "0.5368556", "0.5367031", "0.5358465", "0.53462386", "0.533045", "0.53127927", "0.53089434", "0.53039145", "0.5302664", "0.529713", "0.5296639", "0.5289579", "0.5287605", "0.5266972", "0.52550113", "0.52517396", "0.52395254", "0.5237799", "0.5236809", "0.52256006", "0.52256006", "0.5219923", "0.52136457", "0.5201199", "0.5196011", "0.51936096", "0.5182485", "0.51438326", "0.5143576", "0.5138484", "0.5137502", "0.5134903", "0.5134065" ]
0.7658769
0
WriteInt64 is a helper method to define mock.On call: logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {
	return &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On("WriteInt64",
		append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteI64(p 
thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (c *fakeRedisConn) WriteInt64(num int64) { c.rsp = append(c.rsp, num) }", "func (a *api) SetInt64(raw bool) {\n\ta.Commentf(\"%s constructs a field element from an integer.\", rawname(\"SetInt64\", raw))\n\ta.rawcomment(raw)\n\ta.Printf(\"func (x %s) %s(y int64) %s\", a.PointerType(), rawname(\"SetInt64\", raw), a.PointerType())\n\ta.EnterBlock()\n\ta.Linef(\"x.%s(big.NewInt(y))\", rawname(\"SetInt\", raw))\n\ta.Linef(\"return x\")\n\ta.LeaveBlock()\n}", "func (c Channel) WriteInt64(name string, value int64) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\n\terrno := C.iio_channel_attr_write_longlong(\n\t\tc.handle,\n\t\tcName,\n\t\tC.longlong(value),\n\t)\n\tif errno == 0 {\n\t\treturn nil\n\t}\n\treturn syscall.Errno(-errno)\n}", "func (w *Writer) WriteInt64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) 
WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func Int64(i64 int64) Val { return Val{t: bsontype.Int64}.writei64(i64) }", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (bw *BufWriter) Uint64(val uint64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatUint(val, 10))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint64(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint64\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint64), arg0, arg1, arg2)\n}", "func (s *State) Write64(h uint64) (err error) {\n\ts.clen += 8\n\ts.tail = append(s.tail, byte(h>>56), byte(h>>48), byte(h>>40), byte(h>>32), byte(h>>24), byte(h>>16), byte(h>>8), byte(h))\n\treturn nil\n}", "func WriteInt64(buffer []byte, offset int, value int64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] 
= byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (w *ByteWriter) MustWriteInt64(val int64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func WriteMemI64(mem []byte, offset int, v int64) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n\tmem[offset+4] = byte(v >> 32)\n\tmem[offset+5] = byte(v >> 40)\n\tmem[offset+6] = byte(v >> 48)\n\tmem[offset+7] = byte(v >> 56)\n}", "func (b *Bus) Write64(addr mirv.Address, v uint64) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write64(addr-blk.s, v)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) Uint64(n uint64) {\n\tw.buf = strconv.AppendUint(w.buf, uint64(n), 10)\n}", "func (bw *BufWriter) Int64(val int64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatInt(val, 10))\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (m Measurement) AddUInt64(name string, value uint64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = 
rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func RegisterInt64(key string, def int64, description string) onion.Int {\n\tsetDescription(key, description)\n\treturn o.RegisterInt64(key, def)\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint64(arg0 string) uint64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint64\", arg0)\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (instance *Instance) SetInt64(fieldName string, value int64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockIOPackageMockRecorder) AppendUint64(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint64\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint64), arg0, arg1)\n}", "func (bio *BinaryIO) WriteUint64(off int64, value uint64) {\n\tvar buf [8]byte\n\tbio.order.PutUint64(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (vb *keyWriter) WriteInt64(num int64) (int, error) {\n\treturn vb.WriteUint64(uint64(num))\n}", "func Uint64(name string, val uint64) Field {\n\treturn 
Field(zap.Uint64(name, val))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteInt64(num int64) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = AppendInt(w.b, num)\n}", "func (w *Writer) WriteUint64(num uint64) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = AppendUint(w.b, num)\n}", "func (instance *Instance) SetUint64(fieldName string, value uint64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\treturn CommandLine.Uint64(name, alias, value, usage, fn)\n}", "func (w *ByteWriter) WriteInt64(val int64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (w *ByteWriter) WriteUint64(val uint64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (z *Int) SetUint64(x uint64) *Int {}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *FakeObject) Uint64() uint64 { return o.Value.(uint64) }", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn Environment.Uint64(name, value, usage)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func I64(name string, defs ...int64) int64 {\n\tvar def int64\n\tfor _, d := range defs {\n\t\tdef = d\n\t\tbreak\n\t}\n\tv := Raw(name)\n\ti, err := strconv.ParseInt(v, 10, 64)\n\tif err != nil {\n\t\treturn def\n\t}\n\treturn i\n}", "func FormatInt64(name string) string {\n\treturn formatIntFunction(name, false)\n}", "func (w *Writer) Int64(n int64) {\n\tw.buf = strconv.AppendInt(w.buf, int64(n), 10)\n}", "func (m Measurement) AddInt64(name string, value int64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func Int64(name string, alias rune, value int64, usage string, fn Callback) *int64 {\n\treturn CommandLine.Int64(name, alias, value, usage, fn)\n}", "func (z *Rat) SetUint64(x uint64) *Rat {}", "func opI64ToStr(expr *CXExpression, fp int) {\n\toutB0 := FromStr(strconv.FormatInt(ReadI64(fp, expr.Inputs[0]), 10))\n\tWriteObject(GetOffset_str(fp, expr.Outputs[0]), outB0)\n}", "func Int64(name string, value int64, usage string) *int64 {\n\tp := new(int64);\n\tInt64Var(p, name, value, usage);\n\treturn p;\n}", "func (m *Mmap) AppendInt64(val int64) error {\n\tif err := m.checkFilePointerOutOfRange(8); err != nil {\n\t\treturn err\n\t}\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[m.FilePointer:m.FilePointer+8], uint64(val))\n\tm.FilePointer += 8\n\treturn nil\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m *Mmap) WriteInt64(start, val int64) error {\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[start:start+8], uint64(val))\n\treturn nil\n}", "func (f *FlagSet) Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\tp := new(uint64)\n\tf.Uint64Var(p, name, alias, value, usage, fn)\n\treturn p\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, 
writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Int64Arg(register Register, name string, options ...ArgOptionApplyer) *int64 {\n\tp := new(int64)\n\t_ = Int64ArgVar(register, p, name, options...)\n\treturn p\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn ex.FlagSet.Uint64(name, value, usage)\n}", "func (vb *keyWriter) WriteUint64(num uint64) (int, error) {\n\tbinary.BigEndian.PutUint64(vb.buffer[:8], num)\n\tvb.hash.Write(vb.buffer[:8])\n\treturn 8, nil\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Int) SetInt64(x int64) *Int {}", "func WriteI64WithContext(ctx context.Context, p thrift.TProtocol, value int64, name string, field int16) error {\n\tif err := p.WriteFieldBegin(ctx, name, thrift.I64, field); err != nil {\n\t\treturn thrift.PrependError(\"write field begin error: \", err)\n\t}\n\tif err := p.WriteI64(ctx, value); err != nil {\n\t\treturn thrift.PrependError(\"field write error: \", err)\n\t}\n\tif err := p.WriteFieldEnd(ctx); err != nil {\n\t\treturn thrift.PrependError(\"write field end error: \", err)\n\t}\n\treturn nil\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteUint64LE(v uint64) {\n\tout.Append(\n\t\tbyte(v),\n\t\tbyte(v>>8),\n\t\tbyte(v>>16),\n\t\tbyte(v>>24),\n\t\tbyte(v>>32),\n\t\tbyte(v>>40),\n\t\tbyte(v>>48),\n\t\tbyte(v>>56),\n\t)\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint64Arg(register Register, name string, options ...ArgOptionApplyer) *uint64 {\n\tp := new(uint64)\n\t_ = Uint64ArgVar(register, p, name, options...)\n\treturn p\n}", "func marshallAbiUint64(value interface{}) (uint64, error) {\n\tabiType, err := abi.TypeOf(\"uint64\")\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tencoded, err := abiType.Encode(value)\n\tif err != 
nil {\n\t\treturn 0, err\n\t}\n\tdecoded, err := abiType.Decode(encoded)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tmarshalledValue, ok := decoded.(uint64)\n\tif !ok {\n\t\terr = fmt.Errorf(\"Decoded value is not a uint64\")\n\t}\n\treturn marshalledValue, err\n}", "func (e *Eth) SetInt64(w int64) *Eth {\n\treturn (*Eth)(e.ToInt().SetInt64(w))\n}", "func (r *Record) SetInt64Field(d *Db, number uint16, data int64) error {\n\tif C.wg_set_int_field(d.db, r.rec, C.wg_int(number), C.wg_int(data)) != 0 {\n\t\treturn WDBError(\"Could not set field\")\n\t}\n\treturn nil\n}", "func SysInt64(name string) int64 {\r\n\treturn converter.StrToInt64(SysString(name))\r\n}", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func StoreInt64(addr *int64, val int64)", "func TestSet64(t *testing.T) {\n\thm, _ := NewHashMap(64)\n\ttestSetN(testN, hm)\n}", "func (m *MockSession) Int64(arg0 string) int64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int64\", arg0)\n\tret0, _ := ret[0].(int64)\n\treturn ret0\n}" ]
[ "0.745478", "0.7041171", "0.7005116", "0.68596226", "0.67897564", "0.6550462", "0.6464553", "0.6193622", "0.58839697", "0.577427", "0.5771744", "0.57218456", "0.57080114", "0.5679537", "0.56642324", "0.5650233", "0.5649466", "0.5645485", "0.5641192", "0.5626163", "0.56200266", "0.56147075", "0.56070083", "0.5597681", "0.5584796", "0.55763906", "0.55714834", "0.55434835", "0.5505052", "0.5486279", "0.5464294", "0.5451997", "0.5443416", "0.54384947", "0.53893846", "0.53748876", "0.5368704", "0.5330718", "0.5320532", "0.53172565", "0.5292246", "0.5281685", "0.5274593", "0.5257709", "0.5256246", "0.52543515", "0.52539027", "0.52232826", "0.5215393", "0.5208917", "0.5206405", "0.5205257", "0.5195616", "0.5189497", "0.5176143", "0.51685387", "0.5168198", "0.51639026", "0.51543945", "0.5148694", "0.5145121", "0.51401615", "0.513707", "0.5118313", "0.51080877", "0.5106133", "0.5089989", "0.50830954", "0.50774133", "0.5073274", "0.5071617", "0.50695515", "0.50680363", "0.50678235", "0.50664866", "0.50580823", "0.5046752", "0.5043716", "0.50293744", "0.5025831", "0.5025812", "0.5008947", "0.5008268", "0.49982497", "0.49977234", "0.4992613", "0.49878594", "0.49804333", "0.49720824", "0.49660233", "0.4962144", "0.49617594", "0.49545646", "0.49513298", "0.49461457", "0.4945124", "0.49336404", "0.49314132", "0.49289262", "0.4927881" ]
0.75035644
0
WriteInt8 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte 
{\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Int8(arg0 string) int8 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int8\", arg0)\n\tret0, _ := ret[0].(int8)\n\treturn ret0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint8(arg0 string) byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint8\", arg0)\n\tret0, _ := ret[0].(byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = 
ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, 
bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockBigInterface) Foo8(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo8 != nil {\n\t\treturn m.FnFoo8(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo8\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteInt8(buffer []byte, offset int, value int8) {\n buffer[offset] = byte(value)\n}", "func (_m *MockOStream) 
WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, 
buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func (m 
*MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mr *MockIOPackageMockRecorder) WriteUint8(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint8\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint8), arg0, arg1, arg2)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (w *Writer) WriteBits8(b byte, n uint) error {\n\t//fmt.Printf(\"b: %d, n: %d, free: %d\\n\", b, n, w.free)\n\tif n == 0 {\n\t\treturn nil\n\t}\n\n\tif n > 8 {\n\t\tn = 8\n\t}\n\n\tvar rem uint\n\tif n > w.free {\n\t\trem = n - w.free\n\t\tn = w.free\n\t}\n\n\tw.free -= n\n\tw.bits <<= n\n\tw.bits |= ((b >> rem) & (1<<n - 1))\n\n\tvar err error\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\tif rem == 0 {\n\t\treturn err\n\t}\n\n\treturn w.WriteBits8(b, rem)\n}", "func (m Measurement) AddInt8(name string, value int8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_m *blockchainInt64) toLittleEndian() []byte {\n\tret := _m.Called()\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func() []byte); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m 
*MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func ExpectInt8(t *testing.T, field string, expected int8, found int8) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m Measurement) AddUInt8(name string, value uint8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (b *Bus) Write8(addr mirv.Address, v uint8) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write8(addr-blk.s, v)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func (m *MockIOPackage) ReadUint8(arg0 []byte, arg1 int) (uint8, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint8\", arg0, arg1)\n\tret0, _ := ret[0].(uint8)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func WriteUInt8(buffer []byte, offset int, value uint8) {\n buffer[offset] = 
byte(value)\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, 
bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *memory) Write8(adr uint16, val uint8) {\n\tm.ram[adr] = val\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) 
WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (util *MarshalUtil) WriteUint8(value uint8) *MarshalUtil {\n\twriteEndOffset := util.expandWriteCapacity(Uint8Size)\n\n\tutil.bytes[util.writeOffset] = value\n\n\tutil.WriteSeek(writeEndOffset)\n\n\treturn util\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (instance *Instance) SetInt8(fieldName string, value int8) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint8(t *testing.T) {\n\ttests := []struct {\n\t\tin uint8 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01}},\n\t\t{128, []byte{0x80}},\n\t\t{255, []byte{0xff}},\n\t}\n\n\tt.Logf(\"Running uint8 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint8(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint8\n\t\terr = ReadUint8(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, 
_va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock *WriteCloser) SetReturnWrite(n int, err error) *WriteCloser {\n\tmock.impl.Write = func([]byte) (int, error) {\n\t\treturn n, err\n\t}\n\treturn mock\n}", "func ExpectUint8(t *testing.T, field string, expected uint8, found uint8) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *FormSerializationWriter) WriteInt8Value(key string, value *int8) error {\n\tif value != nil {\n\t\tcast := int64(*value)\n\t\treturn w.WriteInt64Value(key, &cast)\n\t}\n\treturn nil\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) IsLittleEndian() bool {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"IsLittleEndian\")\n\tret0, _ := ret[0].(bool)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m 
*MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *WriteCloser) Write(p []byte) (int, error) {\n\tret := _m.Called(p)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(p)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(p)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (bio *BinaryIO) WriteUint8(off int64, value uint8) {\n\tbuf := []byte{value}\n\tbio.WriteAt(off, buf[:])\n}", "func (m *MockisCryptoAsymApiReqSetupPrivateKeyEx_Key) MarshalTo(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"MarshalTo\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBitFlags(t *testing.T){\n\tvar bitflag8 BitFlag8 = 16;\n\tvar bitflag16 BitFlag16 = 256;\n\tvar bitflag32 BitFlag32 = 65536;\n\tvar bitflag64 BitFlag64 = 4294967296;\n\tvar bitflag8_p *BitFlag8 = &(bitflag8);\n\tvar bitflag16_p *BitFlag16 = &(bitflag16);\n\tvar bitflag32_p *BitFlag32 = &(bitflag32);\n\tvar bitflag64_p *BitFlag64 = &(bitflag64);\n\t//BitFlag8 test\n\t_ = bitflag8;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag8, bitflag8, bitflag8_p, *bitflag8_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag8.GetSize(), 
bitflag8_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag8.HasFlagBit(uint64(8)), bitflag8_p.HasFlagBit(uint64(8)), bitflag8.HasFlagBit(uint64(16)), bitflag8_p.HasFlagBit(uint64(16)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag8.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag8.UnsetFlagBit(uint64(16));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag8.GetFlagBit(uint64(8)), bitflag8_p.GetFlagBit(uint64(8)), bitflag8.GetFlagBit(uint64(16)), bitflag8_p.GetFlagBit(uint64(16)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag8.GetBit(uint8(2)), bitflag8_p.GetBit(uint8(2)), bitflag8.GetBit(uint8(5)), bitflag8_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag8.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag8.GetBit(uint8(2)), bitflag8_p.GetBit(uint8(2)), bitflag8.GetBit(uint8(5)), bitflag8_p.GetBit(uint8(5)));\n\t//BitFlag16 test\n\t_ = bitflag16;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag16, bitflag16, bitflag16_p, *bitflag16_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag16.GetSize(), bitflag16_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag16.HasFlagBit(uint64(8)), bitflag16_p.HasFlagBit(uint64(8)), bitflag16.HasFlagBit(uint64(256)), bitflag16_p.HasFlagBit(uint64(256)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag16.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag16.UnsetFlagBit(uint64(256));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag16.GetFlagBit(uint64(8)), bitflag16_p.GetFlagBit(uint64(8)), bitflag16.GetFlagBit(uint64(256)), bitflag16_p.GetFlagBit(uint64(256)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag16.GetBit(uint8(2)), bitflag16_p.GetBit(uint8(2)), bitflag16.GetBit(uint8(5)), bitflag16_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag16.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag16.GetBit(uint8(2)), bitflag16_p.GetBit(uint8(2)), bitflag16.GetBit(uint8(5)), bitflag16_p.GetBit(uint8(5)));\n\t//BitFlag32 test\n\t_ = bitflag32;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag32, bitflag32, bitflag32_p, *bitflag32_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag32.GetSize(), bitflag32_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag32.HasFlagBit(uint64(8)), bitflag32_p.HasFlagBit(uint64(8)), bitflag32.HasFlagBit(uint64(65536)), bitflag32_p.HasFlagBit(uint64(65536)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), 
bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag32.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag32.UnsetFlagBit(uint64(65536));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag32.GetFlagBit(uint64(8)), bitflag32_p.GetFlagBit(uint64(8)), bitflag32.GetFlagBit(uint64(65536)), bitflag32_p.GetFlagBit(uint64(65536)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag32.GetBit(uint8(2)), bitflag32_p.GetBit(uint8(2)), bitflag32.GetBit(uint8(5)), bitflag32_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag32.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag32.GetBit(uint8(2)), bitflag32_p.GetBit(uint8(2)), bitflag32.GetBit(uint8(5)), bitflag32_p.GetBit(uint8(5)));\n\t//BitFlag64 test\n\t_ = bitflag64;\n\tt.Logf(\"%T: %d @ %p (%d)\", bitflag64, bitflag64, bitflag64_p, *bitflag64_p);\n\tt.Logf(\"GetSize: %d (%d)\", bitflag64.GetSize(), bitflag64_p.GetSize());\n\tt.Logf(\"HasFlagBit(8): %t (%t) HasFlagBit(16): %t (%t)\", bitflag64.HasFlagBit(uint64(8)), bitflag64_p.HasFlagBit(uint64(8)), bitflag64.HasFlagBit(uint64(4294967296)), bitflag64_p.HasFlagBit(uint64(4294967296)));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"SetFlagBit(8)\");\n\tbitflag64.SetFlagBit(uint64(8));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"UnsetFlagBit(16)\");\n\tbitflag64.UnsetFlagBit(uint64(4294967296));\n\tt.Logf(\"GetFlagBit(8): %d (%d) GetFlagBit(16): %d (%d)\", bitflag64.GetFlagBit(uint64(8)), bitflag64_p.GetFlagBit(uint64(8)), bitflag64.GetFlagBit(uint64(4294967296)), bitflag64_p.GetFlagBit(uint64(4294967296)));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag64.GetBit(uint8(2)), bitflag64_p.GetBit(uint8(2)), bitflag64.GetBit(uint8(5)), bitflag64_p.GetBit(uint8(5)));\n\tt.Logf(\"SetBit(5, 1)\");\n\tbitflag64.SetBit(uint8(5), uint8(1));\n\tt.Logf(\"GetBit(2): %d (%d) GetBit(5): %d (%d)\", bitflag64.GetBit(uint8(2)), bitflag64_p.GetBit(uint8(2)), bitflag64.GetBit(uint8(5)), bitflag64_p.GetBit(uint8(5)));\n\t//arraylist test\n\tarray_list := arraylist.New();\n\tarray_list.Add( bitflag8 );\n\tt.Logf(\"%T: %v Size: %d\", array_list, array_list, array_list.Size());\n\tarray_list.Add( bitflag16, bitflag32, bitflag64 );\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Insert(2, bitflag32_p);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tbitflag32.SetBit(uint8(7), uint8(1));\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Swap(2, 3);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Remove(2);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Remove(2);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Insert(2, *bitflag32_p);\n\tt.Logf(\"%v (size: %d)\", array_list, array_list.Size());\n\tarray_list.Clear();\n\tt.Logf(\"%v (size: %d)\", array_list, 
array_list.Size());\n\n\treturn;\n}" ]
[ "0.7526864", "0.75220495", "0.7294613", "0.72525036", "0.72291154", "0.7033541", "0.6841485", "0.6598849", "0.6146291", "0.60917044", "0.60411733", "0.5988987", "0.5983668", "0.59794724", "0.5931811", "0.5925149", "0.5913828", "0.58781624", "0.5848677", "0.5779525", "0.5764866", "0.5723756", "0.57064545", "0.568135", "0.5667554", "0.5650651", "0.5601818", "0.5575697", "0.55562496", "0.5488321", "0.5447318", "0.5442981", "0.5435019", "0.5423636", "0.5408756", "0.53989625", "0.5393048", "0.53737015", "0.53578955", "0.5323296", "0.5317529", "0.53165203", "0.530359", "0.5278422", "0.5272788", "0.5253482", "0.524254", "0.5225969", "0.52204096", "0.5211276", "0.5209431", "0.52091444", "0.5205105", "0.52048093", "0.5200027", "0.51978594", "0.5188005", "0.51829886", "0.5181177", "0.5159561", "0.5147982", "0.5135612", "0.51311296", "0.5130641", "0.510863", "0.5096053", "0.50924444", "0.5089581", "0.50864434", "0.50814927", "0.5071601", "0.5071426", "0.50604934", "0.5059219", "0.5048474", "0.50405693", "0.5033162", "0.50262547", "0.5021415", "0.498843", "0.49600646", "0.49571776", "0.49536082", "0.49503297", "0.4945336", "0.49420598", "0.4923625", "0.4910253", "0.4910159", "0.49018478", "0.48932698", "0.4879388", "0.48793736", "0.48693588", "0.48664597", "0.4844854", "0.48386222", "0.483712", "0.4821047", "0.482016" ]
0.7615206
0
WriteInt8 is a helper method to define mock.On call logicalName string bitLength uint8 value int8 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call { return &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On("WriteInt8", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, 
arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Int8(arg0 string) int8 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int8\", arg0)\n\tret0, _ := ret[0].(int8)\n\treturn ret0\n}", "func (m *memory) Write8(adr uint16, val uint8) {\n\tm.ram[adr] = val\n}", "func (m *MockSession) Uint8(arg0 string) byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint8\", arg0)\n\tret0, _ := ret[0].(byte)\n\treturn ret0\n}", "func (m Measurement) AddInt8(name string, value int8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func WriteInt8(buffer []byte, offset int, value int8) {\n buffer[offset] = byte(value)\n}", "func (m Measurement) AddUInt8(name string, value uint8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (b *Bus) Write8(addr mirv.Address, v uint8) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write8(addr-blk.s, v)\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (instance *Instance) SetInt8(fieldName string, value int8) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func WriteUInt8(buffer []byte, offset int, value uint8) {\n buffer[offset] = byte(value)\n}", "func (util *MarshalUtil) WriteUint8(value uint8) *MarshalUtil {\n\twriteEndOffset := util.expandWriteCapacity(Uint8Size)\n\n\tutil.bytes[util.writeOffset] = value\n\n\tutil.WriteSeek(writeEndOffset)\n\n\treturn util\n}", "func (m *MockBigInterface) Foo8(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7 bool) Bar {\n\tif m.FnFoo8 != nil {\n\t\treturn m.FnFoo8(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\t}\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Foo8\", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)\n\tret0, _ := ret[0].(Bar)\n\treturn ret0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint8(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint8\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint8), arg0, arg1, arg2)\n}", "func (bio *BinaryIO) WriteUint8(off int64, value uint8) {\n\tbuf := []byte{value}\n\tbio.WriteAt(off, buf[:])\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, 
encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (s *serialController) Write8(address uint16, v byte) {\n\tswitch address {\n\tcase 0xFF01:\n\t\ts.writeRegister(registerFF01, v)\n\tcase 0xFF02:\n\t\ts.writeRegister(registerFF02, v)\n\tdefault:\n\t\tnotImplemented(\"write of unimplemented SERIAL register at %#4x\", address)\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\tthis.Mutex.Lock()\n\tdefer this.Mutex.Unlock()\n\n\tif device, err := this.Open(bus); err != nil {\n\t\treturn err\n\t} else {\n\t\treturn linux.I2CWriteUint8(device.Fd(), reg, value, linux.I2CFunction(device.funcs))\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func ExpectInt8(t *testing.T, field string, expected int8, found int8) 
{\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func WriteUint8(w io.Writer, v uint8) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (instance *Instance) SetUint8(fieldName string, value uint8) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\treturn gopi.ErrNotImplemented\n\n}", "func (s *videoController) Write8(address uint16, v byte) {\n\tif s.isRegisterAddress(address) {\n\t\tswitch address {\n\t\tcase registerFF41:\n\t\t\t// lowest 3 bits are read-only\n\t\t\tcurrent := s.registers[address-offsetRegisters]\n\t\t\ts.registers[address-offsetRegisters] = copyBits(v, current, 0, 1, 2)\n\t\tcase registerFF44:\n\t\t\t// do nothing - address is read-only\n\t\tcase 0xFF46:\n\t\t\tnotImplemented(\"OAM DMA transfers not implemented\")\n\t\tdefault:\n\t\t\ts.registers[address-offsetRegisters] = v\n\t\t}\n\t\treturn\n\t}\n\n\tif s.isOAMAddress(address) {\n\t\tif s.oamAccessible {\n\t\t\ts.oam[address-offsetOAM] = v\n\t\t}\n\t\treturn\n\t}\n\n\tif s.vramAccessible {\n\t\ts.vram[address-offsetVRAM] = v\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteUint8(v uint8) {\n\tout.Append(byte(v))\n}", "func (stream *Stream) WriteUint8(val uint8) {\n\tstream.ensure(3)\n\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n}", "func Int8Arg(register Register, name string, options ...ArgOptionApplyer) *int8 {\n\tp := new(int8)\n\t_ = Int8ArgVar(register, p, name, options...)\n\treturn p\n}", "func Uint8(name string, value uint8, usage string) *uint8 {\n\treturn Environment.Uint8(name, value, usage)\n}", "func (m *OnPremisesExtensionAttributes) SetExtensionAttribute8(value *string)() {\n m.extensionAttribute8 = value\n}", "func (f *FlagSet) Int8(name string, value int8, usage string) *int8 {\n\tp := new(int8)\n\tf.Int8VarP(p, name, \"\", value, usage)\n\treturn p\n}", "func ExpectUint8(t *testing.T, field string, expected uint8, found uint8) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (_e 
*MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func opI8ToStr(expr *CXExpression, fp int) {\n\toutB0 := FromStr(strconv.FormatInt(int64(ReadI8(fp, expr.Inputs[0])), 10))\n\tWriteObject(GetOffset_str(fp, expr.Outputs[0]), outB0)\n}", "func Uint8(colName string) sif.GenericColumnAccessor[uint8] {\n\treturn sif.CreateColumnAccessor[uint8](&uint8Type{}, colName)\n}", "func (out *OutBuffer) WriteUint8(v uint8) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 1 {\n\t\treturn false\n\t}\n\n\tcontainer[0] = byte(v)\n\tout.pos += 1\n\treturn true\n}", "func MeasureInt8(name string, field string, value int8) Measurement {\n\treturn NewMeasurement(name).AddInt8(field, value)\n}", "func (w *Writer) WriteBits8(b byte, n uint) error {\n\t//fmt.Printf(\"b: %d, n: %d, free: %d\\n\", b, n, w.free)\n\tif n == 0 {\n\t\treturn nil\n\t}\n\n\tif n > 8 {\n\t\tn = 8\n\t}\n\n\tvar rem uint\n\tif n > w.free {\n\t\trem = n - w.free\n\t\tn = w.free\n\t}\n\n\tw.free -= n\n\tw.bits <<= n\n\tw.bits |= ((b >> rem) & (1<<n - 1))\n\n\tvar err error\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\tif rem == 0 {\n\t\treturn err\n\t}\n\n\treturn w.WriteBits8(b, rem)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (d *Display) write8bits(bits uint8, rs int) {\n\td.rs.Write(rs)\n\td.write4bits(bits)\n\td.write4bits(bits << 4)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) AppendUint8(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint8\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint8), arg0, arg1)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn 
&MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func PutCode8(code byte, buffer []byte, index int) {\n\tbuffer[index] = code\n}", "func (w *FormSerializationWriter) WriteInt8Value(key string, value *int8) error {\n\tif value != nil {\n\t\tcast := int64(*value)\n\t\treturn w.WriteInt64Value(key, &cast)\n\t}\n\treturn nil\n}", "func FormatInt8(name string) string {\n\treturn formatIntFunction(name, true)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (m *MockIOPackage) ReadUint8(arg0 []byte, arg1 int) (uint8, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint8\", arg0, arg1)\n\tret0, _ := ret[0].(uint8)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Int8(k string, v int8) Field {\n\treturn Field{Key: k, Value: valf.Int8(v)}\n}", "func Int8(key string, val int8) Field {\n\treturn Field{Key: key, Type: core.Int8Type, Integer: int64(val)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, 
_va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint8Arg(register Register, name string, options ...ArgOptionApplyer) *uint8 {\n\tp := new(uint8)\n\t_ = Uint8ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func MeasureUInt8(name string, field string, value uint8) Measurement {\n\treturn NewMeasurement(name).AddUInt8(field, value)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint8(t *testing.T) {\n\ttests := []struct {\n\t\tin uint8 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01}},\n\t\t{128, []byte{0x80}},\n\t\t{255, []byte{0xff}},\n\t}\n\n\tt.Logf(\"Running uint8 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint8(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint8\n\t\terr = ReadUint8(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func Uint8(k string, v uint8) Field {\n\treturn Field{Key: k, Value: valf.Uint8(v)}\n}", "func opUI8ToStr(inputs []ast.CXValue, outputs []ast.CXValue) {\n\toutV0 := strconv.FormatUint(uint64(inputs[0].Get_ui8()), 10)\n\toutputs[0].Set_str(outV0)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range 
writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func MakeInt8(v int8) Int8 {\n\treturn Int8{val: v, set: true}\n}", "func Uint8Tag(name interface{}, value uint8) Tag {\n\treturn &tag{\n\t\ttagType: TagUint8,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WB(address uint16, value uint8) {}", "func (t Int8) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteInt8(int8(t))\n\treturn aWriterPool.Put(lw)\n}", "func FormatUint8(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func Uint8Var(p *uint8, name string, value uint8, usage string) {\n\tEnvironment.Uint8Var(p, name, value, usage)\n}", "func newUInt8(value uint8) RootType {\n return &UInt8 { value }\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func opUI8Eq(inputs []ast.CXValue, outputs []ast.CXValue) {\n\toutV0 := inputs[0].Get_ui8() == inputs[1].Get_ui8()\n\toutputs[0].Set_bool(outV0)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = 
append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (e Entry) Uint8(key string, value uint8) (entry Entry) {\n\te.Uint64(key, uint64(value))\n\treturn e\n}", "func (ba *BitArray) Add8(bits byte) {\n\tn := uint64(bits)\n\tn = n << 56\n\tba.AddVar(n, 8)\n}", "func (s *Streamer) Uint8(v uint8) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint8(s.buffer, v)\n\treturn s\n}", "func Int8(any interface{}) int8 {\n\tif any == nil {\n\t\treturn 0\n\t}\n\tif v, ok := any.(int8); ok {\n\t\treturn v\n\t}\n\treturn int8(Int64(any))\n}", "func (s *Streamer) Int8(v int8) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendInt8(s.buffer, v)\n\treturn s\n}", "func (m *Message) putUint8(v uint8) {\n\tb := m.bufferForPut(1)\n\tdefer b.Advance(1)\n\n\tb.Bytes[b.Offset] = v\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}" ]
[ "0.76325625", "0.72302467", "0.72160834", "0.719626", "0.7053013", "0.68904275", "0.6882388", "0.63131183", "0.59803814", "0.5924511", "0.58434534", "0.57836455", "0.5724962", "0.57248455", "0.5676652", "0.5562593", "0.55599374", "0.55452335", "0.5531778", "0.5526422", "0.5486618", "0.5468745", "0.54571426", "0.54555523", "0.5443698", "0.536436", "0.53557664", "0.5308167", "0.52999324", "0.52925104", "0.52680707", "0.5259932", "0.5224312", "0.5204525", "0.5203277", "0.5182378", "0.51788855", "0.51638705", "0.51349634", "0.5124457", "0.5095122", "0.5092792", "0.50803214", "0.50739324", "0.505145", "0.5040944", "0.5039878", "0.50378686", "0.50320697", "0.5029743", "0.5011868", "0.49985445", "0.49845967", "0.49763212", "0.49718153", "0.49590296", "0.4955848", "0.4955819", "0.4947993", "0.49409008", "0.49270022", "0.4921366", "0.49127048", "0.49015117", "0.4889451", "0.48854503", "0.48653945", "0.484278", "0.48326564", "0.48178542", "0.48178455", "0.48061514", "0.48040497", "0.48039475", "0.4798099", "0.47969308", "0.47913897", "0.4784749", "0.47576883", "0.47531644", "0.4752008", "0.47485876", "0.47434175", "0.47323585", "0.47306296", "0.4728984", "0.47279954", "0.4721348", "0.47176793", "0.47139776", "0.4709143", "0.47008094", "0.46963683", "0.46887377", "0.46887317", "0.46669507", "0.46632758", "0.46443877", "0.46342364", "0.46339256" ]
0.779784
0
WriteSerializable provides a mock function with given fields: ctx, serializable
func (_m *MockWriteBufferJsonBased) WriteSerializable(ctx context.Context, serializable Serializable) error { ret := _m.Called(ctx, serializable) var r0 error if rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok { r0 = rf(ctx, serializable) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferJsonBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferJsonBased_WriteSerializable_Call {\n\treturn &MockWriteBufferJsonBased_WriteSerializable_Call{Call: _e.mock.On(\"WriteSerializable\", ctx, serializable)}\n}", "func (_m *MockWriteBufferXmlBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferXmlBased_WriteSerializable_Call {\n\treturn &MockWriteBufferXmlBased_WriteSerializable_Call{Call: _e.mock.On(\"WriteSerializable\", ctx, serializable)}\n}", "func (m *IncomingContext) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"observedParticipantId\", m.GetObservedParticipantId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"onBehalfOf\", m.GetOnBehalfOf())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"sourceParticipantId\", m.GetSourceParticipantId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"transferor\", m.GetTransferor())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Synchronization) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetJobs() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetJobs()))\n for i, v := range m.GetJobs() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"jobs\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSecrets() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSecrets()))\n for i, v := range m.GetSecrets() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"secrets\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTemplates() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTemplates()))\n for i, v := range m.GetTemplates() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"templates\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) 
error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *Vulnerability) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteBoolValue(\"activeExploitsObserved\", m.GetActiveExploitsObserved())\n if err != nil {\n return err\n }\n }\n if m.GetArticles() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetArticles()))\n for i, v := range m.GetArticles() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"articles\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCommonWeaknessEnumerationIds() != nil {\n err = writer.WriteCollectionOfStringValues(\"commonWeaknessEnumerationIds\", m.GetCommonWeaknessEnumerationIds())\n if err != nil {\n return err\n }\n }\n if m.GetComponents() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetComponents()))\n for i, v := range m.GetComponents() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"components\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"cvss2Summary\", m.GetCvss2Summary())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"cvss3Summary\", m.GetCvss3Summary())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n if m.GetExploits() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExploits()))\n for i, v := range m.GetExploits() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"exploits\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"exploitsAvailable\", m.GetExploitsAvailable())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"hasChatter\", m.GetHasChatter())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"priorityScore\", m.GetPriorityScore())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"publishedDateTime\", m.GetPublishedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetReferences() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetReferences()))\n for i, v := range m.GetReferences() {\n if v != nil {\n cast[i] = 
v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"references\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"remediation\", m.GetRemediation())\n if err != nil {\n return err\n }\n }\n if m.GetSeverity() != nil {\n cast := (*m.GetSeverity()).String()\n err = writer.WriteStringValue(\"severity\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func serialize(toMarshal interface{}) *bytes.Buffer {\n\tjsonStr, _ := json.Marshal(toMarshal)\n\treturn bytes.NewBuffer(jsonStr)\n}", "func (m *MockManager) SerializeShipMetadata(arg0 api.ShipAppMetadata, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeShipMetadata\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestObjectSerialize(t *testing.T) {\n\n\tjo := NewJavaTcObject(1)\n\tclz := NewJavaTcClassDesc(\"com.david.test.serialize.D\", 1, 0x02)\n\tjfa := NewJavaField(TC_PRIM_INTEGER, \"a\", 1)\n\tjfb := NewJavaField(TC_OBJ_OBJECT, \"b\", \"abcdefg\")\n\tjfb.FieldObjectClassName = \"java.lang.String\"\n\tclz.AddField(jfa)\n\tclz.AddField(jfb)\n\tclz.SortFields()\n\n\tjo.AddClassDesc(clz)\n\n\tvar f *os.File\n\tvar err error\n\n\tif f, err = os.OpenFile(\"d:\\\\tmp\\\\serialize-go.data\", os.O_CREATE|os.O_TRUNC, 0755); err != nil {\n\t\tt.Fatalf(\"got error when open file %v\\n\", err)\n\t}\n\tdefer f.Close()\n\n\tif err = SerializeJavaEntity(f, jo); err != nil {\n\t\tt.Fatalf(\"SerializeJavaEntity got %v\\n\", err)\n\t} else {\n\t\tt.Logf(\"SerializeJavaEntity succeed!\\n\")\n\t}\n}", "func (m *VirtualEndpoint) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAuditEvents() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAuditEvents()))\n for i, v := range m.GetAuditEvents() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"auditEvents\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetBulkActions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBulkActions()))\n for i, v := range m.GetBulkActions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"bulkActions\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCloudPCs() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCloudPCs()))\n for i, v := range m.GetCloudPCs() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"cloudPCs\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"crossCloudGovernmentOrganizationMapping\", m.GetCrossCloudGovernmentOrganizationMapping())\n if err != nil {\n return err\n }\n }\n if 
m.GetDeviceImages() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDeviceImages()))\n for i, v := range m.GetDeviceImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"deviceImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetExternalPartnerSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExternalPartnerSettings()))\n for i, v := range m.GetExternalPartnerSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"externalPartnerSettings\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetFrontLineServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFrontLineServicePlans()))\n for i, v := range m.GetFrontLineServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"frontLineServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetGalleryImages() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetGalleryImages()))\n for i, v := range m.GetGalleryImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"galleryImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOnPremisesConnections() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOnPremisesConnections()))\n for i, v := range m.GetOnPremisesConnections() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"onPremisesConnections\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"organizationSettings\", m.GetOrganizationSettings())\n if err != nil {\n return err\n }\n }\n if m.GetProvisioningPolicies() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetProvisioningPolicies()))\n for i, v := range m.GetProvisioningPolicies() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"provisioningPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"reports\", m.GetReports())\n if err != nil {\n return err\n }\n }\n if m.GetServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetServicePlans()))\n for i, v := range m.GetServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"servicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSharedUseServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSharedUseServicePlans()))\n for i, v := range m.GetSharedUseServicePlans() {\n if v != nil {\n cast[i] 
= v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"sharedUseServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSnapshots() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSnapshots()))\n for i, v := range m.GetSnapshots() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"snapshots\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSupportedRegions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSupportedRegions()))\n for i, v := range m.GetSupportedRegions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"supportedRegions\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetUserSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetUserSettings()))\n for i, v := range m.GetUserSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"userSettings\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AuthenticationContext) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetDetail() != nil {\n cast := (*m.GetDetail()).String()\n err := writer.WriteStringValue(\"detail\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"id\", m.GetId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func AssertSerialize(t *testing.T, dialect jet.Dialect, serializer jet.Serializer, query string, args ...interface{}) {\n\tout := jet.SQLBuilder{Dialect: dialect}\n\tjet.Serialize(serializer, jet.SelectStatementType, &out)\n\n\t//fmt.Println(out.Buff.String())\n\n\tAssertDeepEqual(t, out.Buff.String(), query)\n\n\tif len(args) > 0 {\n\t\tAssertDeepEqual(t, out.Args, args)\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, 
uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockClient) Serialize() ([]byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Serialize\")\n\tret0, _ := ret[0].([]byte)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *ItemItemsItemWorkbookFunctionsRandBetweenPostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteObjectValue(\"bottom\", m.GetBottom())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"top\", m.GetTop())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func Serialize(o interface{}) ([]byte, error) {\n\tautil.TODO(\"CBOR-serialization\")\n\treturn nil, nil\n}", "func (m *ChannelIdentity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"channelId\", m.GetChannelId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"teamId\", m.GetTeamId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *DeviceLocalCredentialInfo) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCredentials() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCredentials()))\n for i, v := range m.GetCredentials() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"credentials\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceName\", m.GetDeviceName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastBackupDateTime\", 
m.GetLastBackupDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"refreshDateTime\", m.GetRefreshDateTime())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SignatureKeyHolderMock) WriteTo(p io.Writer) (r int64, r1 error) {\n\tcounter := atomic.AddUint64(&m.WriteToPreCounter, 1)\n\tdefer atomic.AddUint64(&m.WriteToCounter, 1)\n\n\tif len(m.WriteToMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.WriteToMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. %v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.WriteToMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\n\t\tresult := m.WriteToMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToMock.mainExpectation != nil {\n\n\t\tinput := m.WriteToMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.WriteToMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. %v\", p)\n\t\treturn\n\t}\n\n\treturn m.WriteToFunc(p)\n}", "func (m *MockManager) SerializeHelmValues(arg0, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeHelmValues\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *ServicePlanInfo) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"appliesTo\", m.GetAppliesTo())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"provisioningStatus\", m.GetProvisioningStatus())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"servicePlanId\", m.GetServicePlanId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"servicePlanName\", m.GetServicePlanName())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ItemItemsItemWorkbookFunctionsComplexPostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteObjectValue(\"iNum\", m.GetINum())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"realNum\", m.GetRealNum())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"suffix\", m.GetSuffix())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *CallTranscript) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = 
writer.WriteByteArrayValue(\"content\", m.GetContent())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteByteArrayValue(\"metadataContent\", m.GetMetadataContent())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockManager) SerializeNamespace(arg0 string) error {\n\tret := m.ctrl.Call(m, \"SerializeNamespace\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *Planner) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetBuckets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetBuckets())\n err = writer.WriteCollectionOfObjectValues(\"buckets\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPlans() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPlans())\n err = writer.WriteCollectionOfObjectValues(\"plans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTasks() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTasks())\n err = writer.WriteCollectionOfObjectValues(\"tasks\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) 
WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *TokenStorage) Save(id []byte, context common.TokenContext, data []byte) error {\n\tret := _m.Called(id, context, data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte, common.TokenContext, []byte) error); ok {\n\t\tr0 = rf(id, context, data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *cacheableStoreMock) Write() error {\n\treturn c.err\n}", "func (m *BusinessScenarioPlanner) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"planConfiguration\", m.GetPlanConfiguration())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"taskConfiguration\", m.GetTaskConfiguration())\n if err != nil {\n return err\n }\n }\n if m.GetTasks() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTasks()))\n for i, v := range m.GetTasks() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"tasks\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Malware) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m 
*MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestRecordSerialization(t *testing.T) {\n\tcommon.Log(\"test\", \"\\n\\nTestRecordSerialization\")\n\tindexFile, err := os.OpenFile(config.Base.IndexPrefix+\"_record_persistence_test\", os.O_RDWR|os.O_CREATE, 0666)\n\tcommon.FaceIt(err)\n\tdefer indexFile.Close()\n\n\trecord := common.Record{}\n\tslots := record.Slots()\n\tfor i := 0; i < len(slots); i++ {\n\t\t*slots[i] = uint64(i + 1)\n\t}\n\n\tindexFile.Seek(0, os.SEEK_SET)\n\tindexFile.Write(record.Serialize())\n\n\tindexFile.Seek(0, os.SEEK_SET)\n\treadRecord := common.Record{}\n\treadRecord.Deserialize(indexFile)\n\n\treadSlots := readRecord.Slots()\n\tfor i := 0; i < len(slots); i++ {\n\t\tif *slots[i] != *readSlots[i] {\n\t\t\tt.Fatalf(\"deserialized field %d ne expected %d\", *readSlots[i], *slots[i])\n\t\t}\n\t}\n}", "func (m *CloudCommunications) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCalls() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCalls()))\n for i, v := range m.GetCalls() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"calls\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOnlineMeetings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOnlineMeetings()))\n for i, v := range m.GetOnlineMeetings() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"onlineMeetings\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPresences() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPresences()))\n for i, v := range m.GetPresences() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"presences\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *IdentityProviderBase) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ExternalConnection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"configuration\", m.GetConfiguration())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n 
return err\n }\n }\n if m.GetGroups() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetGroups())\n err = writer.WriteCollectionOfObjectValues(\"groups\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetItems() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetItems())\n err = writer.WriteCollectionOfObjectValues(\"items\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n if m.GetOperations() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOperations())\n err = writer.WriteCollectionOfObjectValues(\"operations\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"schema\", m.GetSchema())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *MockManager) SerializeContentSHA(arg0 string) error {\n\tret := m.ctrl.Call(m, \"SerializeContentSHA\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *SharedWithChannelTeamInfo) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.TeamInfo.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAllowedMembers() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAllowedMembers()))\n for i, v := range m.GetAllowedMembers() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"allowedMembers\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isHostTeam\", m.GetIsHostTeam())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SocialIdentityProvider) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.IdentityProviderBase.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"clientId\", m.GetClientId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"clientSecret\", m.GetClientSecret())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"identityProviderType\", m.GetIdentityProviderType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SocialIdentityProvider) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.IdentityProviderBase.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"clientId\", m.GetClientId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"clientSecret\", m.GetClientSecret())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"identityProviderType\", m.GetIdentityProviderType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *ExternalActivity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"performedBy\", m.GetPerformedBy())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"startDateTime\", m.GetStartDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetTypeEscaped() != nil {\n cast := (*m.GetTypeEscaped()).String()\n err = writer.WriteStringValue(\"type\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Workbook) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"application\", m.GetApplication())\n if err != nil {\n return err\n }\n }\n if m.GetComments() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetComments())\n err = writer.WriteCollectionOfObjectValues(\"comments\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"functions\", m.GetFunctions())\n if err != nil {\n return err\n }\n }\n if m.GetNames() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetNames())\n err = writer.WriteCollectionOfObjectValues(\"names\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOperations() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOperations())\n err = writer.WriteCollectionOfObjectValues(\"operations\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTables() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTables())\n err = writer.WriteCollectionOfObjectValues(\"tables\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetWorksheets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetWorksheets())\n err = writer.WriteCollectionOfObjectValues(\"worksheets\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (m *DiscoveredSensitiveType) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetClassificationAttributes() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetClassificationAttributes()))\n for i, v := range 
m.GetClassificationAttributes() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err := writer.WriteCollectionOfObjectValues(\"classificationAttributes\", cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"confidence\", m.GetConfidence())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"count\", m.GetCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteUUIDValue(\"id\", m.GetId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Set) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetChildren() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetChildren()))\n for i, v := range m.GetChildren() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"children\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n if m.GetLocalizedNames() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetLocalizedNames()))\n for i, v := range m.GetLocalizedNames() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"localizedNames\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"parentGroup\", m.GetParentGroup())\n if err != nil {\n return err\n }\n }\n if m.GetProperties() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetProperties()))\n for i, v := range m.GetProperties() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"properties\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetRelations() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetRelations()))\n for i, v := range m.GetRelations() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"relations\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTerms() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTerms()))\n for i, v := range m.GetTerms() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"terms\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AuditLogRoot) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := 
m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetDirectoryAudits() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetDirectoryAudits())\n err = writer.WriteCollectionOfObjectValues(\"directoryAudits\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetProvisioning() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetProvisioning())\n err = writer.WriteCollectionOfObjectValues(\"provisioning\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSignIns() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetSignIns())\n err = writer.WriteCollectionOfObjectValues(\"signIns\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func TestMsgSerialize(t *testing.T) {\n handler := new(CmdMsgHandler)\n cmd := new(CmdMsg)\n cmd.Cmd = CMD_ENV\n cmd.Data = \"\"\n\n b, err := handler.SerializeMsg(cmd)\n if err != nil {\n t.Fatal(err)\n }\n\n obj, err := handler.DeserializeMsg(b, 255)\n if err != nil {\n t.Fatal(err)\n }\n\n newCmd, ok := obj.(*CmdMsg)\n if !ok {\n t.Fatal(\"Invalid type received %T\", obj)\n }\n\n if cmd.Cmd != newCmd.Cmd {\n t.Fatalf(\n \"Cmd mismatch: %s vs %s\", \n cmd.Cmd, \n newCmd.Cmd,\n )\n }\n\n if cmd.Data != newCmd.Data {\n t.Fatalf(\n \"Data mismatch: %s vs %s\", \n cmd.Data, \n newCmd.Data,\n ) }\n\n log.Printf(\"TestMsgSerialize: passed\")\n}", "func (m *MockManager) SerializeUpstreamContents(arg0 *state.UpstreamContents) error {\n\tret := m.ctrl.Call(m, \"SerializeUpstreamContents\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func TestJsonEncode(t *testing.T) {\n\tt.Parallel()\n\n\t// Set up a mock struct for testing\n\ttype TestStruct struct {\n\t\tTestKey string `json:\"test_key\"`\n\t\tTestKeyTwo string `json:\"test_key_two\"`\n\t\tnotAllowed string\n\t}\n\n\t// Base model and test model\n\tvar model = new(TestStruct)\n\tvar modelTest = new(TestStruct)\n\tvar allowedFields = []string{\"test_key\", \"test_key_two\"} // notice omitted: notAllowed\n\n\t// Set the testing data\n\tmodel.TestKey = \"TestValue1\"\n\tmodel.TestKeyTwo = \"TestValue2\"\n\tmodel.notAllowed = \"PrivateValue\"\n\n\t// Set the buffer and encoder\n\tvar b bytes.Buffer\n\tenc := json.NewEncoder(&b)\n\n\t// Run the encoder\n\terr := JSONEncode(enc, model, allowedFields)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Now unmarshal and test\n\tif err = json.Unmarshal(b.Bytes(), &modelTest); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// Test for our fields and values now\n\tif modelTest.TestKey != \"TestValue1\" {\n\t\tt.Fatal(\"TestKey does not have the right value! Encoding failed.\", modelTest.TestKey)\n\t} else if modelTest.TestKeyTwo != \"TestValue2\" {\n\t\tt.Fatal(\"TestKeyTwo does not have the right value! Encoding failed.\", modelTest.TestKeyTwo)\n\t} else if modelTest.notAllowed == \"PrivateValue\" {\n\t\tt.Fatal(\"Field not removed! notAllowed does not have the right value! 
Encoding failed.\", modelTest.notAllowed)\n\t}\n}", "func (m *Reminder) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"changeKey\", m.GetChangeKey())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventEndTime\", m.GetEventEndTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventId\", m.GetEventId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventLocation\", m.GetEventLocation())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventStartTime\", m.GetEventStartTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventSubject\", m.GetEventSubject())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventWebLink\", m.GetEventWebLink())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"reminderFireTime\", m.GetReminderFireTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func newMockEncodeDataFunc(mockStore *mock.NodeStore) func(chunk Chunk) []byte {\n\treturn func(chunk Chunk) []byte {\n\t\tif err := mockStore.Put(chunk.Address(), encodeData(chunk)); err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"%T: Chunk %v put: %v\", mockStore, chunk.Address().Log(), err))\n\t\t}\n\t\treturn chunk.Address()[:]\n\t}\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *AuthenticationMethod) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n return nil\n}", "func serialize(src interface{}) ([]byte, error) {\n\tbuf := new(bytes.Buffer)\n\tenc := gob.NewEncoder(buf)\n\tif err := enc.Encode(src); err != nil 
{\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func (m *Store) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"defaultLanguageTag\", m.GetDefaultLanguageTag())\n if err != nil {\n return err\n }\n }\n if m.GetGroups() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetGroups())\n err = writer.WriteCollectionOfObjectValues(\"groups\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetLanguageTags() != nil {\n err = writer.WriteCollectionOfStringValues(\"languageTags\", m.GetLanguageTags())\n if err != nil {\n return err\n }\n }\n if m.GetSets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetSets())\n err = writer.WriteCollectionOfObjectValues(\"sets\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *KeyValue) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"key\", m.GetKey())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"value\", m.GetValue())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *User) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.DirectoryObject.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"aboutMe\", m.GetAboutMe())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"accountEnabled\", m.GetAccountEnabled())\n if err != nil {\n return err\n }\n }\n if m.GetActivities() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetActivities())\n err = writer.WriteCollectionOfObjectValues(\"activities\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"ageGroup\", m.GetAgeGroup())\n if err != nil {\n return err\n }\n }\n if m.GetAgreementAcceptances() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAgreementAcceptances())\n err = writer.WriteCollectionOfObjectValues(\"agreementAcceptances\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetAppRoleAssignments() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAppRoleAssignments())\n err = writer.WriteCollectionOfObjectValues(\"appRoleAssignments\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetAssignedLicenses() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAssignedLicenses())\n err = 
writer.WriteCollectionOfObjectValues(\"assignedLicenses\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetAssignedPlans() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAssignedPlans())\n err = writer.WriteCollectionOfObjectValues(\"assignedPlans\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"authentication\", m.GetAuthentication())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"authorizationInfo\", m.GetAuthorizationInfo())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"birthday\", m.GetBirthday())\n if err != nil {\n return err\n }\n }\n if m.GetBusinessPhones() != nil {\n err = writer.WriteCollectionOfStringValues(\"businessPhones\", m.GetBusinessPhones())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"calendar\", m.GetCalendar())\n if err != nil {\n return err\n }\n }\n if m.GetCalendarGroups() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetCalendarGroups())\n err = writer.WriteCollectionOfObjectValues(\"calendarGroups\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCalendars() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetCalendars())\n err = writer.WriteCollectionOfObjectValues(\"calendars\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCalendarView() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetCalendarView())\n err = writer.WriteCollectionOfObjectValues(\"calendarView\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetChats() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetChats())\n err = writer.WriteCollectionOfObjectValues(\"chats\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"city\", m.GetCity())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"companyName\", m.GetCompanyName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"consentProvidedForMinor\", m.GetConsentProvidedForMinor())\n if err != nil {\n return err\n }\n }\n if m.GetContactFolders() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetContactFolders())\n err = writer.WriteCollectionOfObjectValues(\"contactFolders\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetContacts() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetContacts())\n err = writer.WriteCollectionOfObjectValues(\"contacts\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"country\", m.GetCountry())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n 
return err\n }\n }\n if m.GetCreatedObjects() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetCreatedObjects())\n err = writer.WriteCollectionOfObjectValues(\"createdObjects\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"creationType\", m.GetCreationType())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"department\", m.GetDepartment())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"deviceEnrollmentLimit\", m.GetDeviceEnrollmentLimit())\n if err != nil {\n return err\n }\n }\n if m.GetDeviceManagementTroubleshootingEvents() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetDeviceManagementTroubleshootingEvents())\n err = writer.WriteCollectionOfObjectValues(\"deviceManagementTroubleshootingEvents\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetDirectReports() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetDirectReports())\n err = writer.WriteCollectionOfObjectValues(\"directReports\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"drive\", m.GetDrive())\n if err != nil {\n return err\n }\n }\n if m.GetDrives() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetDrives())\n err = writer.WriteCollectionOfObjectValues(\"drives\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"employeeHireDate\", m.GetEmployeeHireDate())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"employeeId\", m.GetEmployeeId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"employeeOrgData\", m.GetEmployeeOrgData())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"employeeType\", m.GetEmployeeType())\n if err != nil {\n return err\n }\n }\n if m.GetEvents() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetEvents())\n err = writer.WriteCollectionOfObjectValues(\"events\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetExtensions() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetExtensions())\n err = writer.WriteCollectionOfObjectValues(\"extensions\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"externalUserState\", m.GetExternalUserState())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"externalUserStateChangeDateTime\", m.GetExternalUserStateChangeDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"faxNumber\", m.GetFaxNumber())\n if err != nil {\n return err\n }\n }\n if m.GetFollowedSites() != nil {\n cast := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetFollowedSites())\n err = writer.WriteCollectionOfObjectValues(\"followedSites\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"givenName\", m.GetGivenName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"hireDate\", m.GetHireDate())\n if err != nil {\n return err\n }\n }\n if m.GetIdentities() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetIdentities())\n err = writer.WriteCollectionOfObjectValues(\"identities\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetImAddresses() != nil {\n err = writer.WriteCollectionOfStringValues(\"imAddresses\", m.GetImAddresses())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"inferenceClassification\", m.GetInferenceClassification())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"insights\", m.GetInsights())\n if err != nil {\n return err\n }\n }\n if m.GetInterests() != nil {\n err = writer.WriteCollectionOfStringValues(\"interests\", m.GetInterests())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isResourceAccount\", m.GetIsResourceAccount())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"jobTitle\", m.GetJobTitle())\n if err != nil {\n return err\n }\n }\n if m.GetJoinedTeams() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetJoinedTeams())\n err = writer.WriteCollectionOfObjectValues(\"joinedTeams\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastPasswordChangeDateTime\", m.GetLastPasswordChangeDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"legalAgeGroupClassification\", m.GetLegalAgeGroupClassification())\n if err != nil {\n return err\n }\n }\n if m.GetLicenseAssignmentStates() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetLicenseAssignmentStates())\n err = writer.WriteCollectionOfObjectValues(\"licenseAssignmentStates\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetLicenseDetails() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetLicenseDetails())\n err = writer.WriteCollectionOfObjectValues(\"licenseDetails\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"mail\", m.GetMail())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"mailboxSettings\", m.GetMailboxSettings())\n if err != nil {\n return err\n }\n }\n if m.GetMailFolders() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetMailFolders())\n err = writer.WriteCollectionOfObjectValues(\"mailFolders\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"mailNickname\", m.GetMailNickname())\n if err != nil {\n return 
err\n }\n }\n if m.GetManagedAppRegistrations() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetManagedAppRegistrations())\n err = writer.WriteCollectionOfObjectValues(\"managedAppRegistrations\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetManagedDevices() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetManagedDevices())\n err = writer.WriteCollectionOfObjectValues(\"managedDevices\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"manager\", m.GetManager())\n if err != nil {\n return err\n }\n }\n if m.GetMemberOf() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetMemberOf())\n err = writer.WriteCollectionOfObjectValues(\"memberOf\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetMessages() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetMessages())\n err = writer.WriteCollectionOfObjectValues(\"messages\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"mobilePhone\", m.GetMobilePhone())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"mySite\", m.GetMySite())\n if err != nil {\n return err\n }\n }\n if m.GetOauth2PermissionGrants() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOauth2PermissionGrants())\n err = writer.WriteCollectionOfObjectValues(\"oauth2PermissionGrants\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"officeLocation\", m.GetOfficeLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"onenote\", m.GetOnenote())\n if err != nil {\n return err\n }\n }\n if m.GetOnlineMeetings() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOnlineMeetings())\n err = writer.WriteCollectionOfObjectValues(\"onlineMeetings\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesDistinguishedName\", m.GetOnPremisesDistinguishedName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesDomainName\", m.GetOnPremisesDomainName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"onPremisesExtensionAttributes\", m.GetOnPremisesExtensionAttributes())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesImmutableId\", m.GetOnPremisesImmutableId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"onPremisesLastSyncDateTime\", m.GetOnPremisesLastSyncDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetOnPremisesProvisioningErrors() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOnPremisesProvisioningErrors())\n err = 
writer.WriteCollectionOfObjectValues(\"onPremisesProvisioningErrors\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesSamAccountName\", m.GetOnPremisesSamAccountName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesSecurityIdentifier\", m.GetOnPremisesSecurityIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"onPremisesSyncEnabled\", m.GetOnPremisesSyncEnabled())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onPremisesUserPrincipalName\", m.GetOnPremisesUserPrincipalName())\n if err != nil {\n return err\n }\n }\n if m.GetOtherMails() != nil {\n err = writer.WriteCollectionOfStringValues(\"otherMails\", m.GetOtherMails())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"outlook\", m.GetOutlook())\n if err != nil {\n return err\n }\n }\n if m.GetOwnedDevices() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOwnedDevices())\n err = writer.WriteCollectionOfObjectValues(\"ownedDevices\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOwnedObjects() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOwnedObjects())\n err = writer.WriteCollectionOfObjectValues(\"ownedObjects\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"passwordPolicies\", m.GetPasswordPolicies())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"passwordProfile\", m.GetPasswordProfile())\n if err != nil {\n return err\n }\n }\n if m.GetPastProjects() != nil {\n err = writer.WriteCollectionOfStringValues(\"pastProjects\", m.GetPastProjects())\n if err != nil {\n return err\n }\n }\n if m.GetPeople() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPeople())\n err = writer.WriteCollectionOfObjectValues(\"people\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"photo\", m.GetPhoto())\n if err != nil {\n return err\n }\n }\n if m.GetPhotos() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPhotos())\n err = writer.WriteCollectionOfObjectValues(\"photos\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"planner\", m.GetPlanner())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"postalCode\", m.GetPostalCode())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"preferredDataLocation\", m.GetPreferredDataLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"preferredLanguage\", m.GetPreferredLanguage())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"preferredName\", m.GetPreferredName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"presence\", m.GetPresence())\n if err != nil {\n return err\n }\n }\n if m.GetProvisionedPlans() != nil {\n cast := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetProvisionedPlans())\n err = writer.WriteCollectionOfObjectValues(\"provisionedPlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetProxyAddresses() != nil {\n err = writer.WriteCollectionOfStringValues(\"proxyAddresses\", m.GetProxyAddresses())\n if err != nil {\n return err\n }\n }\n if m.GetRegisteredDevices() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetRegisteredDevices())\n err = writer.WriteCollectionOfObjectValues(\"registeredDevices\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetResponsibilities() != nil {\n err = writer.WriteCollectionOfStringValues(\"responsibilities\", m.GetResponsibilities())\n if err != nil {\n return err\n }\n }\n if m.GetSchools() != nil {\n err = writer.WriteCollectionOfStringValues(\"schools\", m.GetSchools())\n if err != nil {\n return err\n }\n }\n if m.GetScopedRoleMemberOf() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetScopedRoleMemberOf())\n err = writer.WriteCollectionOfObjectValues(\"scopedRoleMemberOf\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"securityIdentifier\", m.GetSecurityIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"settings\", m.GetSettings())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"showInAddressList\", m.GetShowInAddressList())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"signInSessionsValidFromDateTime\", m.GetSignInSessionsValidFromDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetSkills() != nil {\n err = writer.WriteCollectionOfStringValues(\"skills\", m.GetSkills())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"state\", m.GetState())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"streetAddress\", m.GetStreetAddress())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"surname\", m.GetSurname())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"teamwork\", m.GetTeamwork())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"todo\", m.GetTodo())\n if err != nil {\n return err\n }\n }\n if m.GetTransitiveMemberOf() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTransitiveMemberOf())\n err = writer.WriteCollectionOfObjectValues(\"transitiveMemberOf\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"usageLocation\", m.GetUsageLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"userPrincipalName\", m.GetUserPrincipalName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"userType\", m.GetUserType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range 
writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *EmbeddedSIMActivationCode) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"integratedCircuitCardIdentifier\", m.GetIntegratedCircuitCardIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"matchingIdentifier\", m.GetMatchingIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"smdpPlusServerAddress\", m.GetSmdpPlusServerAddress())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *MockManager) SerializeConfig(arg0 []api.Asset, arg1 api.ReleaseMetadata, arg2 map[string]interface{}) error {\n\tret := m.ctrl.Call(m, \"SerializeConfig\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *Artifact) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n return nil\n}", "func (m *ManagementTemplateStep) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"acceptedVersion\", m.GetAcceptedVersion())\n if err != nil {\n return err\n }\n }\n if m.GetCategory() != nil {\n cast := (*m.GetCategory()).String()\n err = writer.WriteStringValue(\"category\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"createdByUserId\", m.GetCreatedByUserId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"lastActionByUserId\", m.GetLastActionByUserId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastActionDateTime\", m.GetLastActionDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"managementTemplate\", m.GetManagementTemplate())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"portalLink\", m.GetPortalLink())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"priority\", m.GetPriority())\n if err != nil {\n return err\n }\n }\n if m.GetVersions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetVersions()))\n for i, v := range m.GetVersions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = 
writer.WriteCollectionOfObjectValues(\"versions\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *TeamworkTag) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"memberCount\", m.GetMemberCount())\n if err != nil {\n return err\n }\n }\n if m.GetMembers() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetMembers()))\n for i, v := range m.GetMembers() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"members\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTagType() != nil {\n cast := (*m.GetTagType()).String()\n err = writer.WriteStringValue(\"tagType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"teamId\", m.GetTeamId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func TestTxSerialize(t *testing.T) {\n\tnoTx := NewNativeMsgTx(1, nil, nil)\n\tnoTxEncoded := []byte{\n\t\t0x01, 0x00, 0x00, 0x00, // Version\n\t\t0x00, // Varint for number of input transactions\n\t\t0x00, // Varint for number of output transactions\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Lock time\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, // Sub Network ID\n\t}\n\n\tregistryTx := NewRegistryMsgTx(1, nil, nil, 16)\n\tregistryTxEncoded := []byte{\n\t\t0x01, 0x00, 0x00, 0x00, // Version\n\t\t0x00, // Varint for number of input transactions\n\t\t0x00, // Varint for number of output transactions\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Lock time\n\t\t0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, // Sub Network ID\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Gas\n\t\t0x77, 0x56, 0x36, 0xb4, 0x89, 0x32, 0xe9, 0xa8,\n\t\t0xbb, 0x67, 0xe6, 0x54, 0x84, 0x36, 0x93, 0x8d,\n\t\t0x9f, 0xc5, 0x62, 0x49, 0x79, 0x5c, 0x0d, 0x0a,\n\t\t0x86, 0xaf, 0x7c, 0x5d, 0x54, 0x45, 0x4c, 0x4b, // Payload hash\n\t\t0x08, // Payload length varint\n\t\t0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Payload / Gas limit\n\t}\n\n\tsubnetworkTx := NewSubnetworkMsgTx(1, nil, nil, &subnetworkid.SubnetworkID{0xff}, 5, []byte{0, 1, 2})\n\n\tsubnetworkTxEncoded := []byte{\n\t\t0x01, 0x00, 0x00, 0x00, // Version\n\t\t0x00, // Varint for number of input transactions\n\t\t0x00, // Varint for number of output transactions\n\t\t0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, // Lock time\n\t\t0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\t\t0x00, 0x00, 0x00, 0x00, // Sub Network ID\n\t\t0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Gas\n\t\t0x35, 0xf9, 0xf2, 0x93, 0x0e, 0xa3, 0x44, 0x61,\n\t\t0x88, 0x22, 0x79, 0x5e, 0xee, 0xc5, 0x68, 0xae,\n\t\t0x67, 0xab, 0x29, 0x87, 0xd8, 0xb1, 0x9e, 0x45,\n\t\t0x91, 0xe1, 0x05, 0x27, 0xba, 0xa1, 0xdf, 0x3d, // Payload hash\n\t\t0x03, // Payload length varint\n\t\t0x00, 0x01, 0x02, // Payload\n\t}\n\n\ttests := []struct {\n\t\tname string\n\t\tin *MsgTx // Message to encode\n\t\tout *MsgTx // Expected decoded message\n\t\tbuf []byte // Serialized data\n\t\tscriptPubKeyLocs []int // Expected output script locations\n\t}{\n\t\t// No transactions.\n\t\t{\n\t\t\t\"noTx\",\n\t\t\tnoTx,\n\t\t\tnoTx,\n\t\t\tnoTxEncoded,\n\t\t\tnil,\n\t\t},\n\n\t\t// Registry Transaction.\n\t\t{\n\t\t\t\"registryTx\",\n\t\t\tregistryTx,\n\t\t\tregistryTx,\n\t\t\tregistryTxEncoded,\n\t\t\tnil,\n\t\t},\n\n\t\t// Sub Network Transaction.\n\t\t{\n\t\t\t\"subnetworkTx\",\n\t\t\tsubnetworkTx,\n\t\t\tsubnetworkTx,\n\t\t\tsubnetworkTxEncoded,\n\t\t\tnil,\n\t\t},\n\n\t\t// Multiple transactions.\n\t\t{\n\t\t\t\"multiTx\",\n\t\t\tmultiTx,\n\t\t\tmultiTx,\n\t\t\tmultiTxEncoded,\n\t\t\tmultiTxScriptPubKeyLocs,\n\t\t},\n\t}\n\n\tt.Logf(\"Running %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\t// Serialize the transaction.\n\t\tvar buf bytes.Buffer\n\t\terr := test.in.Serialize(&buf)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Serialize %s: error %v\", test.name, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"Serialize %s:\\n got: %s want: %s\", test.name,\n\t\t\t\tspew.Sdump(buf.Bytes()), spew.Sdump(test.buf))\n\t\t\tcontinue\n\t\t}\n\n\t\t// Deserialize the transaction.\n\t\tvar tx MsgTx\n\t\trbuf := bytes.NewReader(test.buf)\n\t\terr = tx.Deserialize(rbuf)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Deserialize #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !reflect.DeepEqual(&tx, test.out) {\n\t\t\tt.Errorf(\"Deserialize #%d\\n got: %s want: %s\", i,\n\t\t\t\tspew.Sdump(&tx), spew.Sdump(test.out))\n\t\t\tcontinue\n\t\t}\n\n\t\t// Ensure the public key script locations are accurate.\n\t\tscriptPubKeyLocs := test.in.ScriptPubKeyLocs()\n\t\tif !reflect.DeepEqual(scriptPubKeyLocs, test.scriptPubKeyLocs) {\n\t\t\tt.Errorf(\"ScriptPubKeyLocs #%d\\n got: %s want: %s\", i,\n\t\t\t\tspew.Sdump(scriptPubKeyLocs),\n\t\t\t\tspew.Sdump(test.scriptPubKeyLocs))\n\t\t\tcontinue\n\t\t}\n\t\tfor j, loc := range scriptPubKeyLocs {\n\t\t\twantScriptPubKey := test.in.TxOut[j].ScriptPubKey\n\t\t\tgotScriptPubKey := test.buf[loc : loc+len(wantScriptPubKey)]\n\t\t\tif !bytes.Equal(gotScriptPubKey, wantScriptPubKey) {\n\t\t\t\tt.Errorf(\"ScriptPubKeyLocs #%d:%d\\n unexpected \"+\n\t\t\t\t\t\"script got: %s want: %s\", i, j,\n\t\t\t\t\tspew.Sdump(gotScriptPubKey),\n\t\t\t\t\tspew.Sdump(wantScriptPubKey))\n\t\t\t}\n\t\t}\n\t}\n}", "func (m *AccessPackage) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAccessPackagesIncompatibleWith() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAccessPackagesIncompatibleWith())\n err = 
writer.WriteCollectionOfObjectValues(\"accessPackagesIncompatibleWith\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetAssignmentPolicies() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAssignmentPolicies())\n err = writer.WriteCollectionOfObjectValues(\"assignmentPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"catalog\", m.GetCatalog())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n if m.GetIncompatibleAccessPackages() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetIncompatibleAccessPackages())\n err = writer.WriteCollectionOfObjectValues(\"incompatibleAccessPackages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetIncompatibleGroups() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetIncompatibleGroups())\n err = writer.WriteCollectionOfObjectValues(\"incompatibleGroups\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isHidden\", m.GetIsHidden())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"modifiedDateTime\", m.GetModifiedDateTime())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockConn) WriteJSON(v interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteJSON\", v)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *WorkbookPivotTable) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"worksheet\", m.GetWorksheet())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Schema) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"baseType\", m.GetBaseType())\n if err != nil {\n return err\n }\n }\n if m.GetProperties() != nil {\n cast := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetProperties())\n err = writer.WriteCollectionOfObjectValues(\"properties\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (w *Writer) Serialize(v interface{}) *Writer {\n\tif v == nil {\n\t\tw.WriteNil()\n\t} else if rv, ok := v.(reflect.Value); ok {\n\t\tw.WriteValue(rv)\n\t} else {\n\t\tw.WriteValue(reflect.ValueOf(v))\n\t}\n\treturn w\n}", "func (m *AppVulnerabilityTask) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.DeviceAppManagementTask.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"appName\", m.GetAppName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appPublisher\", m.GetAppPublisher())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appVersion\", m.GetAppVersion())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"insights\", m.GetInsights())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"managedDeviceCount\", m.GetManagedDeviceCount())\n if err != nil {\n return err\n }\n }\n if m.GetManagedDevices() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetManagedDevices()))\n for i, v := range m.GetManagedDevices() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"managedDevices\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetMitigationType() != nil {\n cast := (*m.GetMitigationType()).String()\n err = writer.WriteStringValue(\"mitigationType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"mobileAppCount\", m.GetMobileAppCount())\n if err != nil {\n return err\n }\n }\n if m.GetMobileApps() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetMobileApps()))\n for i, v := range m.GetMobileApps() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"mobileApps\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"remediation\", m.GetRemediation())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *InformationProtection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"bitlocker\", m.GetBitlocker())\n if err != nil {\n return err\n }\n }\n if m.GetDataLossPreventionPolicies() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDataLossPreventionPolicies()))\n for i, v := range m.GetDataLossPreventionPolicies() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"dataLossPreventionPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"policy\", m.GetPolicy())\n if err != nil {\n return err\n }\n }\n if m.GetSensitivityLabels() != nil {\n cast := 
make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSensitivityLabels()))\n for i, v := range m.GetSensitivityLabels() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"sensitivityLabels\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"sensitivityPolicySettings\", m.GetSensitivityPolicySettings())\n if err != nil {\n return err\n }\n }\n if m.GetThreatAssessmentRequests() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetThreatAssessmentRequests()))\n for i, v := range m.GetThreatAssessmentRequests() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"threatAssessmentRequests\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (req *PutRequest) serialize(w proto.Writer, serialVersion int16) (err error) {\n\treturn req.serializeInternal(w, serialVersion, true)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *DeviceAndAppManagementAssignmentFilter) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAssignmentFilterManagementType() != nil {\n cast := (*m.GetAssignmentFilterManagementType()).String()\n err = writer.WriteStringValue(\"assignmentFilterManagementType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetPayloads() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPayloads()))\n for i, v := range m.GetPayloads() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"payloads\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPlatform() != nil {\n cast := (*m.GetPlatform()).String()\n err = writer.WriteStringValue(\"platform\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetRoleScopeTags() != nil {\n err = writer.WriteCollectionOfStringValues(\"roleScopeTags\", m.GetRoleScopeTags())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"rule\", m.GetRule())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m 
*SolutionsRoot) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetBusinessScenarios() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBusinessScenarios()))\n for i, v := range m.GetBusinessScenarios() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err := writer.WriteCollectionOfObjectValues(\"businessScenarios\", cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"virtualEvents\", m.GetVirtualEvents())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (uw *UnorderedWriter) serialize(ctx context.Context) error {\n\tif uw.buffer.Empty() {\n\t\treturn nil\n\t}\n\treturn log.LogStep(ctx, \"UnorderedWriter.serialize\", func(_ context.Context) error {\n\t\treturn uw.withWriter(func(w *Writer) error {\n\t\t\tif err := uw.buffer.WalkAdditive(func(path, datum string, r io.Reader) error {\n\t\t\t\treturn w.Add(path, datum, r)\n\t\t\t}, func(f File, datum string) error {\n\t\t\t\treturn w.Copy(f, datum)\n\t\t\t}); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn uw.buffer.WalkDeletive(func(path, datum string) error {\n\t\t\t\treturn w.Delete(path, datum)\n\t\t\t})\n\t\t})\n\t})\n}", "func (m *ServicePrincipalRiskDetection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetActivity() != nil {\n cast := (*m.GetActivity()).String()\n err = writer.WriteStringValue(\"activity\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"activityDateTime\", m.GetActivityDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"additionalInfo\", m.GetAdditionalInfo())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appId\", m.GetAppId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"correlationId\", m.GetCorrelationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"detectedDateTime\", m.GetDetectedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetDetectionTimingType() != nil {\n cast := (*m.GetDetectionTimingType()).String()\n err = writer.WriteStringValue(\"detectionTimingType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"ipAddress\", m.GetIpAddress())\n if err != nil {\n return err\n }\n }\n if m.GetKeyIds() != nil {\n err = 
writer.WriteCollectionOfStringValues(\"keyIds\", m.GetKeyIds())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastUpdatedDateTime\", m.GetLastUpdatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"location\", m.GetLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"requestId\", m.GetRequestId())\n if err != nil {\n return err\n }\n }\n if m.GetRiskDetail() != nil {\n cast := (*m.GetRiskDetail()).String()\n err = writer.WriteStringValue(\"riskDetail\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"riskEventType\", m.GetRiskEventType())\n if err != nil {\n return err\n }\n }\n if m.GetRiskLevel() != nil {\n cast := (*m.GetRiskLevel()).String()\n err = writer.WriteStringValue(\"riskLevel\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetRiskState() != nil {\n cast := (*m.GetRiskState()).String()\n err = writer.WriteStringValue(\"riskState\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"servicePrincipalDisplayName\", m.GetServicePrincipalDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"servicePrincipalId\", m.GetServicePrincipalId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"source\", m.GetSource())\n if err != nil {\n return err\n }\n }\n if m.GetTokenIssuerType() != nil {\n cast := (*m.GetTokenIssuerType()).String()\n err = writer.WriteStringValue(\"tokenIssuerType\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewSerializablePipeline() SerializablePipeline {\n\treturn SerializablePipeline{\n\t\tDisabled: false,\n\t\tKeepWaitingPipelines: false,\n\t\tLimitConcurrent: true,\n\t\tTriggers: []*Trigger{},\n\t\tAppConfig: map[string]interface{}{},\n\t\tParameterConfig: &[]*PipelineParameter{},\n\t}\n}", "func (m *SharePostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteTimeValue(\"endDateTime\", m.GetEndDateTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteBoolValue(\"notifyTeam\", m.GetNotifyTeam())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteTimeValue(\"startDateTime\", m.GetStartDateTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n 
return err\n }\n }\n return nil\n}", "func (m *MockManager) SerializeUpstream(arg0 string) error {\n\tret := m.ctrl.Call(m, \"SerializeUpstream\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func TestCanMarshalFileInstanceToJSON(t *testing.T) {\n\tfi := &FileInstance{\n\t\tID: 505,\n\t\tRepoPullID: 17,\n\t\tFileHashID: 923,\n\t\tPath: \"/test/somefile_test.go\",\n\t}\n\n\tjs, err := json.Marshal(fi)\n\tif err != nil {\n\t\tt.Fatalf(\"got non-nil error: %v\", err)\n\t}\n\n\t// read back in as empty interface to check values\n\t// should be a map whose keys are strings, values are empty interface values\n\t// per https://blog.golang.org/json-and-go\n\tvar mapGot interface{}\n\terr = json.Unmarshal(js, &mapGot)\n\tif err != nil {\n\t\tt.Fatalf(\"got non-nil error: %v\", err)\n\t}\n\tmGot := mapGot.(map[string]interface{})\n\n\t// check for expected values\n\tif float64(fi.ID) != mGot[\"id\"].(float64) {\n\t\tt.Errorf(\"expected %v, got %v\", float64(fi.ID), mGot[\"id\"].(float64))\n\t}\n\tif float64(fi.RepoPullID) != mGot[\"repopull_id\"].(float64) {\n\t\tt.Errorf(\"expected %v, got %v\", float64(fi.RepoPullID), mGot[\"repopull_id\"].(float64))\n\t}\n\tif float64(fi.FileHashID) != mGot[\"filehash_id\"].(float64) {\n\t\tt.Errorf(\"expected %v, got %v\", float64(fi.FileHashID), mGot[\"filehash_id\"].(float64))\n\t}\n\tif fi.Path != mGot[\"path\"].(string) {\n\t\tt.Errorf(\"expected %v, got %v\", fi.Path, mGot[\"path\"].(string))\n\t}\n}" ]
[ "0.75833577", "0.75421596", "0.73490566", "0.56474906", "0.53332233", "0.529335", "0.5187571", "0.5130418", "0.5108408", "0.50526226", "0.5044453", "0.50438744", "0.50290453", "0.500808", "0.4997123", "0.4987219", "0.49714684", "0.4951362", "0.49494442", "0.4940294", "0.4926888", "0.49101734", "0.49076283", "0.49004212", "0.4893916", "0.4886048", "0.48856935", "0.4882559", "0.486924", "0.48558325", "0.48352164", "0.482769", "0.4821784", "0.48039314", "0.48023424", "0.47946474", "0.47924978", "0.4772446", "0.4760994", "0.47599843", "0.47521257", "0.47377416", "0.47318", "0.47294128", "0.47284025", "0.4726482", "0.47201428", "0.47180545", "0.47170573", "0.47170573", "0.47092143", "0.4696676", "0.4695079", "0.46909052", "0.46901032", "0.4688646", "0.4685333", "0.46810016", "0.46771577", "0.46732318", "0.46726263", "0.46678197", "0.4664832", "0.4648626", "0.46430734", "0.46336317", "0.46314335", "0.46291593", "0.4627125", "0.46199214", "0.45992967", "0.45984438", "0.45916286", "0.4589364", "0.45884216", "0.45805746", "0.45703638", "0.45669407", "0.45570838", "0.45523664", "0.454117", "0.45371893", "0.45347965", "0.4534716", "0.45244792", "0.45234334", "0.4523038", "0.45220608", "0.45169908", "0.45140284", "0.45117837", "0.451135", "0.45102304", "0.45095178", "0.4502834", "0.45016187", "0.4501562", "0.44994387", "0.44993678", "0.44990638" ]
0.7721632
0
WriteSerializable is a helper method to define mock.On call ctx context.Context serializable Serializable
func (_e *MockWriteBufferJsonBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferJsonBased_WriteSerializable_Call { return &MockWriteBufferJsonBased_WriteSerializable_Call{Call: _e.mock.On("WriteSerializable", ctx, serializable)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferXmlBased_WriteSerializable_Call {\n\treturn &MockWriteBufferXmlBased_WriteSerializable_Call{Call: _e.mock.On(\"WriteSerializable\", ctx, serializable)}\n}", "func (_m *MockWriteBufferJsonBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *IncomingContext) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"observedParticipantId\", m.GetObservedParticipantId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"onBehalfOf\", m.GetOnBehalfOf())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"sourceParticipantId\", m.GetSourceParticipantId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"transferor\", m.GetTransferor())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *CallTranscript) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteByteArrayValue(\"content\", m.GetContent())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteByteArrayValue(\"metadataContent\", m.GetMetadataContent())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func serialize(toMarshal interface{}) *bytes.Buffer {\n\tjsonStr, _ := json.Marshal(toMarshal)\n\treturn bytes.NewBuffer(jsonStr)\n}", "func (m *VirtualEndpoint) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAuditEvents() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAuditEvents()))\n for i, v := range m.GetAuditEvents() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"auditEvents\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetBulkActions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBulkActions()))\n for i, v := range m.GetBulkActions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"bulkActions\", cast)\n if err != nil {\n return err\n 
}\n }\n if m.GetCloudPCs() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCloudPCs()))\n for i, v := range m.GetCloudPCs() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"cloudPCs\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"crossCloudGovernmentOrganizationMapping\", m.GetCrossCloudGovernmentOrganizationMapping())\n if err != nil {\n return err\n }\n }\n if m.GetDeviceImages() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDeviceImages()))\n for i, v := range m.GetDeviceImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"deviceImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetExternalPartnerSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExternalPartnerSettings()))\n for i, v := range m.GetExternalPartnerSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"externalPartnerSettings\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetFrontLineServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFrontLineServicePlans()))\n for i, v := range m.GetFrontLineServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"frontLineServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetGalleryImages() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetGalleryImages()))\n for i, v := range m.GetGalleryImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"galleryImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOnPremisesConnections() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOnPremisesConnections()))\n for i, v := range m.GetOnPremisesConnections() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"onPremisesConnections\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"organizationSettings\", m.GetOrganizationSettings())\n if err != nil {\n return err\n }\n }\n if m.GetProvisioningPolicies() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetProvisioningPolicies()))\n for i, v := range m.GetProvisioningPolicies() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"provisioningPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"reports\", m.GetReports())\n if err != nil {\n return err\n }\n }\n if m.GetServicePlans() != nil {\n cast := 
make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetServicePlans()))\n for i, v := range m.GetServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"servicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSharedUseServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSharedUseServicePlans()))\n for i, v := range m.GetSharedUseServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"sharedUseServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSnapshots() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSnapshots()))\n for i, v := range m.GetSnapshots() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"snapshots\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSupportedRegions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSupportedRegions()))\n for i, v := range m.GetSupportedRegions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"supportedRegions\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetUserSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetUserSettings()))\n for i, v := range m.GetUserSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"userSettings\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ExternalActivity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"performedBy\", m.GetPerformedBy())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"startDateTime\", m.GetStartDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetTypeEscaped() != nil {\n cast := (*m.GetTypeEscaped()).String()\n err = writer.WriteStringValue(\"type\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Synchronization) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetJobs() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetJobs()))\n for i, v := range m.GetJobs() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"jobs\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSecrets() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSecrets()))\n for i, v := range m.GetSecrets() {\n if v != nil {\n cast[i] = 
v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"secrets\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTemplates() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTemplates()))\n for i, v := range m.GetTemplates() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"templates\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AuthenticationContext) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetDetail() != nil {\n cast := (*m.GetDetail()).String()\n err := writer.WriteStringValue(\"detail\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"id\", m.GetId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (m *Vulnerability) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteBoolValue(\"activeExploitsObserved\", m.GetActiveExploitsObserved())\n if err != nil {\n return err\n }\n }\n if m.GetArticles() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetArticles()))\n for i, v := range m.GetArticles() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"articles\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCommonWeaknessEnumerationIds() != nil {\n err = writer.WriteCollectionOfStringValues(\"commonWeaknessEnumerationIds\", m.GetCommonWeaknessEnumerationIds())\n if err != nil {\n return err\n }\n }\n if m.GetComponents() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetComponents()))\n for i, v := range m.GetComponents() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"components\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"cvss2Summary\", m.GetCvss2Summary())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"cvss3Summary\", m.GetCvss3Summary())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n if m.GetExploits() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExploits()))\n for i, v := range m.GetExploits() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = 
writer.WriteCollectionOfObjectValues(\"exploits\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"exploitsAvailable\", m.GetExploitsAvailable())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"hasChatter\", m.GetHasChatter())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"priorityScore\", m.GetPriorityScore())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"publishedDateTime\", m.GetPublishedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetReferences() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetReferences()))\n for i, v := range m.GetReferences() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"references\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"remediation\", m.GetRemediation())\n if err != nil {\n return err\n }\n }\n if m.GetSeverity() != nil {\n cast := (*m.GetSeverity()).String()\n err = writer.WriteStringValue(\"severity\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func Serialize(o interface{}) ([]byte, error) {\n\tautil.TODO(\"CBOR-serialization\")\n\treturn nil, nil\n}", "func (m *ServicePlanInfo) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"appliesTo\", m.GetAppliesTo())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"provisioningStatus\", m.GetProvisioningStatus())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"servicePlanId\", m.GetServicePlanId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"servicePlanName\", m.GetServicePlanName())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Reminder) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"changeKey\", m.GetChangeKey())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventEndTime\", m.GetEventEndTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventId\", m.GetEventId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventLocation\", m.GetEventLocation())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"eventStartTime\", m.GetEventStartTime())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventSubject\", m.GetEventSubject())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"eventWebLink\", m.GetEventWebLink())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"reminderFireTime\", m.GetReminderFireTime())\n if err != nil {\n return err\n }\n }\n {\n err := 
writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (JSONPresenter) Serialize(object interface{}) []byte {\n\tserial, err := json.Marshal(object)\n\n\tif err != nil {\n\t\tlog.Printf(\"failed to serialize: \\\"%s\\\"\", err)\n\t}\n\n\treturn serial\n}", "func (m *CloudPcAuditEvent) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"activity\", m.GetActivity())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"activityDateTime\", m.GetActivityDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetActivityOperationType() != nil {\n cast := (*m.GetActivityOperationType()).String()\n err = writer.WriteStringValue(\"activityOperationType\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetActivityResult() != nil {\n cast := (*m.GetActivityResult()).String()\n err = writer.WriteStringValue(\"activityResult\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"activityType\", m.GetActivityType())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"actor\", m.GetActor())\n if err != nil {\n return err\n }\n }\n if m.GetCategory() != nil {\n cast := (*m.GetCategory()).String()\n err = writer.WriteStringValue(\"category\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"componentName\", m.GetComponentName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"correlationId\", m.GetCorrelationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n if m.GetResources() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetResources()))\n for i, v := range m.GetResources() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"resources\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *IndustryDataRunActivity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"activity\", m.GetActivity())\n if err != nil {\n return err\n }\n }\n if m.GetStatus() != nil {\n cast := (*m.GetStatus()).String()\n err = writer.WriteStringValue(\"status\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkbookPivotTable) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"worksheet\", m.GetWorksheet())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ManagementTemplateStep) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"acceptedVersion\", m.GetAcceptedVersion())\n if err != nil {\n return err\n }\n }\n if 
m.GetCategory() != nil {\n cast := (*m.GetCategory()).String()\n err = writer.WriteStringValue(\"category\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"createdByUserId\", m.GetCreatedByUserId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"lastActionByUserId\", m.GetLastActionByUserId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastActionDateTime\", m.GetLastActionDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"managementTemplate\", m.GetManagementTemplate())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"portalLink\", m.GetPortalLink())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"priority\", m.GetPriority())\n if err != nil {\n return err\n }\n }\n if m.GetVersions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetVersions()))\n for i, v := range m.GetVersions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"versions\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AuditLogRoot) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetDirectoryAudits() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetDirectoryAudits())\n err = writer.WriteCollectionOfObjectValues(\"directoryAudits\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetProvisioning() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetProvisioning())\n err = writer.WriteCollectionOfObjectValues(\"provisioning\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSignIns() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetSignIns())\n err = writer.WriteCollectionOfObjectValues(\"signIns\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Workbook) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"application\", m.GetApplication())\n if err != nil {\n return err\n }\n }\n if m.GetComments() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetComments())\n err = writer.WriteCollectionOfObjectValues(\"comments\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"functions\", m.GetFunctions())\n if err != 
nil {\n return err\n }\n }\n if m.GetNames() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetNames())\n err = writer.WriteCollectionOfObjectValues(\"names\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOperations() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOperations())\n err = writer.WriteCollectionOfObjectValues(\"operations\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTables() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTables())\n err = writer.WriteCollectionOfObjectValues(\"tables\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetWorksheets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetWorksheets())\n err = writer.WriteCollectionOfObjectValues(\"worksheets\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ExternalConnection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"configuration\", m.GetConfiguration())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n if m.GetGroups() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetGroups())\n err = writer.WriteCollectionOfObjectValues(\"groups\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetItems() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetItems())\n err = writer.WriteCollectionOfObjectValues(\"items\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n if m.GetOperations() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetOperations())\n err = writer.WriteCollectionOfObjectValues(\"operations\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"schema\", m.GetSchema())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Artifact) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n return nil\n}", "func (m *EventMessageDetail) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m 
*ItemItemsItemWorkbookFunctionsRandBetweenPostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteObjectValue(\"bottom\", m.GetBottom())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"top\", m.GetTop())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *CloudCommunications) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCalls() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCalls()))\n for i, v := range m.GetCalls() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"calls\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOnlineMeetings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOnlineMeetings()))\n for i, v := range m.GetOnlineMeetings() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"onlineMeetings\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPresences() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPresences()))\n for i, v := range m.GetPresences() {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n err = writer.WriteCollectionOfObjectValues(\"presences\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *IdentityProviderBase) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ChannelIdentity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"channelId\", m.GetChannelId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"teamId\", m.GetTeamId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call 
{\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (m *ImpactedMailboxAsset) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.ImpactedAsset.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetIdentifier() != nil {\n cast := (*m.GetIdentifier()).String()\n err = writer.WriteStringValue(\"identifier\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *DeviceLocalCredentialInfo) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCredentials() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCredentials()))\n for i, v := range m.GetCredentials() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"credentials\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceName\", m.GetDeviceName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastBackupDateTime\", m.GetLastBackupDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"refreshDateTime\", m.GetRefreshDateTime())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SocialIdentityProvider) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.IdentityProviderBase.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"clientId\", m.GetClientId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"clientSecret\", m.GetClientSecret())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"identityProviderType\", m.GetIdentityProviderType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SocialIdentityProvider) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.IdentityProviderBase.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"clientId\", m.GetClientId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"clientSecret\", m.GetClientSecret())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"identityProviderType\", m.GetIdentityProviderType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (el *ZapEventLogger) SerializeContext(ctx context.Context) ([]byte, error) {\n\tgTracer := opentrace.GlobalTracer()\n\tb := make([]byte, 0)\n\tcarrier := bytes.NewBuffer(b)\n\tspan := opentrace.SpanFromContext(ctx)\n\tif err := gTracer.Inject(span.Context(), opentrace.Binary, carrier); err != nil {\n\t\treturn nil, err\n\t}\n\treturn carrier.Bytes(), nil\n}", "func (m *ItemItemsItemWorkbookFunctionsComplexPostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteObjectValue(\"iNum\", m.GetINum())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"realNum\", m.GetRealNum())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"suffix\", m.GetSuffix())\n if err != nil {\n return err\n }\n 
}\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func AssertSerialize(t *testing.T, dialect jet.Dialect, serializer jet.Serializer, query string, args ...interface{}) {\n\tout := jet.SQLBuilder{Dialect: dialect}\n\tjet.Serialize(serializer, jet.SelectStatementType, &out)\n\n\t//fmt.Println(out.Buff.String())\n\n\tAssertDeepEqual(t, out.Buff.String(), query)\n\n\tif len(args) > 0 {\n\t\tAssertDeepEqual(t, out.Args, args)\n\t}\n}", "func (w *Writer) Serialize(v interface{}) *Writer {\n\tif v == nil {\n\t\tw.WriteNil()\n\t} else if rv, ok := v.(reflect.Value); ok {\n\t\tw.WriteValue(rv)\n\t} else {\n\t\tw.WriteValue(reflect.ValueOf(v))\n\t}\n\treturn w\n}", "func (m *ManagedAppPolicy) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"version\", m.GetVersion())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SimulationAutomationRun) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteTimeValue(\"endDateTime\", m.GetEndDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"simulationId\", m.GetSimulationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"startDateTime\", m.GetStartDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetStatus() != nil {\n cast := (*m.GetStatus()).String()\n err = writer.WriteStringValue(\"status\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (s *Serializer) Serialize(p svermaker.ProjectVersion) error {\n\ts.SerializerInvoked = true\n\treturn s.SerializerFn(p)\n}", "func (m *UserExperienceAnalyticsMetricHistory) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"deviceId\", m.GetDeviceId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"metricDateTime\", m.GetMetricDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"metricType\", m.GetMetricType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func serialize(src interface{}) ([]byte, error) {\n\tbuf := new(bytes.Buffer)\n\tenc := gob.NewEncoder(buf)\n\tif err := enc.Encode(src); err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func serialize(gnarkObject io.WriterTo, fileName string) {\n\tf, err := os.Create(fileName)\n\tassertNoError(err)\n\n\t_, err = gnarkObject.WriteTo(f)\n\tassertNoError(err)\n}", "func (m *KeyValue) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"key\", 
m.GetKey())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"value\", m.GetValue())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkbookWorksheet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCharts() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetCharts())\n err = writer.WriteCollectionOfObjectValues(\"charts\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n if m.GetNames() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetNames())\n err = writer.WriteCollectionOfObjectValues(\"names\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPivotTables() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPivotTables())\n err = writer.WriteCollectionOfObjectValues(\"pivotTables\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"position\", m.GetPosition())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"protection\", m.GetProtection())\n if err != nil {\n return err\n }\n }\n if m.GetTables() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTables())\n err = writer.WriteCollectionOfObjectValues(\"tables\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"visibility\", m.GetVisibility())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (r *SbProxy) Serialize() rotator.Object {\n\tr.RLock()\n\tdefer r.RUnlock()\n\treturn r.serialize()\n}", "func encodeMockEvent(e models.Event) ([]byte, error) {\n\tbyteBuffer, err := cbor.Marshal(e)\n\tif err != nil {\n\t\treturn []byte{}, err\n\t}\n\treturn byteBuffer, nil\n}", "func (m *SolutionsRoot) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetBusinessScenarios() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBusinessScenarios()))\n for i, v := range m.GetBusinessScenarios() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err := writer.WriteCollectionOfObjectValues(\"businessScenarios\", cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"virtualEvents\", m.GetVirtualEvents())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Set) Serialize(writer 
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetChildren() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetChildren()))\n for i, v := range m.GetChildren() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"children\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n if m.GetLocalizedNames() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetLocalizedNames()))\n for i, v := range m.GetLocalizedNames() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"localizedNames\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"parentGroup\", m.GetParentGroup())\n if err != nil {\n return err\n }\n }\n if m.GetProperties() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetProperties()))\n for i, v := range m.GetProperties() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"properties\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetRelations() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetRelations()))\n for i, v := range m.GetRelations() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"relations\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTerms() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTerms()))\n for i, v := range m.GetTerms() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"terms\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AndroidManagedStoreApp) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.MobileApp.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"appIdentifier\", m.GetAppIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appStoreUrl\", m.GetAppStoreUrl())\n if err != nil {\n return err\n }\n }\n if m.GetAppTracks() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAppTracks()))\n for i, v := range m.GetAppTracks() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"appTracks\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isPrivate\", m.GetIsPrivate())\n if err != nil {\n return err\n }\n }\n {\n err = 
writer.WriteBoolValue(\"isSystemApp\", m.GetIsSystemApp())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"packageId\", m.GetPackageId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"supportsOemConfig\", m.GetSupportsOemConfig())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"totalLicenseCount\", m.GetTotalLicenseCount())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"usedLicenseCount\", m.GetUsedLicenseCount())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ActionResultPart) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteObjectValue(\"error\", m.GetError())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *BusinessScenarioPlanner) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"planConfiguration\", m.GetPlanConfiguration())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"taskConfiguration\", m.GetTaskConfiguration())\n if err != nil {\n return err\n }\n }\n if m.GetTasks() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTasks()))\n for i, v := range m.GetTasks() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"tasks\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (m *EmbeddedSIMActivationCode) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"integratedCircuitCardIdentifier\", m.GetIntegratedCircuitCardIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"matchingIdentifier\", m.GetMatchingIdentifier())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"smdpPlusServerAddress\", m.GetSmdpPlusServerAddress())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AuthenticationMethod) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n return nil\n}", "func (m *Store) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"defaultLanguageTag\", m.GetDefaultLanguageTag())\n if err != nil {\n return err\n }\n }\n if m.GetGroups() != nil {\n cast := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetGroups())\n err = writer.WriteCollectionOfObjectValues(\"groups\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetLanguageTags() != nil {\n err = writer.WriteCollectionOfStringValues(\"languageTags\", m.GetLanguageTags())\n if err != nil {\n return err\n }\n }\n if m.GetSets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetSets())\n err = writer.WriteCollectionOfObjectValues(\"sets\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *Malware) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *MicrosoftStoreForBusinessContainedApp) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.MobileContainedApp.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"appUserModelId\", m.GetAppUserModelId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *AppliedAuthenticationEventListener) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetEventType() != nil {\n cast := (*m.GetEventType()).String()\n err := writer.WriteStringValue(\"eventType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"executedListenerId\", m.GetExecutedListenerId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteObjectValue(\"handlerResult\", m.GetHandlerResult())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func NewSerializablePipeline() SerializablePipeline {\n\treturn SerializablePipeline{\n\t\tDisabled: false,\n\t\tKeepWaitingPipelines: false,\n\t\tLimitConcurrent: true,\n\t\tTriggers: []*Trigger{},\n\t\tAppConfig: map[string]interface{}{},\n\t\tParameterConfig: &[]*PipelineParameter{},\n\t}\n}", "func (m *PrintConnector) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = 
writer.WriteStringValue(\"appVersion\", m.GetAppVersion())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"fullyQualifiedDomainName\", m.GetFullyQualifiedDomainName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"location\", m.GetLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"operatingSystem\", m.GetOperatingSystem())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"registeredDateTime\", m.GetRegisteredDateTime())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func TestObjectSerialize(t *testing.T) {\n\n\tjo := NewJavaTcObject(1)\n\tclz := NewJavaTcClassDesc(\"com.david.test.serialize.D\", 1, 0x02)\n\tjfa := NewJavaField(TC_PRIM_INTEGER, \"a\", 1)\n\tjfb := NewJavaField(TC_OBJ_OBJECT, \"b\", \"abcdefg\")\n\tjfb.FieldObjectClassName = \"java.lang.String\"\n\tclz.AddField(jfa)\n\tclz.AddField(jfb)\n\tclz.SortFields()\n\n\tjo.AddClassDesc(clz)\n\n\tvar f *os.File\n\tvar err error\n\n\tif f, err = os.OpenFile(\"d:\\\\tmp\\\\serialize-go.data\", os.O_CREATE|os.O_TRUNC, 0755); err != nil {\n\t\tt.Fatalf(\"got error when open file %v\\n\", err)\n\t}\n\tdefer f.Close()\n\n\tif err = SerializeJavaEntity(f, jo); err != nil {\n\t\tt.Fatalf(\"SerializeJavaEntity got %v\\n\", err)\n\t} else {\n\t\tt.Logf(\"SerializeJavaEntity succeed!\\n\")\n\t}\n}", "func (m *CustomAccessPackageWorkflowExtension) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.CustomCalloutExtension.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SignatureKeyHolderMock) WriteTo(p io.Writer) (r int64, r1 error) {\n\tcounter := atomic.AddUint64(&m.WriteToPreCounter, 1)\n\tdefer atomic.AddUint64(&m.WriteToCounter, 1)\n\n\tif len(m.WriteToMock.expectationSeries) > 0 {\n\t\tif counter > uint64(len(m.WriteToMock.expectationSeries)) {\n\t\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. %v\", p)\n\t\t\treturn\n\t\t}\n\n\t\tinput := m.WriteToMock.expectationSeries[counter-1].input\n\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\n\t\tresult := m.WriteToMock.expectationSeries[counter-1].result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t\treturn\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToMock.mainExpectation != nil {\n\n\t\tinput := m.WriteToMock.mainExpectation.input\n\t\tif input != nil {\n\t\t\ttestify_assert.Equal(m.t, *input, SignatureKeyHolderMockWriteToInput{p}, \"SignatureKeyHolder.WriteTo got unexpected parameters\")\n\t\t}\n\n\t\tresult := m.WriteToMock.mainExpectation.result\n\t\tif result == nil {\n\t\t\tm.t.Fatal(\"No results are set for the SignatureKeyHolderMock.WriteTo\")\n\t\t}\n\n\t\tr = result.r\n\t\tr1 = result.r1\n\n\t\treturn\n\t}\n\n\tif m.WriteToFunc == nil {\n\t\tm.t.Fatalf(\"Unexpected call to SignatureKeyHolderMock.WriteTo. 
%v\", p)\n\t\treturn\n\t}\n\n\treturn m.WriteToFunc(p)\n}", "func (writer *Writer) Serialize(v interface{}) {\n\tif v == nil {\n\t\twriter.WriteNil()\n\t} else {\n\t\tv := reflect.ValueOf(v)\n\t\tvalueEncoders[v.Kind()](writer, v)\n\t}\n}", "func (c *cacheableStoreMock) Write() error {\n\treturn c.err\n}", "func (m *DiscoveredSensitiveType) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetClassificationAttributes() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetClassificationAttributes()))\n for i, v := range m.GetClassificationAttributes() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err := writer.WriteCollectionOfObjectValues(\"classificationAttributes\", cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"confidence\", m.GetConfidence())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"count\", m.GetCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteUUIDValue(\"id\", m.GetId())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *PolicyRule) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkbookOperation) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"error\", m.GetError())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"resourceLocation\", m.GetResourceLocation())\n if err != nil {\n return err\n }\n }\n if m.GetStatus() != nil {\n cast := (*m.GetStatus()).String()\n err = writer.WriteStringValue(\"status\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Planner) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetBuckets() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetBuckets())\n err = writer.WriteCollectionOfObjectValues(\"buckets\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPlans() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPlans())\n err = writer.WriteCollectionOfObjectValues(\"plans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTasks() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetTasks())\n err = writer.WriteCollectionOfObjectValues(\"tasks\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m 
*DeviceAndAppManagementAssignmentFilter) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAssignmentFilterManagementType() != nil {\n cast := (*m.GetAssignmentFilterManagementType()).String()\n err = writer.WriteStringValue(\"assignmentFilterManagementType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"description\", m.GetDescription())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastModifiedDateTime\", m.GetLastModifiedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetPayloads() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPayloads()))\n for i, v := range m.GetPayloads() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"payloads\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetPlatform() != nil {\n cast := (*m.GetPlatform()).String()\n err = writer.WriteStringValue(\"platform\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetRoleScopeTags() != nil {\n err = writer.WriteCollectionOfStringValues(\"roleScopeTags\", m.GetRoleScopeTags())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"rule\", m.GetRule())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkbookFilter) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"criteria\", m.GetCriteria())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (d *dataUsageCache) serializeTo(dst io.Writer) error {\n\t// Add version and compress.\n\t_, err := dst.Write([]byte{dataUsageCacheVerCurrent})\n\tif err != nil {\n\t\treturn err\n\t}\n\tenc, err := zstd.NewWriter(dst,\n\t\tzstd.WithEncoderLevel(zstd.SpeedFastest),\n\t\tzstd.WithWindowSize(1<<20),\n\t\tzstd.WithEncoderConcurrency(2))\n\tif err != nil {\n\t\treturn err\n\t}\n\tmEnc := msgp.NewWriter(enc)\n\terr = d.EncodeMsg(mEnc)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = mEnc.Flush()\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = enc.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (m *InformationProtection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"bitlocker\", m.GetBitlocker())\n if err != nil {\n return err\n }\n }\n if m.GetDataLossPreventionPolicies() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDataLossPreventionPolicies()))\n for i, v := range m.GetDataLossPreventionPolicies() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"dataLossPreventionPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = 
writer.WriteObjectValue(\"policy\", m.GetPolicy())\n if err != nil {\n return err\n }\n }\n if m.GetSensitivityLabels() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSensitivityLabels()))\n for i, v := range m.GetSensitivityLabels() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"sensitivityLabels\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"sensitivityPolicySettings\", m.GetSensitivityPolicySettings())\n if err != nil {\n return err\n }\n }\n if m.GetThreatAssessmentRequests() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetThreatAssessmentRequests()))\n for i, v := range m.GetThreatAssessmentRequests() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"threatAssessmentRequests\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func newMockEncodeDataFunc(mockStore *mock.NodeStore) func(chunk Chunk) []byte {\n\treturn func(chunk Chunk) []byte {\n\t\tif err := mockStore.Put(chunk.Address(), encodeData(chunk)); err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"%T: Chunk %v put: %v\", mockStore, chunk.Address().Log(), err))\n\t\t}\n\t\treturn chunk.Address()[:]\n\t}\n}", "func (m *AzureCommunicationServicesUserIdentity) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Identity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"azureCommunicationServicesResourceId\", m.GetAzureCommunicationServicesResourceId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkforceIntegration) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.ChangeTrackedEntity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteInt32Value(\"apiVersion\", m.GetApiVersion())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"encryption\", m.GetEncryption())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isActive\", m.GetIsActive())\n if err != nil {\n return err\n }\n }\n if m.GetSupportedEntities() != nil {\n cast := (*m.GetSupportedEntities()).String()\n err = writer.WriteStringValue(\"supportedEntities\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"url\", m.GetUrl())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (uw *UnorderedWriter) serialize(ctx context.Context) error {\n\tif uw.buffer.Empty() {\n\t\treturn nil\n\t}\n\treturn log.LogStep(ctx, \"UnorderedWriter.serialize\", func(_ context.Context) error {\n\t\treturn uw.withWriter(func(w *Writer) error {\n\t\t\tif err := uw.buffer.WalkAdditive(func(path, datum string, r io.Reader) error {\n\t\t\t\treturn w.Add(path, datum, r)\n\t\t\t}, func(f File, datum string) error {\n\t\t\t\treturn w.Copy(f, datum)\n\t\t\t}); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn uw.buffer.WalkDeletive(func(path, datum string) error {\n\t\t\t\treturn w.Delete(path, datum)\n\t\t\t})\n\t\t})\n\t})\n}", "func (req *PutRequest) 
serialize(w proto.Writer, serialVersion int16) (err error) {\n\treturn req.serializeInternal(w, serialVersion, true)\n}", "func (m *StsPolicy) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.PolicyBase.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAppliesTo() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetAppliesTo())\n err = writer.WriteCollectionOfObjectValues(\"appliesTo\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetDefinition() != nil {\n err = writer.WriteCollectionOfStringValues(\"definition\", m.GetDefinition())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isOrganizationDefault\", m.GetIsOrganizationDefault())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (sh *ServerPropertiesHandle) Serialize(serverProperties interface{}) {\n\tsh.serverProperties = serverProperties.(ServerProperties)\n\tsh.resetBuffer()\n\tif sh.GetFormat() == handle.JSON {\n\t\tenc := json.NewEncoder(sh)\n\t\tenc.Encode(sh.serverProperties)\n\t} else {\n\t\tenc := xml.NewEncoder(sh)\n\t\tenc.Encode(sh.serverProperties)\n\t}\n}", "func (m *AppVulnerabilityTask) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.DeviceAppManagementTask.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"appName\", m.GetAppName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appPublisher\", m.GetAppPublisher())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appVersion\", m.GetAppVersion())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"insights\", m.GetInsights())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"managedDeviceCount\", m.GetManagedDeviceCount())\n if err != nil {\n return err\n }\n }\n if m.GetManagedDevices() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetManagedDevices()))\n for i, v := range m.GetManagedDevices() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"managedDevices\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetMitigationType() != nil {\n cast := (*m.GetMitigationType()).String()\n err = writer.WriteStringValue(\"mitigationType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteInt32Value(\"mobileAppCount\", m.GetMobileAppCount())\n if err != nil {\n return err\n }\n }\n if m.GetMobileApps() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetMobileApps()))\n for i, v := range m.GetMobileApps() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"mobileApps\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"remediation\", m.GetRemediation())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *RemoteAssistancePartner) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != 
nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastConnectionDateTime\", m.GetLastConnectionDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetOnboardingStatus() != nil {\n cast := (*m.GetOnboardingStatus()).String()\n err = writer.WriteStringValue(\"onboardingStatus\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"onboardingUrl\", m.GetOnboardingUrl())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *TeamsAsyncOperation) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteInt32Value(\"attemptsCount\", m.GetAttemptsCount())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"error\", m.GetError())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastActionDateTime\", m.GetLastActionDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetOperationType() != nil {\n cast := (*m.GetOperationType()).String()\n err = writer.WriteStringValue(\"operationType\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetStatus() != nil {\n cast := (*m.GetStatus()).String()\n err = writer.WriteStringValue(\"status\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"targetResourceId\", m.GetTargetResourceId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"targetResourceLocation\", m.GetTargetResourceLocation())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Schema) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"baseType\", m.GetBaseType())\n if err != nil {\n return err\n }\n }\n if m.GetProperties() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetProperties())\n err = writer.WriteCollectionOfObjectValues(\"properties\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *PlayPromptPostRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteStringValue(\"clientContext\", m.GetClientContext())\n if err != nil {\n return err\n }\n }\n if m.GetPrompts() != nil {\n cast := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.CollectionCast[i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable](m.GetPrompts())\n err := writer.WriteCollectionOfObjectValues(\"prompts\", cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func Serialize(v interface{}) ([]byte, error) {\n\tbuf := bytes.NewBuffer(make([]byte, 0))\n\tif err := gob.NewEncoder(buf).Encode(v); err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func (m *CloudPcConnection) Serialize(writer 
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"displayName\", m.GetDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"healthCheckStatus\", m.GetHealthCheckStatus())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastRefreshedDateTime\", m.GetLastRefreshedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"tenantDisplayName\", m.GetTenantDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"tenantId\", m.GetTenantId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (b *mockEncoder) Encode(value interface{}) (data []byte, err error) {\n\treturn json.Marshal(value)\n}", "func (m *AmazonResourceEvidence) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.AlertEvidence.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"amazonAccountId\", m.GetAmazonAccountId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"amazonResourceId\", m.GetAmazonResourceId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"resourceName\", m.GetResourceName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"resourceType\", m.GetResourceType())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *WorkbookNamedItem) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"comment\", m.GetComment())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"name\", m.GetName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"scope\", m.GetScope())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"type\", m.GetTypeEscaped())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"value\", m.GetValue())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"visible\", m.GetVisible())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"worksheet\", m.GetWorksheet())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *Printer) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.PrinterBase.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetConnectors() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetConnectors()))\n for i, v := range m.GetConnectors() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"connectors\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"hasPhysicalDevice\", m.GetHasPhysicalDevice())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isShared\", m.GetIsShared())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastSeenDateTime\", m.GetLastSeenDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = 
writer.WriteTimeValue(\"registeredDateTime\", m.GetRegisteredDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetShares() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetShares()))\n for i, v := range m.GetShares() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"shares\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTaskTriggers() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTaskTriggers()))\n for i, v := range m.GetTaskTriggers() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"taskTriggers\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *SynchronizationJob) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"schedule\", m.GetSchedule())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"schema\", m.GetSchema())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"status\", m.GetStatus())\n if err != nil {\n return err\n }\n }\n if m.GetSynchronizationJobSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSynchronizationJobSettings()))\n for i, v := range m.GetSynchronizationJobSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"synchronizationJobSettings\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"templateId\", m.GetTemplateId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *CommunicationsIdentitySet) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.IdentitySet.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteObjectValue(\"applicationInstance\", m.GetApplicationInstance())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"assertedIdentity\", m.GetAssertedIdentity())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"azureCommunicationServicesUser\", m.GetAzureCommunicationServicesUser())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"encrypted\", m.GetEncrypted())\n if err != nil {\n return err\n }\n }\n if m.GetEndpointType() != nil {\n cast := (*m.GetEndpointType()).String()\n err = writer.WriteStringValue(\"endpointType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"guest\", m.GetGuest())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"onPremises\", m.GetOnPremises())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"phone\", m.GetPhone())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (m *ThreatAssessmentRequest) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetCategory() != nil {\n 
cast := (*m.GetCategory()).String()\n err = writer.WriteStringValue(\"category\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetContentType() != nil {\n cast := (*m.GetContentType()).String()\n err = writer.WriteStringValue(\"contentType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"createdBy\", m.GetCreatedBy())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"createdDateTime\", m.GetCreatedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetExpectedAssessment() != nil {\n cast := (*m.GetExpectedAssessment()).String()\n err = writer.WriteStringValue(\"expectedAssessment\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetRequestSource() != nil {\n cast := (*m.GetRequestSource()).String()\n err = writer.WriteStringValue(\"requestSource\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetResults() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetResults()))\n for i, v := range m.GetResults() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"results\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetStatus() != nil {\n cast := (*m.GetStatus()).String()\n err = writer.WriteStringValue(\"status\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *ServicePrincipalRiskDetection) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetActivity() != nil {\n cast := (*m.GetActivity()).String()\n err = writer.WriteStringValue(\"activity\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"activityDateTime\", m.GetActivityDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"additionalInfo\", m.GetAdditionalInfo())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"appId\", m.GetAppId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"correlationId\", m.GetCorrelationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"detectedDateTime\", m.GetDetectedDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetDetectionTimingType() != nil {\n cast := (*m.GetDetectionTimingType()).String()\n err = writer.WriteStringValue(\"detectionTimingType\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"ipAddress\", m.GetIpAddress())\n if err != nil {\n return err\n }\n }\n if m.GetKeyIds() != nil {\n err = writer.WriteCollectionOfStringValues(\"keyIds\", m.GetKeyIds())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastUpdatedDateTime\", m.GetLastUpdatedDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"location\", m.GetLocation())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"requestId\", m.GetRequestId())\n if err != nil {\n return err\n }\n }\n if m.GetRiskDetail() != nil {\n cast := (*m.GetRiskDetail()).String()\n err = 
writer.WriteStringValue(\"riskDetail\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"riskEventType\", m.GetRiskEventType())\n if err != nil {\n return err\n }\n }\n if m.GetRiskLevel() != nil {\n cast := (*m.GetRiskLevel()).String()\n err = writer.WriteStringValue(\"riskLevel\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetRiskState() != nil {\n cast := (*m.GetRiskState()).String()\n err = writer.WriteStringValue(\"riskState\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"servicePrincipalDisplayName\", m.GetServicePrincipalDisplayName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"servicePrincipalId\", m.GetServicePrincipalId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"source\", m.GetSource())\n if err != nil {\n return err\n }\n }\n if m.GetTokenIssuerType() != nil {\n cast := (*m.GetTokenIssuerType()).String()\n err = writer.WriteStringValue(\"tokenIssuerType\", &cast)\n if err != nil {\n return err\n }\n }\n return nil\n}" ]
[ "0.7572672", "0.7121153", "0.6958335", "0.601819", "0.5588122", "0.5555238", "0.54425055", "0.5374568", "0.5242091", "0.52185357", "0.52147585", "0.51353055", "0.51257586", "0.5089524", "0.50746346", "0.5068588", "0.50657415", "0.50352657", "0.50281453", "0.50278586", "0.501341", "0.49992827", "0.49819165", "0.49781108", "0.4973333", "0.4966905", "0.49560606", "0.4940899", "0.4931835", "0.49294415", "0.4928863", "0.49262258", "0.49139312", "0.4913386", "0.4913386", "0.48922372", "0.488999", "0.48826227", "0.48751968", "0.48741293", "0.48509133", "0.4845396", "0.48410523", "0.4825038", "0.4812685", "0.48115274", "0.4810449", "0.4806451", "0.480307", "0.48027518", "0.4795205", "0.47921333", "0.47866592", "0.4782968", "0.47819242", "0.47698376", "0.47697365", "0.47686657", "0.4764124", "0.47624943", "0.47623333", "0.47559437", "0.47506896", "0.47491783", "0.47469705", "0.4743265", "0.47428253", "0.47207946", "0.471662", "0.47122997", "0.47109288", "0.47086227", "0.4708571", "0.4704059", "0.46931586", "0.4686714", "0.46818236", "0.46784735", "0.46773767", "0.46745682", "0.46714285", "0.46698833", "0.4663547", "0.4649093", "0.4646261", "0.4644533", "0.46365967", "0.46300918", "0.4627369", "0.46233767", "0.4622243", "0.46212274", "0.4614761", "0.46015632", "0.4592402", "0.45910618", "0.45889178", "0.45882556", "0.4587735", "0.45719033" ]
0.77885276
0
WriteString provides a mock function with given fields: logicalName, bitLength, encoding, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, encoding, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, encoding, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tconst input = \"OK\"\n\n\tvar buf bytes.Buffer\n\tc := NewCoder(&buf)\n\n\tio.WriteString(c, input)\n\tif got := buf.String(); got != input {\n\t\tt.Errorf(\"Write(c, %q): got %q, want %q\", input, got, input)\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFile) WriteString(arg0 string) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteString\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func WriteString(buffer []byte, offset int, value string) {\n WriteBytes(buffer, offset, []byte(value))\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 
error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockPlcWriteResponse) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *fakeRedisConn) WriteString(str string) { c.rsp = append(c.rsp, str) }", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteString(p thrift.TProtocol, value, name string, field int16) error {\n\treturn WriteStringWithContext(context.Background(), p, value, name, field)\n}", "func (_e 
*MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *ORM) StoreString(chainID *big.Int, key string, val string) error {\n\tret := _m.Called(chainID, key, val)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*big.Int, string, string) error); ok {\n\t\tr0 = rf(chainID, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *MockWriter) Write(p []byte) (int, error) {\n\tw.Entries = append(w.Entries, string(p))\n\treturn len(p), nil\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, 
writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWrite(t *testing.T) {\n\ttr := NewTFramedTransport(new(mockTTransport))\n\tbuff := make([]byte, 10)\n\n\tn, err := tr.Write(buff)\n\n\tassert.Equal(t, 10, n)\n\tassert.Nil(t, err)\n\tassert.Equal(t, buff, tr.buf.Bytes())\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (rb *recordBuilder) WriteString(s string) (n int, err error) {\n\treturn rb.content.WriteString(s)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m 
*MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else 
{\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func writeExpected(t *testing.T, streamName string, w io.Writer, data string) {\n\tn, err := io.WriteString(w, data)\n\tassert.NoError(t, err, \"stream %s\", streamName)\n\tassert.Equal(t, len(data), n, \"stream %s\", streamName)\n}", "func (r *MockReadWriteCloser) Write(p []byte) (n int, err error) {\n\n\tif err = r.WriteErr; err != nil {\n\t\tr.BytesWritten = p\n\t\tn = len(p)\n\t}\n\treturn\n}", "func WriteString(data []byte, str string, stype string_t, pos *int, l int) {\n switch stype {\n case NULLSTR:\n checkSize(len(data[*pos:]), len(str))\n // Write the string and then terminate with 0x00 byte.\n copy(data[*pos:], str)\n checkSize(len(data[*pos:]), len(str) + 1)\n *pos += len(str)\n data[*pos] = 0x00\n *pos++\n\n case LENENCSTR:\n // Write the encoded length.\n WriteLenEncInt(data, uint64(len(str)), pos)\n // Then write the string as a FIXEDSTR.\n WriteString(data, str, FIXEDSTR, pos, l)\n\n case FIXEDSTR:\n\n checkSize(len(data[*pos:]), l)\n // Pads the string with 0's to fill the specified length l.\n copy(data[*pos:*pos+l], str)\n *pos += l\n\n case EOFSTR:\n\n checkSize(len(data[*pos:]), len(str))\n // Copies the string into the data.\n *pos += copy(data[*pos:], str)\n }\n}", "func (m *MockBufferInterface) String() string {\n\tret := m.ctrl.Call(m, \"String\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func test(\n\tt *testing.T,\n\tn string,\n\tv interface{},\n\tlines ...string,\n) {\n\tt.Helper()\n\n\tx := strings.Join(lines, \"\\n\")\n\n\tt.Run(\n\t\tn,\n\t\tfunc(t *testing.T) {\n\t\t\tt.Helper()\n\n\t\t\tvar w strings.Builder\n\t\t\tn, err := Write(&w, v)\n\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\n\t\t\tt.Log(\"expected:\\n\\n\" + x + \"\\n\")\n\n\t\t\ts := w.String()\n\t\t\tif s != x {\n\t\t\t\tt.Fatal(\"actual:\\n\\n\" + s + \"\\n\")\n\t\t\t}\n\n\t\t\tif n != len(x) {\n\t\t\t\tt.Fatalf(\n\t\t\t\t\t\"incorrect byte count: %d != %d\",\n\t\t\t\t\tn,\n\t\t\t\t\tlen(x),\n\t\t\t\t)\n\t\t\t}\n\t\t},\n\t)\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockPlcWriteResponse_Expecter) String() *MockPlcWriteResponse_String_Call {\n\treturn &MockPlcWriteResponse_String_Call{Call: _e.mock.On(\"String\")}\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 
{\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (c *TestConnection) Write(b []byte) (n int, err error) {\n if c.WriteError != nil && c.ThrowWriteErrorAfter == c.TimesWriteCalled {\n return 0, c.WriteError\n }\n\n if c.WriteCount > -1 {\n return c.WriteCount, nil\n }\n\n c.TimesWriteCalled++\n c.Written = append(c.Written, string(b))\n return len(b), nil\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength 
interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *Token) writeString(s string) {\n\tt.strBuilder.WriteString(s)\n}", "func (_m *MockDefaultPlcConnectionCloseResult) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockSerial) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *ByteWriter) MustWriteString(val string, offset int) int {\n\tif off, err := w.WriteString(val, offset); err != nil {\n\t\tpanic(err)\n\t} else {\n\t\treturn off\n\t}\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\r\n\tvar key, value string\r\n\tvar err error\r\n\t\r\n\r\n\tfmt.Println(\"running write()\")\r\n\r\n\tif len(args) != 2 {\r\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\r\n\t}\r\n\r\n\tkey = args[0] //rename for funsies\r\n\tvalue = args[1]\r\n\t\r\n\tv,err := stub.GetState(key)\r\n\tif v!=nil {\r\n\t\treturn nil, errors.New(\"Key already exists\")\r\n\t} else {\r\n\t\r\n\t\r\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\t\r\n\t}\r\n\treturn nil, nil\r\n}", "func (t *testRunner) writeString(file, data string) {\n\tt.Helper()\n\n\tnewf, err := os.CreateTemp(t.dir, \"\")\n\trequire.NoError(t, err)\n\n\t_, err = newf.WriteString(data)\n\trequire.NoError(t, err)\n\trequire.NoError(t, newf.Close())\n\n\terr = os.Rename(newf.Name(), file)\n\trequire.NoError(t, err)\n}", "func (m *MockWriter) Write(p []byte) (n int, err error) {\n\tvar start time.Time\n\tvar stats WriterStats\n\tstart = time.Now()\n\tfor _, v := range p {\n\t\tstats.nbytes++ // count bytes\n\t\tif v == '\\n' {\n\t\t\tstats.nlines++ // count newlines (records)\n\t\t}\n\t}\n\n\tstats.duration = time.Since(start)\n\tm.statsChan <- stats\n\treturn stats.nbytes, nil\n}", "func mockTokenAsString(t *testing.T) string {\n\ttoken := mockToken(t)\n\treturn token.Encode()\n}", "func (w *multiWriter) WriteString(b string) (int, error) {\n\tw.ctx.Set(\"response\", b)\n\treturn w.ResponseWriter.Write([]byte(b))\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\t\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\t\n\tfmt.Println(\"saving state for key: \" + key);\n\t\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (res Responder) WriteString(s string) int {\n\tn := res.writeInline(binDOLLAR, strconv.Itoa(len(s)))\n\tm, _ := res.b.WriteString(s)\n\tres.b.Write(binCRLF)\n\treturn n + m + 2\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (mock WriteCloser) fakeZeroWrite(p []byte) (n int, err error) {\n\treturn n, err\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\t\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBigInt_Call {\n\treturn &MockWriteBufferXmlBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are 
already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func TestBasicMethodChannelStringCodecSend(t *testing.T) {\n\tcodec := StringCodec{}\n\tmessenger := NewTestingBinaryMessenger()\n\tmessenger.MockSetChannelHandler(\"ch\", func(encodedMessage []byte, r ResponseSender) error {\n\t\tmessage, err := codec.DecodeMessage(encodedMessage)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to decode message\")\n\t\t}\n\t\tmessageString, ok := message.(string)\n\t\tif !ok {\n\t\t\treturn errors.New(\"message is invalid type, expected string\")\n\t\t}\n\t\treply := messageString + \" world\"\n\t\tencodedReply, err := codec.EncodeMessage(reply)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to encode message\")\n\t\t}\n\t\tr.Send(encodedReply)\n\t\treturn nil\n\t})\n\tchannel := NewBasicMessageChannel(messenger, \"ch\", codec)\n\treply, err := channel.SendWithReply(\"hello\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tt.Log(spew.Sdump(reply))\n\treplyString, ok := reply.(string)\n\tif !ok {\n\t\tt.Fatal(\"reply is invalid type, expected string\")\n\t}\n\tEqual(t, \"hello world\", replyString)\n}", "func (_m *MockStore) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}" ]
[ "0.77942514", "0.7605405", "0.7473537", "0.6494878", "0.63941216", "0.6330197", "0.62613195", "0.60808736", "0.599505", "0.5970469", "0.5920728", "0.5903412", "0.58675987", "0.5862283", "0.58592516", "0.58506685", "0.5783285", "0.57629424", "0.572585", "0.57254356", "0.5688272", "0.5656185", "0.5652554", "0.5651305", "0.564655", "0.5638216", "0.5635523", "0.5634792", "0.5606095", "0.56011665", "0.5599716", "0.55949247", "0.55360824", "0.55351704", "0.5526599", "0.5524946", "0.5521574", "0.5505501", "0.5494303", "0.54544634", "0.5451795", "0.5450112", "0.54416704", "0.54412335", "0.54383516", "0.5429495", "0.54279196", "0.5427714", "0.5424414", "0.54188615", "0.5412255", "0.5408843", "0.5396563", "0.53692687", "0.53685784", "0.5366064", "0.5353599", "0.5352238", "0.5352238", "0.5352238", "0.53488046", "0.53425527", "0.53365386", "0.5333128", "0.532493", "0.530471", "0.5304704", "0.53015906", "0.53015906", "0.5292279", "0.5283283", "0.5263963", "0.5256004", "0.524394", "0.5236615", "0.52344143", "0.52272266", "0.52251524", "0.5218195", "0.5217814", "0.52152896", "0.5212466", "0.51959795", "0.51844954", "0.5183062", "0.51824117", "0.51757544", "0.5172016", "0.5169029", "0.5154164", "0.51514924", "0.5130827", "0.5127569", "0.5117571", "0.5117451", "0.5100005", "0.5093842", "0.50891304", "0.5081634", "0.50768846" ]
0.7883549
0
WriteString is a helper method to define mock.On call logicalName string bitLength uint32 encoding string value string writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call { return &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On("WriteString", append([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteString(buffer []byte, offset int, value string) {\n WriteBytes(buffer, offset, []byte(value))\n}", "func WriteString(p thrift.TProtocol, value, name string, field int16) error {\n\treturn WriteStringWithContext(context.Background(), p, value, name, field)\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c *fakeRedisConn) WriteString(str string) { c.rsp = append(c.rsp, str) }", "func TestWrite(t *testing.T) {\n\tconst input = \"OK\"\n\n\tvar buf bytes.Buffer\n\tc := NewCoder(&buf)\n\n\tio.WriteString(c, input)\n\tif got := buf.String(); got != input {\n\t\tt.Errorf(\"Write(c, %q): got %q, want %q\", input, got, input)\n\t}\n}", "func (_e *MockPlcWriteResponse_Expecter) String() *MockPlcWriteResponse_String_Call {\n\treturn &MockPlcWriteResponse_String_Call{Call: _e.mock.On(\"String\")}\n}", "func WriteString(data []byte, str string, stype string_t, pos *int, l int) {\n switch stype {\n case NULLSTR:\n checkSize(len(data[*pos:]), len(str))\n // Write the string and then terminate with 0x00 byte.\n copy(data[*pos:], str)\n checkSize(len(data[*pos:]), len(str) + 1)\n *pos += len(str)\n data[*pos] = 0x00\n *pos++\n\n case LENENCSTR:\n // Write the encoded length.\n WriteLenEncInt(data, uint64(len(str)), pos)\n // Then write the string as a FIXEDSTR.\n WriteString(data, str, FIXEDSTR, pos, l)\n\n case FIXEDSTR:\n\n checkSize(len(data[*pos:]), l)\n // Pads the string with 0's to fill the specified length l.\n copy(data[*pos:*pos+l], str)\n *pos += l\n\n case EOFSTR:\n\n checkSize(len(data[*pos:]), len(str))\n // Copies the string into the data.\n *pos += copy(data[*pos:], str)\n }\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockFile) WriteString(arg0 string) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteString\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (t *Token) writeString(s string) {\n\tt.strBuilder.WriteString(s)\n}", "func (res Responder) WriteString(s string) int {\n\tn := res.writeInline(binDOLLAR, strconv.Itoa(len(s)))\n\tm, _ := res.b.WriteString(s)\n\tres.b.Write(binCRLF)\n\treturn n + m + 2\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (rb *recordBuilder) WriteString(s string) (n int, err error) {\n\treturn 
rb.content.WriteString(s)\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (w *multiWriter) WriteString(b string) (int, error) {\n\tw.ctx.Set(\"response\", b)\n\treturn w.ResponseWriter.Write([]byte(b))\n}", "func (w *ByteWriter) WriteString(val string, offset int) (int, error) {\n\t_, err := w.Write([]byte(val), offset)\n\treturn offset + len(val), err\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (dm *dataManager) writeString(address uint, str string) (err ProcessException) {\n\tdata := []byte(str)\n\n\terr = dm.process.WriteBytes(address, data)\n\n\treturn\n}", "func WriteString(fp int, str string, out *CXArgument) {\n\tWriteObject(GetOffset_str(fp, out), encoder.Serialize(str))\n}", "func (g *Generator) WriteString(s string) (int, error) {\n\treturn g.o.Write([]byte(s))\n}", "func (c *MockRemoteWriteClient) Name() string { return \"\" }", "func (w *ByteWriter) MustWriteString(val string, offset int) int {\n\tif off, err := w.WriteString(val, offset); err != nil {\n\t\tpanic(err)\n\t} else {\n\t\treturn off\n\t}\n}", "func (r *SizeRotator) WriteString(str string) (n int, err error) {\r\n\treturn r.Write([]byte(str))\r\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (c Channel) WriteString(name, value string) error {\n\tcName := C.CString(name)\n\tdefer C.free(unsafe.Pointer(cName))\n\tcValue := C.CString(value)\n\tdefer C.free(unsafe.Pointer(cValue))\n\n\terrno := C.iio_channel_attr_write(c.handle, cName, cValue)\n\tif errno < 0 {\n\t\treturn syscall.Errno(-errno)\n\t}\n\t// otherwise it's the number of bytes, which we're not interested in\n\t// at this time\n\treturn nil\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: 
_e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (hd *HappyDevFormatter) writeString(buf bufferWriter, s string, col *int) {\n\tbuf.WriteString(s)\n\t*col += len(s)\n}", "func (w *Writer) WriteString(str string) {\n\tlength := util.UTF16Length(str)\n\tswitch {\n\tcase length == 0:\n\t\tw.writeByte(TagEmpty)\n\tcase length < 0:\n\t\tw.WriteBytes(*(*[]byte)(unsafe.Pointer(&str)))\n\tcase length == 1:\n\t\tw.writeByte(TagUTF8Char)\n\t\tw.writeString(str)\n\tdefault:\n\t\tsetWriterRef(w, nil, nil)\n\t\twriteString(w, str, length)\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockPlcDriver_Expecter) String() *MockPlcDriver_String_Call {\n\treturn &MockPlcDriver_String_Call{Call: _e.mock.On(\"String\")}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0]\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_m *MockPlcWriteResponse) String() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}", "func String(str string) Val { return Val{t: bsontype.String}.writestring(str) }", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (w *MockWriter) Write(p []byte) (int, error) {\n\tw.Entries = append(w.Entries, string(p))\n\treturn len(p), nil\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\terr = sanitize_arguments(args)\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar key, value string\n\tvar err error\n\tfmt.Println(\"starting write\")\n\n\tif len(args) != 2 {\n\t\treturn shim.Error(\"Incorrect number of arguments. Expecting 2. 
key of the variable and value to set\")\n\t}\n\n\t// input sanitation\n\t\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\terr = stub.PutState(key, []byte(value)) //write the variable into the ledger\n\tif err != nil {\n\t\treturn shim.Error(err.Error())\n\t}\n\n\tfmt.Println(\"- end write\")\n\treturn shim.Success(nil)\n}", "func (t *testRunner) writeString(file, data string) {\n\tt.Helper()\n\n\tnewf, err := os.CreateTemp(t.dir, \"\")\n\trequire.NoError(t, err)\n\n\t_, err = newf.WriteString(data)\n\trequire.NoError(t, err)\n\trequire.NoError(t, newf.Close())\n\n\terr = os.Rename(newf.Name(), file)\n\trequire.NoError(t, err)\n}", "func(this*Window)WriteAddr(format string,args...interface{})error{\nf,err:=this.File(\"addr\")\nif err!=nil{\nreturn err\n}\nif len(args)> 0{\nformat= fmt.Sprintf(format,args...)\n}\n_,err= f.Write([]byte(format))\nreturn err\n}", "func test(\n\tt *testing.T,\n\tn string,\n\tv interface{},\n\tlines ...string,\n) {\n\tt.Helper()\n\n\tx := strings.Join(lines, \"\\n\")\n\n\tt.Run(\n\t\tn,\n\t\tfunc(t *testing.T) {\n\t\t\tt.Helper()\n\n\t\t\tvar w strings.Builder\n\t\t\tn, err := Write(&w, v)\n\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\n\t\t\tt.Log(\"expected:\\n\\n\" + x + \"\\n\")\n\n\t\t\ts := w.String()\n\t\t\tif s != x {\n\t\t\t\tt.Fatal(\"actual:\\n\\n\" + s + \"\\n\")\n\t\t\t}\n\n\t\t\tif n != len(x) {\n\t\t\t\tt.Fatalf(\n\t\t\t\t\t\"incorrect byte count: %d != %d\",\n\t\t\t\t\tn,\n\t\t\t\t\tlen(x),\n\t\t\t\t)\n\t\t\t}\n\t\t},\n\t)\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (res Responder) WriteInlineString(s string) int {\n\treturn res.writeInline(binPLUS, s)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\r\n\tvar key, value string\r\n\tvar err error\r\n\t\r\n\r\n\tfmt.Println(\"running write()\")\r\n\r\n\tif len(args) != 2 {\r\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the key and value to set\")\r\n\t}\r\n\r\n\tkey = args[0] //rename for funsies\r\n\tvalue = args[1]\r\n\t\r\n\tv,err := stub.GetState(key)\r\n\tif v!=nil {\r\n\t\treturn nil, errors.New(\"Key already exists\")\r\n\t} else {\r\n\t\r\n\t\r\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\r\n\tif err != nil {\r\n\t\treturn nil, err\r\n\t}\r\n\t\r\n\t}\r\n\treturn nil, nil\r\n}", "func (w Writer) writeString(s string) error {\n\t_, err := w.w.WriteString(s)\n\treturn err\n}", "func (color *Color) writeNameLen(w io.Writer) (err error) {\n\t// Adding one to the name length accounts for the zero-terminated character.\n\treturn binary.Write(w, binary.BigEndian, color.NameLen()+1)\n}", "func writeString(w io.Writer, s string) (n int, err error) {\n\ttype stringWriter interface {\n\t\tWriteString(string) (n int, err error)\n\t}\n\tif sw, ok := w.(stringWriter); ok {\n\t\treturn sw.WriteString(s) // Avoid copy string\n\t}\n\treturn w.Write([]byte(s)) // Using temporary copy\n}", "func (w *VT100Writer) WriteRawStr(data string) {\n\tw.WriteRaw([]byte(data))\n}", "func (dw *DataWriter) WriteString(value string) error {\n\tbytes := []rune(value)\n\tbytesNumber := uint16(len(bytes))\n\terr := binary.Write(dw.w, binary.BigEndian, bytesNumber)\n\tif err != nil {\n\t\treturn err\n\t}\n\trunes := []rune(value)\n\tfor _, r := range runes {\n\t\t_, err = dw.w.WriteRune(r)\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (color *Color) writeName(w io.Writer) (err error) {\n\tname := utf16.Encode([]rune(color.Name))\n\tname = append(name, uint16(0))\n\treturn binary.Write(w, binary.BigEndian, name)\n}", "func (req *Request) WriteString(s string) (int, error) {\n\treturn req.res.Write([]byte(s))\n}", "func (fs *Fs) WriteString(file *os.File, string string) (int, error) {\n\treturn file.WriteString(string) // #nosec G304\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func writeExpected(t *testing.T, streamName string, w io.Writer, data string) {\n\tn, err := io.WriteString(w, data)\n\tassert.NoError(t, err, \"stream %s\", streamName)\n\tassert.Equal(t, len(data), n, \"stream %s\", streamName)\n}", "func (_m *ORM) StoreString(chainID *big.Int, key string, val string) error {\n\tret := _m.Called(chainID, key, val)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*big.Int, string, string) error); ok {\n\t\tr0 = rf(chainID, key, val)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (a ReverseHttpFile) WriteString(s string) (int, error) {\n\treturn 0, syscall.EPERM\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (_e *MockCALIdentifyTag_Expecter) String() *MockCALIdentifyTag_String_Call {\n\treturn &MockCALIdentifyTag_String_Call{Call: _e.mock.On(\"String\")}\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv 
interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil {\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (dw *DataWriter) WriteUTF(value string) error {\n\treturn dw.WriteString(value)\n}", "func WriteString() {\n\tfmt.Println(\"----------------> WriteString\")\n\ts := \" world\"\n\tbuf := bytes.NewBufferString(\"hello\")\n\tfmt.Println(buf.String())\n\n\t//write string at then end of buffer\n\tbuf.WriteString(s)\n\n\tfmt.Println(buf.String())\n}", "func (mr *MockFileMockRecorder) WriteString(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteString\", reflect.TypeOf((*MockFile)(nil).WriteString), arg0)\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, 
value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (w *RESPWriter) writeStr(s string) {\n\tw.buf.WriteRune(respSimpleString)\n\tw.buf.WriteString(s)\n\tw.buf.Write(DELIMS)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Write(w io.Writer, v interface{}, name string) (err error) {\n\tdefer func() {\n\t\tif e := recover(); e != nil {\n\t\t\tif er, ok := e.(error); ok {\n\t\t\t\terr = er\n\t\t\t} else {\n\t\t\t\terr = fmt.Errorf(\"format/nbt: %s\", e)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t}()\n\tval := reflect.ValueOf(v)\n\tif val.Kind() == reflect.Ptr {\n\t\tval = val.Elem()\n\t}\n\tfs := fields(val.Type())\n\ten := &msgEncoder{}\n\n\t//Write name\n\tbs := en.b[:3]\n\tbs[0] = 10\n\tbinary.BigEndian.PutUint16(bs[1:], uint16(len(name)))\n\t_, err = w.Write(bs)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = w.Write([]byte(name))\n\tif err != 
nil {\n\t\treturn err\n\t}\n\n\treturn write(w, en, fs, val)\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t *SimpleChaincode) write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n\t\n\tvar err error\n\tfmt.Println(\"running write()\")\n\n\tif len(args) != 2 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. Expecting 2. name of the key and value to set\")\n\t}\n\n\tkey = args[0] //rename for funsies\n\tvalue = args[1]\n\t\n\tfmt.Println(\"saving state for key: \" + key);\n\t\n\terr = stub.PutState(key, []byte(value)) //write the variable into the chaincode state\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t\n\t\n\treturn nil, nil\n}", "func (n *ninjaWriterWithWrap) WriteString(s string) (written int, noError error) {\n\t// Always return the full length of the string and a nil error.\n\t// ninjaWriterWithWrap doesn't return errors to the caller, it saves them until Flush()\n\twritten = len(s)\n\n\tif n.err != nil {\n\t\treturn\n\t}\n\n\tconst spaceLen = 1\n\tif !n.space {\n\t\t// No space is pending, so a line wrap can't be inserted before this, so just write\n\t\t// the string.\n\t\tn.lineLen += len(s)\n\t\t_, n.err = n.writer.WriteString(s)\n\t} else if n.lineLen+len(s)+spaceLen > n.maxLineLen {\n\t\t// A space is pending, and the pending strings plus the current string would exceed the\n\t\t// maximum line length. 
Wrap and indent before the pending space and strings, then write\n\t\t// the pending and current strings.\n\t\t_, n.err = n.writer.WriteString(\" $\\n\")\n\t\tif n.err != nil {\n\t\t\treturn\n\t\t}\n\t\t_, n.err = n.writer.WriteString(indentString[:indentWidth*2])\n\t\tif n.err != nil {\n\t\t\treturn\n\t\t}\n\t\tn.lineLen = indentWidth*2 + n.pendingLen\n\t\ts = strings.TrimLeftFunc(s, unicode.IsSpace)\n\t\tn.pending = append(n.pending, s)\n\t\tn.writePending()\n\n\t\tn.space = false\n\t} else {\n\t\t// A space is pending but the current string would not reach the maximum line length,\n\t\t// add it to the pending list.\n\t\tn.pending = append(n.pending, s)\n\t\tn.pendingLen += len(s)\n\t\tn.lineLen += len(s)\n\t}\n\n\treturn\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func fillString(t *testing.T, testname string, b *Builder, s string, n int, fus string) string {\n\tcheckRead(t, testname+\" (fill 1)\", b, s)\n\tfor ; n > 0; n-- {\n\t\tm, err := b.WriteString(fus)\n\t\tif m != len(fus) {\n\t\t\tt.Errorf(testname+\" (fill 2): m == %d, expected %d\", m, len(fus))\n\t\t}\n\t\tif err != nil {\n\t\t\tt.Errorf(testname+\" (fill 3): err should always be nil, found err == %s\", err)\n\t\t}\n\t\ts += fus\n\t\tcheckRead(t, testname+\" (fill 4)\", b, s)\n\t}\n\treturn s\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *win) WriteString(str string) {\n\tw.Addr(\"#%d\", w.pAddr)\n\tdata := []byte(str + \"\\n\")\n\tw.writeData(data)\n\n\tnr := utf8.RuneCount(data)\n\tw.pAddr += nr\n\tw.eAddr += nr\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}" ]
[ "0.732646", "0.7045736", "0.6867753", "0.60498464", "0.5835141", "0.5664947", "0.5587601", "0.55428576", "0.55249965", "0.5488658", "0.5486709", "0.54642034", "0.54473096", "0.54326653", "0.54134715", "0.5398573", "0.5296922", "0.52804065", "0.5236424", "0.5172058", "0.51718956", "0.5171005", "0.5168284", "0.5122933", "0.51106185", "0.51053274", "0.51020116", "0.50956017", "0.50842", "0.50718546", "0.5051377", "0.50443816", "0.503077", "0.5027747", "0.5023341", "0.5004906", "0.49985677", "0.49967057", "0.49967057", "0.49967057", "0.49870336", "0.49777046", "0.49755695", "0.49733403", "0.49656203", "0.49573246", "0.49573246", "0.49523506", "0.4949316", "0.49490348", "0.49489212", "0.49462444", "0.49345133", "0.49341446", "0.49289304", "0.4913763", "0.49034053", "0.49027053", "0.4894299", "0.48900643", "0.4886029", "0.4884569", "0.48776558", "0.48760742", "0.48712176", "0.48688984", "0.48649126", "0.48597196", "0.48550883", "0.48529828", "0.48512527", "0.48480335", "0.48459965", "0.4845372", "0.48439723", "0.4843377", "0.48407543", "0.48389536", "0.48314896", "0.4826361", "0.48233587", "0.48179546", "0.4815547", "0.4812281", "0.4802883", "0.48024663", "0.48010552", "0.48006743", "0.4773857", "0.4769107", "0.47682935", "0.47639984", "0.47599283", "0.47525054", "0.4751324", "0.47438845", "0.47379664", "0.47363794", "0.47340992", "0.47309557" ]
0.75633574
0
WriteUint16 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint16(arg0 string) uint16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint16\", arg0)\n\tret0, _ := ret[0].(uint16)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) 
WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteUint16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint16(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint16\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint16), arg0, arg1, arg2)\n}", "func WriteUInt16(buffer []byte, offset int, value uint16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range 
writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint16(t *testing.T) {\n\ttests := []struct {\n\t\tin uint16 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff}}, // Max 2-byte\n\t}\n\n\tt.Logf(\"Running uint16 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint16(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint16\n\t\terr = ReadUint16(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (vb *keyWriter) WriteUint16(num uint16) (int, error) {\n\tbinary.BigEndian.PutUint16(vb.buffer[:2], num)\n\tvb.hash.Write(vb.buffer[:2])\n\treturn 2, nil\n}", "func FormatUint16(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func (m *MockIOPackage) ReadUint16(arg0 []byte, arg1 int) (uint16, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint16\", arg0, arg1)\n\tret0, _ := ret[0].(uint16)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 2 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint16(out.GetContainer(), v)\n\tout.pos += 2\n\treturn true\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) {\n\tout.Append(byte(v), byte(v>>8))\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func WriteUint16(data []byte, x uint, v uint16) {\n\tif x+1 >= uint(len(data)) {\n\t\treturn\n\t}\n\tdata[x] = byte(v >> 
8)\n\tdata[x+1] = byte(v)\n}", "func ExpectUint16(t *testing.T, field string, expected uint16, found uint16) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func WriteUint16(w io.Writer, v uint16) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (bio *BinaryIO) WriteUint16(off int64, value uint16) {\n\tvar buf [2]byte\n\tbio.order.PutUint16(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint16(k string, v uint16) Field {\n\treturn Field{Key: k, Value: valf.Uint16(v)}\n}", "func (instance *Instance) SetUint16(fieldName string, value uint16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (m Measurement) AddUInt16(name string, value uint16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func Uint16(name string, value uint16, usage string) *uint16 {\n\treturn Environment.Uint16(name, value, usage)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestReadUint16FromBytes(t *testing.T) {\n\tvar file, _ = ioutil.TempFile(os.TempDir(), \"\")\n\tvar filePath = file.Name()\n\tdefer file.Close()\n\tdefer os.Remove(filePath)\n\n\tvar writer = bufio.NewWriter(file)\n\t// Little endian pow disposition:\n\t// 16^1 16^0 | 16^3 16^2\n\t// 0 x 0 4 | 0 x 0 1\n\t// 0 4 | 0 256\n\t// -----------------------------\n\t// TOTAL = 260\n\twriter.Write([]byte{0x04, 0x01})\n\twriter.Flush()\n\n\tfile.Seek(0, os.SEEK_SET)\n\n\tvar readerBag = fileReader.ByteReaderBag{File: file}\n\tvar number = fileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err != nil {\n\t\tt.Errorf(\"Expected no errors, got '%v'\", readerBag.Err)\n\t}\n\n\ttest.ExpectUint16(t, \"Number test\", 260, number)\n\n\tfileReader.ReadUint16FromBytes(&readerBag)\n\tif readerBag.Err == nil {\n\t\tt.Errorf(\"Expected no bytes to read, but got no error\")\n\t}\n}", "func MeasureUInt16(name string, field string, value uint16) Measurement {\n\treturn NewMeasurement(name).AddUInt16(field, value)\n}", "func (stream *Stream) WriteUint16(val uint16) {\n\tstream.ensure(5)\n\tq1 := val / 1000\n\tif q1 
== 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tn := writeFirstBuf(stream.buf, digits[q1], stream.n)\n\twriteBuf(stream.buf, digits[r1], n)\n\tstream.n = n + 3\n\treturn\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockSessionMockRecorder) Uint16(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint16\", reflect.TypeOf((*MockSession)(nil).Uint16), arg0)\n}", "func (o *OutputState) ApplyUint16(applier interface{}) Uint16Output {\n\treturn o.ApplyT(applier).(Uint16Output)\n}", "func (mr *MockIOPackageMockRecorder) AppendUint16(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint16\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint16), arg0, arg1)\n}", "func Uint16(key string, val uint16) Field {\n\treturn Field{Key: key, Type: core.Uint16Type, Integer: int64(val)}\n}", "func Uint16Tag(name interface{}, value uint16) Tag {\n\treturn &tag{\n\t\ttagType: TagUint16,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func WriteI16(p thrift.TProtocol, value int16, name string, field int16) error {\n\treturn WriteI16WithContext(context.Background(), p, value, name, field)\n}", "func Uint16(key string, val uint16) Tag {\n\treturn Tag{key: key, tType: uint16Type, integerVal: int64(val)}\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint16(v uint16) *uint16 {\n\treturn &v\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUInt16(v uint16) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func Uint16Arg(register Register, name string, options ...ArgOptionApplyer) *uint16 {\n\tp := new(uint16)\n\t_ = Uint16ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, 
writerArgs...)...)}\n}", "func Uints16(k string, v []uint16) Field {\n\treturn Field{Key: k, Value: valf.Uints16(v)}\n}", "func (t Uint16) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteUint16(uint16(t))\n\treturn aWriterPool.Put(lw)\n}", "func WriteInt16(buffer []byte, offset int, value int16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (s *EnvVarSet) Uint16(name string, value uint16, usage string) *uint16 {\n\tp := new(uint16)\n\n\ts.Uint16Var(p, name, value, usage)\n\n\treturn p\n}", "func (w *Packer) PutUint16(v uint16) {\n\tbinary.LittleEndian.PutUint16(w.scratch[:], v)\n\t_, _ = w.buf.Write(w.scratch[:2])\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint16(v *uint16) uint16 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "func (fw *Writer) PutUint16Field(addr biopb.Coord, v uint16) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutUint16(v)\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *Message) putUint16(v uint16) {\n\tb := m.bufferForPut(2)\n\tdefer b.Advance(2)\n\n\tbinary.LittleEndian.PutUint16(b.Bytes[b.Offset:], v)\n}", "func (b *Bus) Write16(addr mirv.Address, v uint16) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write16(addr-blk.s, v)\n}", "func Uint16(u *uint16) uint16 {\n\tif u == nil {\n\t\treturn 0\n\t}\n\treturn *u\n}", "func (this *Data) Uint16(offset uintptr) []uint16 {\n\tvar result []uint16\n\thdr := (*reflect.SliceHeader)(unsafe.Pointer(&result))\n\thdr.Data = this.buf + uintptr(offset)\n\thdr.Len = (int(this.cap) - int(offset)) >> 1\n\thdr.Cap = hdr.Len\n\treturn result\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) 
WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint16Ptr(v uint16) *uint16 { return &v }", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (e Entry) Uint16(key string, value uint16) (entry Entry) {\n\te.Uint64(key, uint64(value))\n\treturn e\n}", "func Uint16(a, b interface{}) int {\n\tu1, _ := a.(uint16)\n\tu2, _ := b.(uint16)\n\tswitch {\n\tcase u1 < u2:\n\t\treturn -1\n\tcase u1 > u2:\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (o *OutputState) ApplyUint16Ptr(applier interface{}) Uint16PtrOutput {\n\treturn o.ApplyT(applier).(Uint16PtrOutput)\n}", "func NewUint16(data arrow.ArrayData, shape, strides []int64, names []string) *Uint16 {\n\ttsr := &Uint16{tensorBase: *newTensor(arrow.PrimitiveTypes.Uint16, data, shape, strides, names)}\n\tvals := tsr.data.Buffers()[1]\n\tif vals != nil {\n\t\ttsr.values = arrow.Uint16Traits.CastFromBytes(vals.Bytes())\n\t\tbeg := tsr.data.Offset()\n\t\tend := beg + tsr.data.Len()\n\t\ttsr.values = tsr.values[beg:end]\n\t}\n\treturn tsr\n}", "func (o *OutputState) ApplyUint16Map(applier interface{}) Uint16MapOutput {\n\treturn o.ApplyT(applier).(Uint16MapOutput)\n}", "func (res Response) AsUInt16() (uint16, error) {\n\treturn res.Bits.AsUInt16(), res.Error\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteInt16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (s *Streamer) Uint16(v uint16) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = 
appendUint16(s.buffer, v)\n\treturn s\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteUint16BE(v uint16) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 2 {\n\t\treturn false\n\t}\n\n\tbinary.BigEndian.PutUint16(out.GetContainer(), v)\n\tout.pos += 2\n\treturn true\n}", "func (d DataView) Uint16(offset uint, littleEndian bool) uint16 {\n\tvar decoding binary.ByteOrder\n\tif littleEndian {\n\t\tdecoding = binary.LittleEndian\n\t} else {\n\t\tdecoding = binary.BigEndian\n\t}\n\treturn decoding.Uint16(d[offset:])\n}", "func MapUint16UintPtr(f func(*uint16) *uint, list []*uint16) []*uint {\n\tif f == nil {\n\t\treturn []*uint{}\n\t}\n\tnewList := make([]*uint, len(list))\n\tfor i, v := range list {\n\t\tnewList[i] = f(v)\n\t}\n\treturn newList\n}", "func Uint16(v interface{}) (uint16, error) {\n\tvar err error\n\tv = indirect(v)\n\n\tswitch n := v.(type) {\n\tcase int8:\n\t\tif n < 0 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int16:\n\t\tif n < 0 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int32:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int64:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint8:\n\t\treturn uint16(n), err\n\tcase uint16:\n\t\treturn n, err\n\tcase uint32:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint64:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\t}\n\n\treturn 0, InvalidTypeError{ToType: \"uint16\", Value: v}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Int16(arg0 string) int16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int16\", arg0)\n\tret0, _ := ret[0].(int16)\n\treturn ret0\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, 
arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func IsUint16(v interface{}) bool {\n\tr := elconv.AsValueRef(reflect.ValueOf(v))\n\treturn r.Kind() == reflect.Uint16\n}", "func (c *Config) GetUint16(pattern string, def ...interface{}) uint16 {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetUint16(pattern, def...)\n\t}\n\treturn 0\n}", "func MapUintUint16Ptr(f func(*uint) *uint16, list []*uint) []*uint16 {\n\tif f == nil {\n\t\treturn []*uint16{}\n\t}\n\tnewList := make([]*uint16, len(list))\n\tfor i, v := range list {\n\t\tnewList[i] = f(v)\n\t}\n\treturn newList\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Buffer) AppendUint16(v uint16) {\n\tb.AppendUint64(uint64(v))\n}", "func (mr *MockIOPackageMockRecorder) ReadUint16(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"ReadUint16\", reflect.TypeOf((*MockIOPackage)(nil).ReadUint16), arg0, arg1)\n}" ]
[ "0.8525331", "0.82195765", "0.797778", "0.77142894", "0.7556915", "0.7365441", "0.7068276", "0.7030716", "0.69746274", "0.67198604", "0.6644369", "0.6587727", "0.65803015", "0.6543883", "0.6480301", "0.6470321", "0.64450806", "0.64421916", "0.63140786", "0.6236959", "0.6208687", "0.60792094", "0.6078041", "0.6060668", "0.60114706", "0.59519947", "0.5948913", "0.5918725", "0.58952534", "0.5884303", "0.58616114", "0.5843611", "0.58367985", "0.58214164", "0.58127385", "0.5797065", "0.57970154", "0.5790995", "0.5776266", "0.57526296", "0.57440954", "0.57427067", "0.5733686", "0.5729097", "0.5728066", "0.572521", "0.572405", "0.5694609", "0.5669169", "0.5621899", "0.56133765", "0.56080145", "0.5601863", "0.55974394", "0.5587745", "0.5573519", "0.5552", "0.5546469", "0.5538327", "0.5534168", "0.55134964", "0.55132675", "0.5512759", "0.55022174", "0.5493694", "0.54823196", "0.5459567", "0.54342717", "0.54178774", "0.54126334", "0.5411165", "0.54064375", "0.5395932", "0.5389231", "0.5387333", "0.53822017", "0.5381418", "0.5366921", "0.5343898", "0.53209627", "0.5305923", "0.53051496", "0.5300579", "0.5291168", "0.5289633", "0.5265969", "0.526029", "0.52559507", "0.5246235", "0.5245217", "0.5237761", "0.52345353", "0.5226668", "0.52108014", "0.51972544", "0.51906234", "0.5185136", "0.5185051", "0.51845556", "0.51801175" ]
0.8640041
0
WriteUint16 is a helper method to define mock.On call logicalName string bitLength uint8 value uint16 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {
	return &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On("WriteUint16",
		append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUint16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e 
*MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteUInt16(buffer []byte, offset int, value uint16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func FormatUint16(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Uint16(arg0 string) uint16 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint16\", arg0)\n\tret0, _ := ret[0].(uint16)\n\treturn ret0\n}", "func (mr *MockIOPackageMockRecorder) WriteUint16(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint16\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint16), arg0, arg1, arg2)\n}", "func (vb *keyWriter) WriteUint16(num uint16) (int, error) {\n\tbinary.BigEndian.PutUint16(vb.buffer[:2], num)\n\tvb.hash.Write(vb.buffer[:2])\n\treturn 2, nil\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) {\n\tout.Append(byte(v), byte(v>>8))\n}", "func Uint16(name string, value uint16, usage string) *uint16 {\n\treturn Environment.Uint16(name, value, usage)\n}", "func WriteUint16(data []byte, x uint, v uint16) {\n\tif x+1 >= uint(len(data)) {\n\t\treturn\n\t}\n\tdata[x] = byte(v >> 8)\n\tdata[x+1] = byte(v)\n}", "func WriteUint16(w io.Writer, v uint16) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (m Measurement) AddUInt16(name string, value uint16) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (instance *Instance) SetUint16(fieldName string, value uint16) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (out *OutBuffer) WriteUint16LE(v uint16) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 2 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint16(out.GetContainer(), v)\n\tout.pos += 2\n\treturn true\n}", "func (bio *BinaryIO) WriteUint16(off int64, value uint16) {\n\tvar buf [2]byte\n\tbio.order.PutUint16(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func Uint16Arg(register Register, name string, options ...ArgOptionApplyer) *uint16 {\n\tp := new(uint16)\n\t_ = Uint16ArgVar(register, p, name, options...)\n\treturn p\n}", "func (o *OutputState) ApplyUint16(applier interface{}) Uint16Output {\n\treturn o.ApplyT(applier).(Uint16Output)\n}", "func Uint16Tag(name interface{}, value uint16) Tag {\n\treturn &tag{\n\t\ttagType: TagUint16,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (stream *Stream) WriteUint16(val uint16) {\n\tstream.ensure(5)\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tn := writeFirstBuf(stream.buf, digits[q1], stream.n)\n\twriteBuf(stream.buf, digits[r1], n)\n\tstream.n = n + 3\n\treturn\n}", "func Uint16(key string, val uint16) Tag {\n\treturn Tag{key: key, tType: 
uint16Type, integerVal: int64(val)}\n}", "func Uint16(k string, v uint16) Field {\n\treturn Field{Key: k, Value: valf.Uint16(v)}\n}", "func MeasureUInt16(name string, field string, value uint16) Measurement {\n\treturn NewMeasurement(name).AddUInt16(field, value)\n}", "func (w *Writer) WriteUInt16(v uint16) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (mr *MockIOPackageMockRecorder) AppendUint16(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint16\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint16), arg0, arg1)\n}", "func WriteI16(p thrift.TProtocol, value int16, name string, field int16) error {\n\treturn WriteI16WithContext(context.Background(), p, value, name, field)\n}", "func ExpectUint16(t *testing.T, field string, expected uint16, found uint16) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func Uint16(key string, val uint16) Field {\n\treturn Field{Key: key, Type: core.Uint16Type, Integer: int64(val)}\n}", "func (s *EnvVarSet) Uint16(name string, value uint16, usage string) *uint16 {\n\tp := new(uint16)\n\n\ts.Uint16Var(p, name, value, usage)\n\n\treturn p\n}", "func (b *Buffer) AppendUint16(v uint16) {\n\tb.AppendUint64(uint64(v))\n}", "func TestUint16(t *testing.T) {\n\ttests := []struct {\n\t\tin uint16 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff}}, // Max 2-byte\n\t}\n\n\tt.Logf(\"Running uint16 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint16(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint16\n\t\terr = ReadUint16(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint16 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint16 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint16(v uint16) *uint16 {\n\treturn &v\n}", "func Uints16(k string, v []uint16) Field {\n\treturn Field{Key: k, Value: valf.Uints16(v)}\n}", "func Uint16Var(p *uint16, name string, value uint16, usage string) {\n\tEnvironment.Uint16Var(p, name, value, usage)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar 
_ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *Message) putUint16(v uint16) {\n\tb := m.bufferForPut(2)\n\tdefer b.Advance(2)\n\n\tbinary.LittleEndian.PutUint16(b.Bytes[b.Offset:], v)\n}", "func WriteInt16(buffer []byte, offset int, value int16) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n}", "func (mr *MockSessionMockRecorder) Uint16(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint16\", reflect.TypeOf((*MockSession)(nil).Uint16), arg0)\n}", "func (s *Streamer) Uint16(v uint16) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint16(s.buffer, v)\n\treturn s\n}", "func Uint16(u *uint16) uint16 {\n\tif u == nil {\n\t\treturn 0\n\t}\n\treturn *u\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint16(v *uint16) uint16 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func appendUint16(buf []byte, v uint16) []byte {\n\treturn append(buf, byte(v>>8), byte(v))\n}", "func (e Entry) Uint16(key string, value uint16) (entry Entry) {\n\te.Uint64(key, uint64(value))\n\treturn e\n}", "func (w *Writer) WriteInt16(data interface{}) {\n\tvar t = w.getType(data, 2)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.index += 2\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func NewUint16(data arrow.ArrayData, shape, strides []int64, names []string) *Uint16 {\n\ttsr := &Uint16{tensorBase: *newTensor(arrow.PrimitiveTypes.Uint16, data, shape, strides, names)}\n\tvals := tsr.data.Buffers()[1]\n\tif vals != nil {\n\t\ttsr.values = arrow.Uint16Traits.CastFromBytes(vals.Bytes())\n\t\tbeg := tsr.data.Offset()\n\t\tend := beg + 
tsr.data.Len()\n\t\ttsr.values = tsr.values[beg:end]\n\t}\n\treturn tsr\n}", "func (b *Buffer) AppendUint16(x uint16) error {\n\treturn b.appendInteger(x)\n}", "func (w *Packer) PutUint16(v uint16) {\n\tbinary.LittleEndian.PutUint16(w.scratch[:], v)\n\t_, _ = w.buf.Write(w.scratch[:2])\n}", "func (s *EnvVarSet) Uint16Var(p *uint16, name string, value uint16, usage string) {\n\ts.Var(newUint16Value(value, p), name, usage)\n}", "func Uint16(a, b interface{}) int {\n\tu1, _ := a.(uint16)\n\tu2, _ := b.(uint16)\n\tswitch {\n\tcase u1 < u2:\n\t\treturn -1\n\tcase u1 > u2:\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}", "func AppendUints16(dst []byte, vals []uint16) []byte {\n\tmajor := majorTypeArray\n\tl := len(vals)\n\tif l == 0 {\n\t\treturn AppendArrayEnd(AppendArrayStart(dst))\n\t}\n\tif l <= additionalMax {\n\t\tlb := byte(l)\n\t\tdst = append(dst, byte(major|lb))\n\t} else {\n\t\tdst = appendCborTypePrefix(dst, major, uint64(l))\n\t}\n\tfor _, v := range vals {\n\t\tdst = AppendUint16(dst, v)\n\t}\n\treturn dst\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewTagUint16(ls *lua.LState) int {\n\tvar val = wpk.TID(ls.CheckInt(1))\n\tPushTag(ls, &LuaTag{wpk.TagUint16(val)})\n\treturn 1\n}", "func AppendUint16(dst []byte, val uint16) []byte {\n\treturn AppendUint(dst, uint(val))\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (t Uint16) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteUint16(uint16(t))\n\treturn aWriterPool.Put(lw)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func AppendUint16(data []byte, v uint16) []byte {\n\tdata = append(data, byte(v>>8))\n\tdata = append(data, byte(v))\n\treturn data\n}", "func (c Context) Uint16(key string, i uint16) 
Context {\n\tc.l.context = appendUint16(c.l.context, key, i)\n\treturn c\n}", "func MarshalUint16(dst []byte, u uint16) []byte {\n\treturn append(dst, byte(u>>8), byte(u))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint16(v interface{}) (uint16, error) {\n\tvar err error\n\tv = indirect(v)\n\n\tswitch n := v.(type) {\n\tcase int8:\n\t\tif n < 0 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int16:\n\t\tif n < 0 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int32:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int64:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase int:\n\t\tif n < 0 || n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint8:\n\t\treturn uint16(n), err\n\tcase uint16:\n\t\treturn n, err\n\tcase uint32:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint64:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\tcase uint:\n\t\tif n > math.MaxUint16 {\n\t\t\terr = OverflowError{ToType: \"uint16\", Value: v}\n\t\t}\n\t\treturn uint16(n), err\n\t}\n\n\treturn 0, InvalidTypeError{ToType: \"uint16\", Value: v}\n}", "func Uint16ToString(v string) predicate.Conversion {\n\treturn predicate.Conversion(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldUint16ToString), v))\n\t})\n}", "func (this *Data) Uint16(offset uintptr) []uint16 {\n\tvar result []uint16\n\thdr := (*reflect.SliceHeader)(unsafe.Pointer(&result))\n\thdr.Data = this.buf + uintptr(offset)\n\thdr.Len = (int(this.cap) - int(offset)) >> 1\n\thdr.Cap = hdr.Len\n\treturn result\n}", "func (o *OutputState) ApplyUint16Ptr(applier interface{}) Uint16PtrOutput {\n\treturn o.ApplyT(applier).(Uint16PtrOutput)\n}", "func opUI16ToStr(prgrm *CXProgram) {\n\texpr := prgrm.GetExpr()\n\tfp := prgrm.GetFramePointer()\n\n\toutB0 := FromStr(strconv.FormatUint(uint64(ReadUI16(fp, expr.Inputs[0])), 10))\n\tWriteObject(GetFinalOffset(fp, expr.Outputs[0]), outB0)\n}", "func (b *Bus) Write16(addr mirv.Address, v uint16) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write16(addr-blk.s, v)\n}", "func UInt16(v uint16) *uint16 {\n\treturn &v\n}", "func (o *OutputState) ApplyUint16Array(applier interface{}) Uint16ArrayOutput {\n\treturn o.ApplyT(applier).(Uint16ArrayOutput)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs 
...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (o *OutputState) ApplyUint16Map(applier interface{}) Uint16MapOutput {\n\treturn o.ApplyT(applier).(Uint16MapOutput)\n}", "func PullUint16String(val interface{}) uintptr {\r\n\tswitch v := val.(type) {\r\n\tcase uint16:\r\n\t\treturn uintptr(v)\r\n\r\n\tcase string:\r\n\t\tpStr, err := syscall.UTF16PtrFromString(v)\r\n\t\tif err != nil {\r\n\t\t\tpanic(fmt.Sprintf(\"PullUint16String() failed \\\"%s\\\": %s\", v, err))\r\n\t\t}\r\n\t\treturn uintptr(unsafe.Pointer(pStr)) // runtime.KeepAlive()\r\n\r\n\tdefault:\r\n\t\tpanic(fmt.Sprintf(\"Invalid type: %s\", reflect.TypeOf(val)))\r\n\t}\r\n}", "func (ec ErrCode) Uint16() uint16 { return ec.code }", "func (s ServiceFlags) UInt16() uint16 {\n\treturn uint16(s)\n}", "func (m *MockIOPackage) ReadUint16(arg0 []byte, arg1 int) (uint16, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint16\", arg0, arg1)\n\tret0, _ := ret[0].(uint16)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func (d DataView) Uint16(offset uint, littleEndian bool) uint16 {\n\tvar decoding binary.ByteOrder\n\tif littleEndian {\n\t\tdecoding = binary.LittleEndian\n\t} else {\n\t\tdecoding = binary.BigEndian\n\t}\n\treturn decoding.Uint16(d[offset:])\n}", "func packUint16(i uint16) []byte { return []byte{byte(i >> 8), byte(i)} }", "func Uint16Ptr(v uint16) *uint16 { return &v }", "func (res Response) AsUInt16() (uint16, error) {\n\treturn res.Bits.AsUInt16(), res.Error\n}", "func (c *Config) GetUint16(pattern string, def ...interface{}) uint16 {\n\tif j := c.getJson(); j != nil {\n\t\treturn j.GetUint16(pattern, def...)\n\t}\n\treturn 0\n}", "func Uint16ToStringHasSuffix(v string) predicate.Conversion {\n\treturn predicate.Conversion(func(s *sql.Selector) {\n\t\ts.Where(sql.HasSuffix(s.C(FieldUint16ToString), v))\n\t})\n}", "func IsUint16(v interface{}) bool {\n\tr := elconv.AsValueRef(reflect.ValueOf(v))\n\treturn r.Kind() == reflect.Uint16\n}", "func (z *Numeric) SetUint16(x uint16) *Numeric {\n\tif x == 0 {\n\t\treturn z.SetZero()\n\t}\n\n\tz.sign = numericPositive\n\tz.weight = -1\n\tz.digits = make([]int16, 0, 1) // as x!=0 there is at least 1 1000-base digit\n\tfor x != 0 {\n\t\td := int16(x % numericBase)\n\t\tx /= numericBase\n\t\tif d != 0 || len(z.digits) > 0 { // avoid tailing zero\n\t\t\tz.digits = append([]int16{d}, z.digits...)\n\t\t}\n\t\tz.weight++\n\t}\n\n\treturn z\n}", "func MapUint16UintPtr(f func(*uint16) *uint, list []*uint16) []*uint {\n\tif f == nil {\n\t\treturn []*uint{}\n\t}\n\tnewList := make([]*uint, len(list))\n\tfor i, v := range list {\n\t\tnewList[i] = f(v)\n\t}\n\treturn newList\n}", "func (fw *Writer) PutUint16Field(addr biopb.Coord, v uint16) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutUint16(v)\n}", "func Uint16() uint16 {\n\treturn uint16(randIntRange(0, math.MaxUint16))\n}", "func (p Pointer) UInt16LE(offset int) uint16 {\n\treturn *(*uint16)(unsafe.Pointer(uintptr(int(p) + offset)))\n}", "func MapStrUint16Ptr(f func(*string) *uint16, list []*string) []*uint16 {\n\tif f == nil {\n\t\treturn []*uint16{}\n\t}\n\tnewList := make([]*uint16, len(list))\n\tfor i, v := range list {\n\t\tnewList[i] = f(v)\n\t}\n\treturn newList\n}" ]
[ "0.8212447", "0.8178533", "0.8109742", "0.73211795", "0.72351736", "0.72023046", "0.7144631", "0.70492995", "0.7038619", "0.6912643", "0.6803377", "0.6749658", "0.6725934", "0.6628444", "0.6615236", "0.64401793", "0.6425711", "0.64039665", "0.6402469", "0.63673455", "0.6365786", "0.6365599", "0.63407826", "0.63312244", "0.6277901", "0.62307847", "0.622911", "0.6224825", "0.621975", "0.62086916", "0.6173614", "0.6136356", "0.6100549", "0.60930836", "0.6073121", "0.60480297", "0.60373", "0.6031011", "0.60238767", "0.59855896", "0.5972748", "0.5960816", "0.5943983", "0.5924348", "0.59026116", "0.59009194", "0.58986133", "0.58864427", "0.5883351", "0.5880936", "0.5874654", "0.5859137", "0.58444977", "0.5843702", "0.584264", "0.583474", "0.58209175", "0.58200556", "0.5810946", "0.5804391", "0.57981783", "0.5786689", "0.5786286", "0.57860166", "0.5781872", "0.5781306", "0.5777221", "0.57728064", "0.5736843", "0.5735459", "0.5729087", "0.57290566", "0.57272", "0.57248443", "0.5723509", "0.5713811", "0.57067144", "0.5675592", "0.5674047", "0.566809", "0.5655328", "0.5639555", "0.5631843", "0.5627247", "0.56218207", "0.5619875", "0.5616107", "0.5615322", "0.5607461", "0.5603356", "0.5574637", "0.55568695", "0.5551582", "0.5543091", "0.55297", "0.5525011", "0.55248386", "0.55146295", "0.5508682", "0.5486428" ]
0.8390136
0
WriteUint32 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, 
len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) AppendUint32(arg0 []byte, arg1 uint32) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint32\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUint32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName 
string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) WriteUint32(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint32\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint32), arg0, arg1, arg2)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func WriteUInt32(buffer []byte, offset int, value uint32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (bio *BinaryIO) WriteUint32(off int64, value uint32) {\n\tvar buf [4]byte\n\tbio.order.PutUint32(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (vb *keyWriter) WriteUint32(num uint32) (int, error) {\n\tbinary.BigEndian.PutUint32(vb.buffer[:4], num)\n\tvb.hash.Write(vb.buffer[:4])\n\treturn 4, nil\n}", "func SetUint32ByName(o interface{}, name string, 
val uint32) {\n\tif fd := reflect.ValueOf(o).Elem().FieldByName(name); fd.IsValid() {\n\t\tfd.SetUint(uint64(val))\n\t}\n}", "func TestUint32(t *testing.T) {\n\ttests := []struct {\n\t\tin uint32 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff}}, // Max 4-byte\n\t}\n\n\tt.Logf(\"Running uint32 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint32(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint32\n\t\terr = ReadUint32(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := 
_m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (m *MockIOPackage) ReadUint32(arg0 []byte, arg1 int) (uint32, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint32\", arg0, arg1)\n\tret0, _ := ret[0].(uint32)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func WriteUint32(w io.Writer, v uint32) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (w *ByteWriter) WriteUint32(val uint32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (w *Writer) WriteUInt32(v uint32) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (z *Writer) writeUint32(x uint32) error {\n\tbuf := z.buf[:4]\n\tbinary.LittleEndian.PutUint32(buf, x)\n\t_, err := z.dst.Write(buf)\n\treturn err\n}", "func WriteUint32(data []byte, x uint, v uint32) {\n\tif x+3 >= uint(len(data)) {\n\t\treturn\n\t}\n\tdata[x] = byte(v >> 24)\n\tdata[x+1] = byte(v >> 16)\n\tdata[x+2] = byte(v >> 8)\n\tdata[x+3] = byte(v)\n}", "func (m *MockSession) Uint(arg0 string) uint {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint\", arg0)\n\tret0, _ := ret[0].(uint)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func Uint32() uint32", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, 
value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteUint32LE(v uint32) {\n\tout.Append(byte(v), byte(v>>8), byte(v>>16), byte(v>>24))\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func ExpectUint32(t *testing.T, field string, expected uint32, found uint32) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (c *Configurator) Uint32(name string, value uint32, usage string) *uint32 {\n\tp := new(uint32)\n\n\tc.Uint32Var(p, name, value, usage)\n\n\treturn p\n}", "func TestReadUint32FromBytes(t *testing.T) {\n\tvar file, _ = ioutil.TempFile(os.TempDir(), \"\")\n\tvar filePath = file.Name()\n\tdefer file.Close()\n\tdefer os.Remove(filePath)\n\n\tvar writer = bufio.NewWriter(file)\n\t// Little endian pow disposition:\n\t// 16^1 16^0 | 16^3 16^2 | 16^5 16^4 | 16^7 16^6\n\t// 0 x 0 f | 0 x 0 1 | 0 x 1 0 | 0 x 0 4\n\t// 0 15 | 0 256 | 1048576 0 | 0 67108864\n\t// --------------------------------------------------------------------\n\t// TOTAL = 68157711\n\twriter.Write([]byte{0x0f, 0x01, 0x10, 0x04})\n\twriter.Flush()\n\n\tfile.Seek(0, os.SEEK_SET)\n\n\tvar readerBag = fileReader.ByteReaderBag{File: file}\n\tvar number = fileReader.ReadUint32FromBytes(&readerBag)\n\tif readerBag.Err != nil {\n\t\tt.Errorf(\"Expected no errors, got '%v'\", readerBag.Err)\n\t}\n\n\ttest.ExpectUint32(t, \"Number test\", 68157711, number)\n\n\tfileReader.ReadUint32FromBytes(&readerBag)\n\tif readerBag.Err == nil {\n\t\tt.Errorf(\"Expected no bytes to read, but got no error\")\n\t}\n}", "func (out *OutBuffer) WriteUint32LE(v uint32) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 4 {\n\t\treturn 
false\n\t}\n\n\tbinary.LittleEndian.PutUint32(out.GetContainer(), v)\n\tout.pos += 4\n\treturn true\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func StoreUint32(addr *uint32, val uint32)", "func (w *ByteWriter) MustWriteUint32(val uint32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (child MagicU32) Uint32() uint32 {\n\treturn uint32(child &^ (1 << 31))\n}", "func Uint32(name string, value uint32, usage string) *uint32 {\n\treturn Global.Uint32(name, value, usage)\n}", "func (mr *MockSessionMockRecorder) Uint32(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint32\", reflect.TypeOf((*MockSession)(nil).Uint32), arg0)\n}", "func (m Measurement) AddUInt32(name string, value uint32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (instance *Instance) SetUint32(fieldName string, value uint32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func testRoundTripUint32(t *testing.T, encoder func([]byte, []uint32) int, decoder func([]uint32, []byte), data []uint32, expectedSize int) {\n\tencodedRaw := make([]byte, MaxSize32(len(data)))\n\tencodedSize := encoder(encodedRaw, data)\n\tif expectedSize >= 0 && encodedSize != expectedSize {\n\t\tt.Errorf(\"got encodedSize: %d, expected: %d\", encodedSize, expectedSize)\n\t}\n\tencoded := make([]byte, encodedSize, encodedSize) // ensure the encoded size is precise\n\tcopy(encoded, encodedRaw)\n\tdecodedData := make([]uint32, len(data), len(data))\n\tdecoder(decodedData, encoded)\n\tfor i := range data {\n\t\tif decodedData[i] != data[i] {\n\t\t\tt.Errorf(\"got decodedData[%d]: %d, expected: %d\", i, decodedData[i], data[i])\n\t\t}\n\t}\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (m *MockHash32) Sum32() uint32 {\n\tret := m.ctrl.Call(m, \"Sum32\")\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx 
context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Packer) PutUint32(v uint32) {\n\tbinary.LittleEndian.PutUint32(w.scratch[:], v)\n\t_, _ = w.buf.Write(w.scratch[:4])\n}", "func (mr *MockIOPackageMockRecorder) AppendUint32(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint32\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint32), arg0, arg1)\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (stream *Stream) WriteUint32(val uint32) {\n\tstream.ensure(10)\n\tn := stream.n\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tq2 := q1 / 1000\n\tif q2 == 0 {\n\t\tn := writeFirstBuf(stream.buf, digits[q1], n)\n\t\twriteBuf(stream.buf, digits[r1], n)\n\t\tstream.n = n + 3\n\t\treturn\n\t}\n\tr2 := q1 - q2*1000\n\tq3 := q2 / 1000\n\tif q3 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q2], n)\n\t} else {\n\t\tr3 := q2 - q3*1000\n\t\tstream.buf[n] = byte(q3 + '0')\n\t\tn++\n\t\twriteBuf(stream.buf, digits[r3], n)\n\t\tn += 3\n\t}\n\twriteBuf(stream.buf, digits[r2], n)\n\twriteBuf(stream.buf, digits[r1], n+3)\n\tstream.n = n + 6\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, 
[]byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func SetUint32(gauge prometheus.Gauge, arg uint32) {\n\tgauge.Set(float64(arg))\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func Uint32Arg(register Register, name string, options ...ArgOptionApplyer) *uint32 {\n\tp := new(uint32)\n\t_ = Uint32ArgVar(register, p, name, options...)\n\treturn p\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func FormatUint32(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func WriteInt32(buffer []byte, offset int, value int32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (ob *PyObject) Uint32() uint32 {\n\treturn uint32(C.PyLong_AsUnsignedLong(ob.rawptr))\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBufferWrite(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 3, n)\n\n\t\tn, err = w.Write([]byte{0xff})\n\t\trequire.NoError(t, err)\n\n\t\tassert.Equal(t, 1, 
n)\n\n\t\texpected := append(toWrite, 0xff)\n\t\tfor i, bt := range w.Data() {\n\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t}\n\t})\n\n\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\tw := &Buffer{}\n\t\t// write empty byte and reset it's byte index to 0.\n\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\tw.byteIndex = 0\n\t\t// assume that 3 '0' bits were already written.\n\t\tw.bitIndex = 3\n\n\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\tn, err := w.Write(toWrite)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 3, n)\n\n\t\t// 0x3f - 00111111\n\t\t// 00111111 << 3 = 11111000\n\t\texpected := byte(0xf8)\n\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t// 0x12 - 00010010\n\t\t// 00111111 >> 5 = 00000001\n\t\t// 00010010 << 3 = 10010000\n\t\t// \t\t\t\t | 10010101\n\t\t// 10010111 - 0x91\n\t\texpected = byte(0x91)\n\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t// 0x86 - 10000110\n\t\t// 00010010 >> 5 = \t00000000\n\t\t// 10000110 << 3 = \t00110000\n\t\t// \t\t\t\t |\t00110000\n\t\t// 00110000 = 0x30\n\t\texpected = byte(0x30)\n\t\tassert.Equal(t, expected, w.data[2])\n\t\tassert.Len(t, w.Data(), 4)\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\tn, err = w.Write([]byte{0xff})\n\t\t\trequire.NoError(t, err)\n\n\t\t\tassert.Equal(t, 1, n)\n\n\t\t\texpected := append(toWrite, 0xff)\n\t\t\tfor i, bt := range w.Data() {\n\t\t\t\tassert.Equal(t, expected[i], bt, \"%d\", i)\n\t\t\t}\n\t\t})\n\n\t\tt.Run(\"Shifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\t\t\t// write empty byte so the buffer data is initialized\n\t\t\trequire.NoError(t, w.WriteByte(0x00))\n\t\t\t// reset it's byteindex\n\t\t\tw.byteIndex = 0\n\t\t\t// assume three '0' bits are already stored.\n\t\t\tw.bitIndex = 3\n\n\t\t\ttoWrite := []byte{0x3f, 0x12, 0x86}\n\n\t\t\tn, err := w.Write(toWrite)\n\t\t\trequire.NoError(t, err)\n\t\t\tassert.Equal(t, 3, n)\n\n\t\t\t// 0x3f - 00111111\n\t\t\t// 00111111 >> 3 = 00000111\n\t\t\t// 00000111 = 0x07\n\t\t\texpected := byte(0x07)\n\t\t\tassert.Equal(t, expected, w.data[0])\n\n\t\t\t// 0x12 - 00010010\n\t\t\t// 00111111 << 5 = 11100000\n\t\t\t// 00010010 >> 3 = 00000010\n\t\t\t// \t\t\t\t | 11100010\n\t\t\t// 11100010 - 0xE2\n\t\t\texpected = byte(0xE2)\n\t\t\tassert.Equal(t, expected, w.data[1])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 00010010 << 5 = \t01000000\n\t\t\t// 10000110 >> 3 = \t00010000\n\t\t\t// \t\t\t\t |\t01010000\n\t\t\t// 00110000 = 0x50\n\t\t\texpected = byte(0x50)\n\t\t\tassert.Equal(t, expected, w.data[2])\n\n\t\t\t// 0x86 - 10000110\n\t\t\t// 10000110 << 5 = \t11000000\n\t\t\t// 11000000 = 0xC0\n\t\t\texpected = byte(0xC0)\n\t\t\tassert.Equal(t, expected, w.data[3])\n\t\t})\n\t})\n}", "func MeasureUInt32(name string, field string, value uint32) Measurement {\n\treturn NewMeasurement(name).AddUInt32(field, value)\n}", "func (h *MemHash) WriteRand32() {\n\tvar nullHashBytes [4]byte\n\thashBytes := nullHashBytes[:]\n\trand.Read(hashBytes)\n\th.Write(hashBytes)\n}", "func generateUint32Example() {\n\tvar u uint32 = 42\n\tc := NewUint32Container()\n\tc.Put(u)\n\tv := c.Get()\n\tfmt.Printf(\"generateExample: %d (%T)\\n\", v, v)\n}" ]
[ "0.8419254", "0.8120989", "0.79344743", "0.756415", "0.7156935", "0.7023325", "0.69668496", "0.69089663", "0.6795606", "0.67897975", "0.6699396", "0.6615843", "0.6546844", "0.6522827", "0.65057486", "0.64040697", "0.6371873", "0.63624585", "0.6337979", "0.63079244", "0.6295422", "0.62450176", "0.61849433", "0.6161136", "0.615257", "0.615169", "0.6085619", "0.60488373", "0.6041376", "0.6035309", "0.5982513", "0.59778494", "0.5962154", "0.59469587", "0.5940162", "0.5924877", "0.59196174", "0.58930486", "0.5885449", "0.5877601", "0.5877255", "0.58645546", "0.5819236", "0.58018595", "0.5800572", "0.5781612", "0.5767547", "0.5762334", "0.5723387", "0.56756634", "0.5670065", "0.56590414", "0.5632061", "0.5617434", "0.5616885", "0.5614941", "0.5586312", "0.5565991", "0.5534778", "0.55195725", "0.55051714", "0.5500586", "0.5490226", "0.5483756", "0.54629445", "0.54612213", "0.5453886", "0.5421755", "0.5414699", "0.5401201", "0.5388459", "0.5385953", "0.5383279", "0.5381728", "0.5366749", "0.5331077", "0.53148574", "0.53148574", "0.5313454", "0.53092766", "0.5307317", "0.5302275", "0.5290638", "0.52868617", "0.52844673", "0.5276756", "0.52643377", "0.52581906", "0.5256589", "0.5255743", "0.52494085", "0.52491605", "0.5243287", "0.5236495", "0.5225855", "0.51986736", "0.5152908", "0.51476663", "0.51459336", "0.51279634" ]
0.8470878
0
WriteUint32 is a helper method to define mock.On call logicalName string bitLength uint8 value uint32 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call { return &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On("WriteUint32", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (w *Writer) WriteUint32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt32_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt32_Call{Call: 
_e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt32_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt32_Call{Call: _e.mock.On(\"WriteInt32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteUInt32(buffer []byte, offset int, value uint32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (m *MockIOPackage) AppendUint32(arg0 []byte, arg1 uint32) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint32\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint32(arg0 string) uint32 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint32\", arg0)\n\tret0, _ := ret[0].(uint32)\n\treturn ret0\n}", "func SetUint32ByName(o interface{}, name string, val uint32) {\n\tif fd := reflect.ValueOf(o).Elem().FieldByName(name); fd.IsValid() {\n\t\tfd.SetUint(uint64(val))\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat32_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (bio *BinaryIO) WriteUint32(off int64, value uint32) {\n\tvar buf 
[4]byte\n\tbio.order.PutUint32(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (vb *keyWriter) WriteUint32(num uint32) (int, error) {\n\tbinary.BigEndian.PutUint32(vb.buffer[:4], num)\n\tvb.hash.Write(vb.buffer[:4])\n\treturn 4, nil\n}", "func (mr *MockIOPackageMockRecorder) WriteUint32(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint32\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint32), arg0, arg1, arg2)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat32_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat32_Call{Call: _e.mock.On(\"WriteFloat32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteUint32(w io.Writer, v uint32) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (w *Writer) WriteUInt32(v uint32) error {\n\treturn binary.Write(w.out, w.bo, &v)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *ByteWriter) WriteUint32(val uint32, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func FormatUint32(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func WriteUint32(data []byte, x uint, v uint32) {\n\tif x+3 >= uint(len(data)) {\n\t\treturn\n\t}\n\tdata[x] = byte(v >> 24)\n\tdata[x+1] = byte(v >> 16)\n\tdata[x+2] = byte(v >> 8)\n\tdata[x+3] = byte(v)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteUint32LE(v uint32) {\n\tout.Append(byte(v), byte(v>>8), byte(v>>16), byte(v>>24))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func SetUint32(gauge prometheus.Gauge, arg uint32) {\n\tgauge.Set(float64(arg))\n}", "func (m Measurement) AddUInt32(name string, value uint32) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (z *Writer) writeUint32(x uint32) error {\n\tbuf := z.buf[:4]\n\tbinary.LittleEndian.PutUint32(buf, x)\n\t_, err := z.dst.Write(buf)\n\treturn err\n}", "func (c *Configurator) Uint32(name string, value uint32, usage string) *uint32 {\n\tp := new(uint32)\n\n\tc.Uint32Var(p, name, value, usage)\n\n\treturn p\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint32Arg(register Register, name string, options ...ArgOptionApplyer) *uint32 {\n\tp := new(uint32)\n\t_ = 
Uint32ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt16_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint32() uint32", "func (instance *Instance) SetUint32(fieldName string, value uint32) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func Uint32(name string, value uint32, usage string) *uint32 {\n\treturn Global.Uint32(name, value, usage)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteInt32(buffer []byte, offset int, value int32) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n}", "func (w *Writer) WriteInt32(data interface{}) {\n\tvar t = w.getType(data, 4)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.index += 4\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, 
value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteI32(p thrift.TProtocol, value int32, name string, field int16) error {\n\treturn WriteI32WithContext(context.Background(), p, value, name, field)\n}", "func (mr *MockIOPackageMockRecorder) AppendUint32(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint32\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint32), arg0, arg1)\n}", "func Uint32(i uint32) string {\n\t// Base 10\n\treturn strconv.FormatUint(uint64(i), 10)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func StoreUint32(addr *uint32, val uint32)", "func (dm *dataManager) writeUint(address uint, u uint) (err ProcessException) {\n\tdata := make([]byte, 4)\n\tbinary.LittleEndian.PutUint32(data, uint32(u))\n\n\terr = dm.process.WriteBytes(address, data)\n\n\treturn\n}", "func (out *OutBuffer) WriteUint32LE(v uint32) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 4 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint32(out.GetContainer(), v)\n\tout.pos += 4\n\treturn true\n}", "func Uint32Tag(name interface{}, value uint32) Tag {\n\treturn &tag{\n\t\ttagType: TagUint32,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (o *OutputState) ApplyUint32(applier interface{}) Uint32Output {\n\treturn o.ApplyT(applier).(Uint32Output)\n}", "func generateUint32Example() {\n\tvar u uint32 = 42\n\tc := NewUint32Container()\n\tc.Put(u)\n\tv := c.Get()\n\tfmt.Printf(\"generateExample: %d (%T)\\n\", v, v)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := 
range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (child MagicU32) Uint32() uint32 {\n\treturn uint32(child &^ (1 << 31))\n}", "func (b *Buffer) AppendUint32(v uint32) {\n\tb.AppendUint64(uint64(v))\n}", "func (stream *Stream) WriteUint32(val uint32) {\n\tstream.ensure(10)\n\tn := stream.n\n\tq1 := val / 1000\n\tif q1 == 0 {\n\t\tstream.n = writeFirstBuf(stream.buf, digits[val], n)\n\t\treturn\n\t}\n\tr1 := val - q1*1000\n\tq2 := q1 / 1000\n\tif q2 == 0 {\n\t\tn := writeFirstBuf(stream.buf, digits[q1], n)\n\t\twriteBuf(stream.buf, digits[r1], n)\n\t\tstream.n = n + 3\n\t\treturn\n\t}\n\tr2 := q1 - q2*1000\n\tq3 := q2 / 1000\n\tif q3 == 0 {\n\t\tn = writeFirstBuf(stream.buf, digits[q2], n)\n\t} else {\n\t\tr3 := q2 - q3*1000\n\t\tstream.buf[n] = byte(q3 + '0')\n\t\tn++\n\t\twriteBuf(stream.buf, digits[r3], n)\n\t\tn += 3\n\t}\n\twriteBuf(stream.buf, digits[r2], n)\n\twriteBuf(stream.buf, digits[r1], n+3)\n\tstream.n = n + 6\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func ExpectUint32(t *testing.T, field string, expected uint32, found uint32) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Buffer) AppendUint32(x uint32) error {\n\treturn b.appendInteger(x)\n}", "func (s *Streamer) Uint32(v uint32) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint32(s.buffer, v)\n\treturn s\n}", "func MeasureUInt32(name string, field string, value uint32) Measurement {\n\treturn NewMeasurement(name).AddUInt32(field, value)\n}", "func Uint32(key string, val uint32) Tag {\n\treturn Tag{key: key, tType: uint32Type, integerVal: int64(val)}\n}", "func TestUint32(t *testing.T) {\n\ttests := []struct {\n\t\tin uint32 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, 
[]byte{0x00,0x00,0x01,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff}}, // Max 4-byte\n\t}\n\n\tt.Logf(\"Running uint32 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint32(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint32\n\t\terr = ReadUint32(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint32 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint32 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (w *Packer) PutUint32(v uint32) {\n\tbinary.LittleEndian.PutUint32(w.scratch[:], v)\n\t_, _ = w.buf.Write(w.scratch[:4])\n}", "func Uint32(k string, v uint32) Field {\n\treturn Field{Key: k, Value: valf.Uint32(v)}\n}", "func (m *Message) putUint32(v uint32) {\n\tb := m.bufferForPut(4)\n\tdefer b.Advance(4)\n\n\tbinary.LittleEndian.PutUint32(b.Bytes[b.Offset:], v)\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockSessionMockRecorder) Uint32(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint32\", reflect.TypeOf((*MockSession)(nil).Uint32), arg0)\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func Uint32n(n uint32) uint32", "func WriteMemI32(mem []byte, offset int, v int32) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n}", "func SetUint32(buf []byte, pos int, val uint32) {\n\tif pos+4 > len(buf) {\n\t\treturn\n\t}\n\tfor i := 0; 
i < 4; i++ {\n\t\tbuf[3-i+pos] = byte(val)\n\t\tval >>= 8\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (p Path) AddUint32(child uint32) Path {\r\n\treturn p.Add(fmt.Sprint(child))\r\n}", "func (w *ByteWriter) MustWriteUint32(val uint32, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func appendUint32(buf []byte, v uint32) []byte {\n\treturn append(buf, byte(v>>24), byte(v>>16), byte(v>>8), byte(v))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt16_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt16_Call{Call: _e.mock.On(\"WriteInt16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func putUint32LE(p []byte, x uint32) {\n\tp[0] = byte(x)\n\tp[1] = byte(x >> 8)\n\tp[2] = byte(x >> 16)\n\tp[3] = byte(x >> 24)\n}", "func Uint32(n uint32) *uint32 {\n\treturn &n\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t Uint32) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteUint32(uint32(t))\n\treturn aWriterPool.Put(lw)\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (r *Rank) Uint32() uint32 {\n\tif r == nil {\n\t\treturn uint32(NilRank)\n\t}\n\treturn uint32(*r)\n}", "func (r *Rank) Uint32() uint32 {\n\tif r == nil {\n\t\treturn uint32(NilRank)\n\t}\n\treturn uint32(*r)\n}" ]
[ "0.7903424", "0.7822844", "0.7774846", "0.68467873", "0.6823225", "0.6813629", "0.6678643", "0.6668687", "0.64327794", "0.64212066", "0.63475394", "0.6270841", "0.62578094", "0.6210469", "0.6206191", "0.61853206", "0.61675817", "0.61160856", "0.6113007", "0.60951746", "0.6073539", "0.6068692", "0.603108", "0.60283715", "0.60095555", "0.6003673", "0.5968539", "0.5964116", "0.5948904", "0.5907953", "0.59003466", "0.5885614", "0.5822423", "0.57973087", "0.57734746", "0.5773209", "0.576238", "0.57364047", "0.5711929", "0.5710298", "0.5702418", "0.56962657", "0.5684424", "0.5658746", "0.5655065", "0.5643621", "0.56253743", "0.56227475", "0.557386", "0.55718803", "0.55534154", "0.5551562", "0.55373913", "0.55018234", "0.5480451", "0.5471689", "0.54708993", "0.5455478", "0.5453204", "0.541365", "0.5409781", "0.5403075", "0.5402687", "0.53984016", "0.5392798", "0.5386357", "0.5366258", "0.5360453", "0.5350864", "0.535068", "0.5346245", "0.5324772", "0.5317112", "0.5309485", "0.5303435", "0.5303243", "0.529775", "0.5288857", "0.5287623", "0.5284769", "0.5272087", "0.5271094", "0.52705705", "0.5260729", "0.5233135", "0.52307636", "0.520487", "0.5196052", "0.5192952", "0.5191916", "0.5178056", "0.51736546", "0.5169329", "0.5139182", "0.51327074", "0.5127156", "0.51123285", "0.510697", "0.5105385", "0.5105385" ]
0.8083898
0
WriteUint64 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {
	_va := make([]interface{}, len(writerArgs))
	for _i := range writerArgs {
		_va[_i] = writerArgs[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, logicalName, bitLength, value)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)

	var r0 error
	if rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {
		r0 = rf(logicalName, bitLength, value, writerArgs...)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, 
len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Uint64(arg0 string) uint64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint64\", arg0)\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (mr *MockIOPackageMockRecorder) WriteUint64(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint64\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint64), arg0, arg1, arg2)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", 
"func (bw *BufWriter) Uint64(val uint64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatUint(val, 10))\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *FakeObject) Uint64() uint64 { return o.Value.(uint64) }", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName 
string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (bio *BinaryIO) WriteUint64(off int64, value uint64) {\n\tvar buf [8]byte\n\tbio.order.PutUint64(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (w *ByteWriter) WriteUint64(val uint64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (vb *keyWriter) WriteUint64(num uint64) (int, error) {\n\tbinary.BigEndian.PutUint64(vb.buffer[:8], num)\n\tvb.hash.Write(vb.buffer[:8])\n\treturn 8, nil\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (f *FlagSet) Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\tp := new(uint64)\n\tf.Uint64Var(p, name, alias, value, usage, fn)\n\treturn p\n}", "func FormatUint64(name string) string {\n\treturn formatUintFunction(name, false)\n}", "func Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\treturn CommandLine.Uint64(name, alias, value, usage, fn)\n}", "func Uint64(name string, val uint64) Field {\n\treturn Field(zap.Uint64(name, val))\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn ex.FlagSet.Uint64(name, value, usage)\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\tp := new(uint64);\n\tUint64Var(p, name, 
value, usage);\n\treturn p;\n}", "func (w *Writer) WriteUint64(num uint64) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = AppendUint(w.b, num)\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn Environment.Uint64(name, value, usage)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) Uint64(n uint64) {\n\tw.buf = strconv.AppendUint(w.buf, uint64(n), 10)\n}", "func (m *MockIOPackage) ReadUint64(arg0 []byte, arg1 int) (uint64, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint64\", arg0, arg1)\n\tret0, _ := ret[0].(uint64)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (m *Mmap) WriteUint64(start int64, val uint64) error {\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[start:start+8], val)\n\treturn nil\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func Test_JsonSerializationUint64(t *testing.T) {\n\ttv := NewTypedValueUint(testPositiveInt64, 64)\n\n\tjsonStr, err := json.Marshal(tv)\n\tassert.NilError(t, err)\n\n\tassert.Equal(t, string(jsonStr), `{\"Bytes\":\"f/////////8=\",\"Type\":3,\"TypeOpts\":[64]}`)\n\n\tunmarshalledTv := TypedValue{}\n\terr = json.Unmarshal(jsonStr, &unmarshalledTv)\n\tassert.NilError(t, err)\n\n\tassert.Equal(t, ValueType_UINT, unmarshalledTv.Type)\n\tassert.Equal(t, len(unmarshalledTv.TypeOpts), 1)\n\tassert.DeepEqual(t, unmarshalledTv.Bytes, []byte{0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff})\n\n\tstrVal := (*TypedUint)(&unmarshalledTv).String()\n\tassert.Equal(t, fmt.Sprintf(\"%d\", testPositiveInt64), strVal)\n\tassert.Equal(t, unmarshalledTv.ValueToString(), \"9223372036854775807\")\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca 
= append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint64(flag string, value uint64, description string) *uint64 {\n\tvar v uint64\n\tUint64Var(&v, flag, value, description)\n\treturn &v\n}", "func WriteMemI64(mem []byte, offset int, v int64) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n\tmem[offset+4] = byte(v >> 32)\n\tmem[offset+5] = byte(v >> 40)\n\tmem[offset+6] = byte(v >> 48)\n\tmem[offset+7] = byte(v >> 56)\n}", "func (instance *Instance) SetUint64(fieldName string, value uint64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (z *Rat) SetUint64(x uint64) *Rat {}", "func (out *OutBuffer) WriteUint64LE(v uint64) {\n\tout.Append(\n\t\tbyte(v),\n\t\tbyte(v>>8),\n\t\tbyte(v>>16),\n\t\tbyte(v>>24),\n\t\tbyte(v>>32),\n\t\tbyte(v>>40),\n\t\tbyte(v>>48),\n\t\tbyte(v>>56),\n\t)\n}", "func (out *OutBuffer) WriteUint64LE(v uint64) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 8 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint64(out.GetContainer(), v)\n\tout.pos += 8\n\treturn true\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint64B(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01}}, // Min single byte\n\t\t{255, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff}}, // Max single byte\n\t\t{256, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0x00,0x00,0x00,0x00,0xff,0xff,0xff,0xff}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64B %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64B(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64B #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64B #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64B(rbuf, &val)\n\t\tif err != nil 
{\n\t\t\tt.Errorf(\"ReadUint64B #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64B #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func Uint64Arg(register Register, name string, options ...ArgOptionApplyer) *uint64 {\n\tp := new(uint64)\n\t_ = Uint64ArgVar(register, p, name, options...)\n\treturn p\n}", "func (j *JSONData) Uint64(path ...interface{}) (uint64, error) {\n\tjson, err := j.get(path...)\n\treturn json.MustUint64(), err\n}", "func StoreUint64(addr *uint64, val uint64)", "func (p *Stream) WriteUint64(v uint64) {\n\tif v < 10 {\n\t\tp.writeFrame[p.writeIndex] = byte(v + 54)\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else if v < 65536 {\n\t\tif p.writeIndex < streamBlockSize-3 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 9\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tp.writeIndex += 3\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t9,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t})\n\t} else if v < 4294967296 {\n\t\tif p.writeIndex < streamBlockSize-5 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 10\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tp.writeIndex += 5\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t10,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t\tbyte(v >> 16),\n\t\t\tbyte(v >> 24),\n\t\t})\n\t} else {\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 11\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t11,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t\tbyte(v >> 16),\n\t\t\tbyte(v >> 24),\n\t\t\tbyte(v >> 32),\n\t\t\tbyte(v >> 40),\n\t\t\tbyte(v >> 48),\n\t\t\tbyte(v >> 56),\n\t\t})\n\t}\n}", "func (obj *Value) SetUint64(v uint64) {\n\tobj.Candy().Guify(\"g_value_set_uint64\", obj, v)\n}", "func Uint64(k string, v uint64) Field {\n\treturn Field{Key: k, Value: valf.Uint64(v)}\n}", "func (m Measurement) AddUInt64(name string, value uint64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func WriteUint64(w io.Writer, v uint64) error {\n\tvar data [9]byte\n\tvar size int\n\tswitch {\n\tcase v <= 0x80:\n\t\tdata[0] = byte(v)\n\t\tsize = 1\n\tcase v < (1 << 8):\n\t\tdata[0] = 0x80 + 1\n\t\tdata[1] = byte(v)\n\t\tsize = 2\n\tcase v < (1 << 16):\n\t\tdata[0] = 0x80 + 2\n\t\tdata[1] = byte(v >> 8)\n\t\tdata[2] = byte(v)\n\t\tsize = 3\n\tcase v < (1 << 24):\n\t\tdata[0] = 0x80 + 
3\n\t\tdata[1] = byte(v >> 16)\n\t\tdata[2] = byte(v >> 8)\n\t\tdata[3] = byte(v)\n\t\tsize = 4\n\tcase v < (1 << 32):\n\t\tdata[0] = 0x80 + 4\n\t\tdata[1] = byte(v >> 24)\n\t\tdata[2] = byte(v >> 16)\n\t\tdata[3] = byte(v >> 8)\n\t\tdata[4] = byte(v)\n\t\tsize = 5\n\tcase v < (1 << 40):\n\t\tdata[0] = 0x80 + 5\n\t\tdata[1] = byte(v >> 32)\n\t\tdata[2] = byte(v >> 24)\n\t\tdata[3] = byte(v >> 16)\n\t\tdata[4] = byte(v >> 8)\n\t\tdata[5] = byte(v)\n\t\tsize = 6\n\tcase v < (1 << 48):\n\t\tdata[0] = 0x80 + 6\n\t\tdata[1] = byte(v >> 40)\n\t\tdata[2] = byte(v >> 32)\n\t\tdata[3] = byte(v >> 24)\n\t\tdata[4] = byte(v >> 16)\n\t\tdata[5] = byte(v >> 8)\n\t\tdata[6] = byte(v)\n\t\tsize = 7\n\tcase v < (1 << 56):\n\t\tdata[0] = 0x80 + 7\n\t\tdata[1] = byte(v >> 48)\n\t\tdata[2] = byte(v >> 40)\n\t\tdata[3] = byte(v >> 32)\n\t\tdata[4] = byte(v >> 24)\n\t\tdata[5] = byte(v >> 16)\n\t\tdata[6] = byte(v >> 8)\n\t\tdata[7] = byte(v)\n\t\tsize = 8\n\tdefault:\n\t\tdata[0] = 0x80 + 8\n\t\tdata[1] = byte(v >> 56)\n\t\tdata[2] = byte(v >> 48)\n\t\tdata[3] = byte(v >> 40)\n\t\tdata[4] = byte(v >> 32)\n\t\tdata[5] = byte(v >> 24)\n\t\tdata[6] = byte(v >> 16)\n\t\tdata[7] = byte(v >> 8)\n\t\tdata[8] = byte(v)\n\t\tsize = 9\n\t}\n\t_, err := w.Write(data[0:size])\n\treturn err\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func FormatUint64(format string, v ...any) uint64 {\n\treturn GetUint64(Format(format, v...))\n}", "func (s *EnvVarSet) Uint64(name string, value uint64, usage string) *uint64 {\n\tp := new(uint64)\n\n\ts.Uint64Var(p, name, value, usage)\n\n\treturn p\n}", "func (rng *splitMix64Source) Uint64() uint64 {\n\trng.state += 0x9E3779B97F4A7C15\n\tz := rng.state\n\tz = (z ^ (z >> 30)) * 0xBF58476D1CE4E5B9\n\tz = (z ^ (z >> 27)) * 0x94D049BB133111EB\n\treturn z ^ (z >> 31)\n}", "func Uint64Tag(name interface{}, value uint64) Tag {\n\treturn &tag{\n\t\ttagType: TagUint64,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func MeasureUInt64(name string, field string, value uint64) Measurement {\n\treturn NewMeasurement(name).AddUInt64(field, value)\n}", "func Uint64(key string, val uint64) Tag {\n\treturn Tag{key: key, tType: uint64Type, integerVal: int64(val)}\n}", "func (z *Element22) SetUint64(v uint64) *Element22 {\n\tz[0] = v\n\tz[1] = 0\n\tz[2] = 0\n\tz[3] = 0\n\tz[4] = 0\n\tz[5] = 0\n\tz[6] = 0\n\tz[7] = 0\n\tz[8] = 0\n\tz[9] = 0\n\tz[10] = 0\n\tz[11] = 0\n\tz[12] = 0\n\tz[13] = 0\n\tz[14] = 0\n\tz[15] = 0\n\tz[16] = 0\n\tz[17] = 0\n\tz[18] = 0\n\tz[19] = 0\n\tz[20] = 0\n\tz[21] = 0\n\treturn z.ToMont()\n}", "func (_e *MockWriteBufferXmlBased_Expecter) 
WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) AppendUint64(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint64\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint64), arg0, arg1)\n}", "func (sm64 *splitMix64) Uint64() uint64 {\n\tsm64.state = sm64.state + uint64(0x9E3779B97F4A7C15)\n\tz := sm64.state\n\tz = (z ^ (z >> 30)) * uint64(0xBF58476D1CE4E5B9)\n\tz = (z ^ (z >> 27)) * uint64(0x94D049BB133111EB)\n\treturn z ^ (z >> 31)\n\n}", "func (w *ByteWriter) MustWriteUint64(val uint64, offset int) int {\n\treturn w.MustWriteVal(val, offset)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Bus) Write64(addr mirv.Address, v uint64) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write64(addr-blk.s, v)\n}", "func (p *PCG64) Uint64() uint64 {\n\tp.multiply()\n\tp.add()\n\t// XOR high and low 64 bits together and rotate right by high 6 bits of state.\n\treturn bits.RotateLeft64(p.high^p.low, -int(p.high>>58))\n}", "func (o *OutputState) ApplyUint64(applier interface{}) Uint64Output {\n\treturn o.ApplyT(applier).(Uint64Output)\n}", "func (u Uint64) Uint64() uint64 {\n\treturn uint64(u)\n}", "func (ob *PyObject) Uint64() uint64 {\n\treturn uint64(C.PyLong_AsUnsignedLongLong(ob.rawptr))\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (s *Streamer) Uint64(v uint64) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint64(s.buffer, v)\n\treturn s\n}", "func Uint64(val interface{}) uint64 {\r\n\r\n\tswitch t := val.(type) {\r\n\tcase int:\r\n\t\treturn uint64(t)\r\n\tcase int8:\r\n\t\treturn uint64(t)\r\n\tcase int16:\r\n\t\treturn uint64(t)\r\n\tcase int32:\r\n\t\treturn uint64(t)\r\n\tcase int64:\r\n\t\treturn uint64(t)\r\n\tcase uint:\r\n\t\treturn uint64(t)\r\n\tcase uint8:\r\n\t\treturn uint64(t)\r\n\tcase uint16:\r\n\t\treturn uint64(t)\r\n\tcase uint32:\r\n\t\treturn uint64(t)\r\n\tcase uint64:\r\n\t\treturn uint64(t)\r\n\tcase float32:\r\n\t\treturn 
uint64(t)\r\n\tcase float64:\r\n\t\treturn uint64(t)\r\n\tcase bool:\r\n\t\tif t == true {\r\n\t\t\treturn uint64(1)\r\n\t\t}\r\n\t\treturn uint64(0)\r\n\tdefault:\r\n\t\ts := String(val)\r\n\t\ti, _ := strconv.ParseUint(s, 10, 64)\r\n\t\treturn i\r\n\t}\r\n\r\n\tpanic(\"Reached\")\r\n\r\n}", "func Uint64(v *uint64) uint64 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "func (mr *MockSessionMockRecorder) Uint64(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint64\", reflect.TypeOf((*MockSession)(nil).Uint64), arg0)\n}", "func (f *FlagSet) Uint64Var(p *uint64, name string, alias rune, value uint64, usage string, fn Callback) {\n\tf.Var(newUint64Value(value, p), name, alias, usage, fn)\n}", "func (m *MockHash64) Sum64() uint64 {\n\tret := m.ctrl.Call(m, \"Sum64\")\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func (s *State) Write64(h uint64) (err error) {\n\ts.clen += 8\n\ts.tail = append(s.tail, byte(h>>56), byte(h>>48), byte(h>>40), byte(h>>32), byte(h>>24), byte(h>>16), byte(h>>8), byte(h))\n\treturn nil\n}", "func Uint64(u *uint64) uint64 {\n\tif u == nil {\n\t\treturn 0\n\t}\n\treturn *u\n}", "func Uint64(val uint64, buf []byte) []byte {\n\treturn append(buf, strconv.FormatUint(val, 10)...)\n}", "func Uint64(key string, val uint64) Field {\n\treturn Field{Key: key, Type: core.Uint64Type, Integer: int64(val)}\n}", "func (w *Packer) PutUint64(v uint64) {\n\tbinary.LittleEndian.PutUint64(w.scratch[:], v)\n\t_, _ = w.buf.Write(w.scratch[:8])\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}" ]
[ "0.85193", "0.832276", "0.81905305", "0.7672434", "0.7267307", "0.7160702", "0.71414095", "0.70749867", "0.706679", "0.6992502", "0.6940759", "0.6856768", "0.681962", "0.6818276", "0.66986805", "0.6696274", "0.6675898", "0.6659693", "0.66294926", "0.66285926", "0.66038936", "0.6544508", "0.653398", "0.6526533", "0.6512235", "0.6510852", "0.64971966", "0.6427216", "0.6414284", "0.6356055", "0.63519406", "0.6343547", "0.62905157", "0.62733185", "0.6270049", "0.62532455", "0.6243096", "0.62381566", "0.6234224", "0.6190084", "0.6177577", "0.61689705", "0.6164242", "0.61328846", "0.60933554", "0.6077609", "0.6042927", "0.60359055", "0.6035107", "0.6027153", "0.5981422", "0.5981215", "0.59794617", "0.59278214", "0.59158736", "0.5910112", "0.590859", "0.5907999", "0.5907875", "0.59025323", "0.5899715", "0.58883303", "0.58720464", "0.5853901", "0.5847585", "0.5847061", "0.58389574", "0.5827557", "0.5819543", "0.5804373", "0.5802958", "0.58004075", "0.57957417", "0.57836324", "0.57690114", "0.57669556", "0.57641834", "0.57552344", "0.5739026", "0.5737969", "0.5734999", "0.57325673", "0.57308966", "0.57250637", "0.5725034", "0.57121867", "0.5711281", "0.57068163", "0.56957626", "0.5694623", "0.5694341", "0.5691474", "0.56912357", "0.5690239", "0.5672109", "0.567021", "0.56623775", "0.5660207", "0.56485766", "0.5645904" ]
0.85450035
0
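The mock method above and the expecter helper in the next record are two halves of the same mockery-generated API: the expecter registers the mock.On expectation, the mock method replays it. A rough usage sketch follows; it assumes the generated package also contains the usual NewMockWriteBufferJsonBased constructor, the EXPECT() accessor, and a Return method on the _Call type, none of which appear in these records.

package utils_test // hypothetical package; wherever the generated mocks live

import "testing"

// Hypothetical test wiring for the two generated pieces shown in the
// surrounding records; constructor and EXPECT() are assumed to exist.
func TestWriteUint64Expectation(t *testing.T) {
	m := NewMockWriteBufferJsonBased(t)

	// The expecter's WriteUint64 builds the underlying
	// mock.On("WriteUint64", ...) call; Return(nil) stubs a successful write.
	m.EXPECT().WriteUint64("counter", uint8(64), uint64(42)).Return(nil)

	// Calling the mock method routes through _m.Called(...) and yields the
	// configured error value.
	if err := m.WriteUint64("counter", 64, 42); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
}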
WriteUint64 is a helper method to define mock.On call logicalName string bitLength uint8 value uint64 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {
	return &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On("WriteUint64",
		append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (w *Writer) WriteUint64(data interface{}) {\n\tvar t = w.getType(data, 8)\n\n\tw.buffer[w.index] = byte(t[0])\n\tw.buffer[w.index+1] = byte(t[1])\n\tw.buffer[w.index+2] = byte(t[2])\n\tw.buffer[w.index+3] = byte(t[3])\n\tw.buffer[w.index+4] = byte(t[4])\n\tw.buffer[w.index+5] = byte(t[5])\n\tw.buffer[w.index+6] = byte(t[6])\n\tw.buffer[w.index+7] = byte(t[7])\n\tw.index += 8\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (bw *BufWriter) Uint64(val uint64) {\n\tif bw.Error != nil {\n\t\treturn\n\t}\n\t_, bw.Error = bw.writer.WriteString(strconv.FormatUint(val, 10))\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteUInt64(buffer []byte, offset int, value uint64) {\n buffer[offset + 0] = byte(value >> 0)\n buffer[offset + 1] = byte(value >> 8)\n buffer[offset + 2] = byte(value >> 16)\n buffer[offset + 3] = byte(value >> 24)\n buffer[offset + 4] = byte(value >> 32)\n buffer[offset + 5] = byte(value >> 40)\n buffer[offset + 6] = byte(value >> 48)\n buffer[offset + 7] = byte(value >> 56)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt64_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func FormatUint64(name string) string {\n\treturn formatUintFunction(name, false)\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\treturn CommandLine.Uint64(name, alias, value, usage, fn)\n}", "func (w *Writer) Uint64(n uint64) {\n\tw.buf = strconv.AppendUint(w.buf, uint64(n), 10)\n}", "func (mr *MockIOPackageMockRecorder) WriteUint64(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint64\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint64), arg0, arg1, arg2)\n}", "func (w *ByteWriter) WriteUint64(val uint64, offset int) (int, error) {\n\treturn w.WriteVal(val, offset)\n}", "func (vb *keyWriter) WriteUint64(num uint64) (int, error) {\n\tbinary.BigEndian.PutUint64(vb.buffer[:8], num)\n\tvb.hash.Write(vb.buffer[:8])\n\treturn 8, nil\n}", "func (f *FlagSet) Uint64(name string, alias rune, value uint64, usage string, fn Callback) *uint64 {\n\tp := new(uint64)\n\tf.Uint64Var(p, name, alias, value, usage, fn)\n\treturn p\n}", "func (bio *BinaryIO) WriteUint64(off int64, value uint64) {\n\tvar buf [8]byte\n\tbio.order.PutUint64(buf[:], value)\n\tbio.WriteAt(off, buf[:])\n}", "func (w *Writer) WriteUint64(num uint64) {\n\tif w.err != nil {\n\t\treturn\n\t}\n\tw.b = AppendUint(w.b, num)\n}", "func Uint64Arg(register Register, name string, options ...ArgOptionApplyer) *uint64 {\n\tp := new(uint64)\n\t_ = Uint64ArgVar(register, p, name, options...)\n\treturn p\n}", "func (m *MockSession) Uint64(arg0 string) uint64 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint64\", arg0)\n\tret0, _ := ret[0].(uint64)\n\treturn ret0\n}", "func Uint64(name string, val uint64) Field {\n\treturn Field(zap.Uint64(name, val))\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn Environment.Uint64(name, value, usage)\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\tp := new(uint64);\n\tUint64Var(p, name, value, usage);\n\treturn p;\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := 
range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteI64(p thrift.TProtocol, value int64, name string, field int16) error {\n\treturn WriteI64WithContext(context.Background(), p, value, name, field)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint64(name string, value uint64, usage string) *uint64 {\n\treturn ex.FlagSet.Uint64(name, value, usage)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (o *FakeObject) Uint64() uint64 { return o.Value.(uint64) }", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs 
{\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteUint64LE(v uint64) {\n\tout.Append(\n\t\tbyte(v),\n\t\tbyte(v>>8),\n\t\tbyte(v>>16),\n\t\tbyte(v>>24),\n\t\tbyte(v>>32),\n\t\tbyte(v>>40),\n\t\tbyte(v>>48),\n\t\tbyte(v>>56),\n\t)\n}", "func Uint64(i uint64) string {\n\t// Base 10\n\treturn strconv.FormatUint(i, 10)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m Measurement) AddUInt64(name string, value uint64) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint64Tag(name interface{}, value uint64) Tag {\n\treturn &tag{\n\t\ttagType: TagUint64,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func Uint64(flag string, value uint64, description string) *uint64 {\n\tvar v uint64\n\tUint64Var(&v, flag, value, description)\n\treturn &v\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, 
[]byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, []byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func FormatUint64(format string, v ...any) uint64 {\n\treturn GetUint64(Format(format, v...))\n}", "func WriteMemI64(mem []byte, offset int, v int64) {\n\tmem[offset] = byte(v)\n\tmem[offset+1] = byte(v >> 8)\n\tmem[offset+2] = byte(v >> 16)\n\tmem[offset+3] = byte(v >> 24)\n\tmem[offset+4] = byte(v >> 32)\n\tmem[offset+5] = byte(v >> 40)\n\tmem[offset+6] = byte(v >> 48)\n\tmem[offset+7] = byte(v >> 56)\n}", "func Uint64(val uint64, buf []byte) []byte {\n\treturn append(buf, strconv.FormatUint(val, 10)...)\n}", "func (mr *MockIOPackageMockRecorder) AppendUint64(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint64\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint64), arg0, arg1)\n}", "func (instance *Instance) SetUint64(fieldName string, value uint64) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func MeasureUInt64(name string, field string, value uint64) Measurement {\n\treturn NewMeasurement(name).AddUInt64(field, value)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func WriteUint64(w io.Writer, v uint64) error {\n\tvar data [9]byte\n\tvar size int\n\tswitch {\n\tcase v <= 0x80:\n\t\tdata[0] = byte(v)\n\t\tsize = 1\n\tcase v < (1 << 8):\n\t\tdata[0] = 0x80 + 1\n\t\tdata[1] = byte(v)\n\t\tsize = 
2\n\tcase v < (1 << 16):\n\t\tdata[0] = 0x80 + 2\n\t\tdata[1] = byte(v >> 8)\n\t\tdata[2] = byte(v)\n\t\tsize = 3\n\tcase v < (1 << 24):\n\t\tdata[0] = 0x80 + 3\n\t\tdata[1] = byte(v >> 16)\n\t\tdata[2] = byte(v >> 8)\n\t\tdata[3] = byte(v)\n\t\tsize = 4\n\tcase v < (1 << 32):\n\t\tdata[0] = 0x80 + 4\n\t\tdata[1] = byte(v >> 24)\n\t\tdata[2] = byte(v >> 16)\n\t\tdata[3] = byte(v >> 8)\n\t\tdata[4] = byte(v)\n\t\tsize = 5\n\tcase v < (1 << 40):\n\t\tdata[0] = 0x80 + 5\n\t\tdata[1] = byte(v >> 32)\n\t\tdata[2] = byte(v >> 24)\n\t\tdata[3] = byte(v >> 16)\n\t\tdata[4] = byte(v >> 8)\n\t\tdata[5] = byte(v)\n\t\tsize = 6\n\tcase v < (1 << 48):\n\t\tdata[0] = 0x80 + 6\n\t\tdata[1] = byte(v >> 40)\n\t\tdata[2] = byte(v >> 32)\n\t\tdata[3] = byte(v >> 24)\n\t\tdata[4] = byte(v >> 16)\n\t\tdata[5] = byte(v >> 8)\n\t\tdata[6] = byte(v)\n\t\tsize = 7\n\tcase v < (1 << 56):\n\t\tdata[0] = 0x80 + 7\n\t\tdata[1] = byte(v >> 48)\n\t\tdata[2] = byte(v >> 40)\n\t\tdata[3] = byte(v >> 32)\n\t\tdata[4] = byte(v >> 24)\n\t\tdata[5] = byte(v >> 16)\n\t\tdata[6] = byte(v >> 8)\n\t\tdata[7] = byte(v)\n\t\tsize = 8\n\tdefault:\n\t\tdata[0] = 0x80 + 8\n\t\tdata[1] = byte(v >> 56)\n\t\tdata[2] = byte(v >> 48)\n\t\tdata[3] = byte(v >> 40)\n\t\tdata[4] = byte(v >> 32)\n\t\tdata[5] = byte(v >> 24)\n\t\tdata[6] = byte(v >> 16)\n\t\tdata[7] = byte(v >> 8)\n\t\tdata[8] = byte(v)\n\t\tsize = 9\n\t}\n\t_, err := w.Write(data[0:size])\n\treturn err\n}", "func (out *OutBuffer) WriteUint64LE(v uint64) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 8 {\n\t\treturn false\n\t}\n\n\tbinary.LittleEndian.PutUint64(out.GetContainer(), v)\n\tout.pos += 8\n\treturn true\n}", "func Uint64(key string, val uint64) Tag {\n\treturn Tag{key: key, tType: uint64Type, integerVal: int64(val)}\n}", "func (f *FlagSet) Uint64Var(p *uint64, name string, alias rune, value uint64, usage string, fn Callback) {\n\tf.Var(newUint64Value(value, p), name, alias, usage, fn)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (z *Rat) SetUint64(x uint64) *Rat {}", "func (m *Mmap) WriteUint64(start int64, val uint64) error {\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[start:start+8], val)\n\treturn nil\n}", "func (o *OutputState) ApplyUint64(applier interface{}) Uint64Output {\n\treturn o.ApplyT(applier).(Uint64Output)\n}", "func marshalUint64(dst []byte, u uint64) []byte {\n\treturn append(dst, byte(u>>56), byte(u>>48), byte(u>>40), byte(u>>32), byte(u>>24), byte(u>>16), byte(u>>8), byte(u))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func marshallAbiUint64(value interface{}) (uint64, error) {\n\tabiType, err := abi.TypeOf(\"uint64\")\n\tif err != nil 
{\n\t\treturn 0, err\n\t}\n\tencoded, err := abiType.Encode(value)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tdecoded, err := abiType.Decode(encoded)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tmarshalledValue, ok := decoded.(uint64)\n\tif !ok {\n\t\terr = fmt.Errorf(\"Decoded value is not a uint64\")\n\t}\n\treturn marshalledValue, err\n}", "func (p *Stream) WriteUint64(v uint64) {\n\tif v < 10 {\n\t\tp.writeFrame[p.writeIndex] = byte(v + 54)\n\t\tp.writeIndex++\n\t\tif p.writeIndex == streamBlockSize {\n\t\t\tp.gotoNextWriteFrame()\n\t\t}\n\t} else if v < 65536 {\n\t\tif p.writeIndex < streamBlockSize-3 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 9\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tp.writeIndex += 3\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t9,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t})\n\t} else if v < 4294967296 {\n\t\tif p.writeIndex < streamBlockSize-5 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 10\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tp.writeIndex += 5\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t10,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t\tbyte(v >> 16),\n\t\t\tbyte(v >> 24),\n\t\t})\n\t} else {\n\t\tif p.writeIndex < streamBlockSize-9 {\n\t\t\tb := p.writeFrame[p.writeIndex:]\n\t\t\tb[0] = 11\n\t\t\tb[1] = byte(v)\n\t\t\tb[2] = byte(v >> 8)\n\t\t\tb[3] = byte(v >> 16)\n\t\t\tb[4] = byte(v >> 24)\n\t\t\tb[5] = byte(v >> 32)\n\t\t\tb[6] = byte(v >> 40)\n\t\t\tb[7] = byte(v >> 48)\n\t\t\tb[8] = byte(v >> 56)\n\t\t\tp.writeIndex += 9\n\t\t\treturn\n\t\t}\n\t\tp.PutBytes([]byte{\n\t\t\t11,\n\t\t\tbyte(v),\n\t\t\tbyte(v >> 8),\n\t\t\tbyte(v >> 16),\n\t\t\tbyte(v >> 24),\n\t\t\tbyte(v >> 32),\n\t\t\tbyte(v >> 40),\n\t\t\tbyte(v >> 48),\n\t\t\tbyte(v >> 56),\n\t\t})\n\t}\n}", "func Uint64(k string, v uint64) Field {\n\treturn Field{Key: k, Value: valf.Uint64(v)}\n}", "func (s *State) Write64(h uint64) (err error) {\n\ts.clen += 8\n\ts.tail = append(s.tail, byte(h>>56), byte(h>>48), byte(h>>40), byte(h>>32), byte(h>>24), byte(h>>16), byte(h>>8), byte(h))\n\treturn nil\n}", "func opUI64ToStr(prgrm *CXProgram) {\n\texpr := prgrm.GetExpr()\n\tfp := prgrm.GetFramePointer()\n\n\toutB0 := FromStr(strconv.FormatUint(ReadUI64(fp, expr.Inputs[0]), 10))\n\tWriteObject(GetFinalOffset(fp, expr.Outputs[0]), outB0)\n}", "func (b *Buffer) AppendUint64(v uint64) {\n\t//str := strconv.FormatUint(v, 10)\n\t//b.AppendString(str)\n\tb.buf = strconv.AppendUint(b.buf, v, 10)\n}", "func (s *EnvVarSet) Uint64(name string, value uint64, usage string) *uint64 {\n\tp := new(uint64)\n\n\ts.Uint64Var(p, name, value, usage)\n\n\treturn p\n}", "func Test_JsonSerializationUint64(t *testing.T) {\n\ttv := NewTypedValueUint(testPositiveInt64, 64)\n\n\tjsonStr, err := json.Marshal(tv)\n\tassert.NilError(t, err)\n\n\tassert.Equal(t, string(jsonStr), `{\"Bytes\":\"f/////////8=\",\"Type\":3,\"TypeOpts\":[64]}`)\n\n\tunmarshalledTv := TypedValue{}\n\terr = json.Unmarshal(jsonStr, &unmarshalledTv)\n\tassert.NilError(t, err)\n\n\tassert.Equal(t, ValueType_UINT, unmarshalledTv.Type)\n\tassert.Equal(t, len(unmarshalledTv.TypeOpts), 1)\n\tassert.DeepEqual(t, unmarshalledTv.Bytes, []byte{0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff})\n\n\tstrVal := (*TypedUint)(&unmarshalledTv).String()\n\tassert.Equal(t, fmt.Sprintf(\"%d\", testPositiveInt64), strVal)\n\tassert.Equal(t, unmarshalledTv.ValueToString(), \"9223372036854775807\")\n}", "func (_e *MockWriteBufferJsonBased_Expecter) 
WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint64(name string, defaultValue uint64) uint64 {\n\tif strVal, ok := os.LookupEnv(name); ok {\n\t\tif i64, err := strconv.ParseUint(strVal, 10, 64); err == nil {\n\t\t\treturn i64\n\t\t}\n\t}\n\n\treturn defaultValue\n}", "func (j *JSONData) Uint64(path ...interface{}) (uint64, error) {\n\tjson, err := j.get(path...)\n\treturn json.MustUint64(), err\n}", "func StoreUint64(addr *uint64, val uint64)", "func (obj *Value) SetUint64(v uint64) {\n\tobj.Candy().Guify(\"g_value_set_uint64\", obj, v)\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint64(val interface{}) uint64 {\r\n\r\n\tswitch t := val.(type) {\r\n\tcase int:\r\n\t\treturn uint64(t)\r\n\tcase int8:\r\n\t\treturn uint64(t)\r\n\tcase int16:\r\n\t\treturn uint64(t)\r\n\tcase int32:\r\n\t\treturn uint64(t)\r\n\tcase int64:\r\n\t\treturn uint64(t)\r\n\tcase uint:\r\n\t\treturn uint64(t)\r\n\tcase uint8:\r\n\t\treturn uint64(t)\r\n\tcase uint16:\r\n\t\treturn uint64(t)\r\n\tcase uint32:\r\n\t\treturn uint64(t)\r\n\tcase uint64:\r\n\t\treturn uint64(t)\r\n\tcase float32:\r\n\t\treturn uint64(t)\r\n\tcase float64:\r\n\t\treturn uint64(t)\r\n\tcase bool:\r\n\t\tif t == true {\r\n\t\t\treturn uint64(1)\r\n\t\t}\r\n\t\treturn uint64(0)\r\n\tdefault:\r\n\t\ts := String(val)\r\n\t\ti, _ := strconv.ParseUint(s, 10, 64)\r\n\t\treturn i\r\n\t}\r\n\r\n\tpanic(\"Reached\")\r\n\r\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (b *Builder) AddUint64(v uint64) {\n\tb.add(byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), byte(v))\n}", "func (s *Streamer) Uint64(v uint64) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint64(s.buffer, v)\n\treturn s\n}", "func EncodeUint64(w io.Writer, u uint64) error {\n\tbuf := getBuf(8)\n\tdefer putBuf(buf)\n\n\tbinary.BigEndian.PutUint64(buf, u)\n\t_, err := w.Write(buf)\n\treturn err\n}", "func Uint64Var(p *uint64, name string, value uint64, usage string) {\n\tadd(name, newUint64Value(value, p), usage)\n}", "func Uint64Var(p *uint64, name string, value uint64, usage string) {\n\tEnvironment.Uint64Var(p, name, value, usage)\n}", "func (z *Element22) SetUint64(v uint64) *Element22 {\n\tz[0] = v\n\tz[1] = 0\n\tz[2] = 0\n\tz[3] = 0\n\tz[4] = 0\n\tz[5] = 0\n\tz[6] = 0\n\tz[7] = 0\n\tz[8] = 0\n\tz[9] = 0\n\tz[10] = 0\n\tz[11] = 
0\n\tz[12] = 0\n\tz[13] = 0\n\tz[14] = 0\n\tz[15] = 0\n\tz[16] = 0\n\tz[17] = 0\n\tz[18] = 0\n\tz[19] = 0\n\tz[20] = 0\n\tz[21] = 0\n\treturn z.ToMont()\n}", "func (enc *Encoder) EncodeUInt64(v uint64) (int, error) {\n\tdata := make([]byte, 8)\n\tbinary.LittleEndian.PutUint64(data, v)\n\treturn enc.w.Write(data)\n}", "func (id *Id) Uint64() uint64 {\n\treturn uint64(*id)\n}", "func (m *Mmap) AppendUint64(val uint64) error {\n\tif err := m.checkFilePointerOutOfRange(8); err != nil {\n\t\treturn err\n\t}\n\tbinary.LittleEndian.PutUint64(m.MmapBytes[m.FilePointer:m.FilePointer+8], val)\n\tm.FilePointer += 8\n\treturn nil\n}", "func Uint64Var(p *uint64, name string, value uint64, usage string) {\n\tex.FlagSet.Uint64Var(p, name, value, usage)\n}", "func Uint64(key string, val uint64) Field {\n\treturn Field{Key: key, Type: core.Uint64Type, Integer: int64(val)}\n}", "func Uint64Var(p *uint64, name string, alias rune, value uint64, usage string, fn Callback) {\n\tCommandLine.Var(newUint64Value(value, p), name, alias, usage, fn)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (c *SChUInt64) ProvideUInt64(dat uint64) {\n\t// .req\n\tc.dat <- dat\n}", "func (mr *MockSessionMockRecorder) Uint64(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint64\", reflect.TypeOf((*MockSession)(nil).Uint64), arg0)\n}", "func (rng *splitMix64Source) Uint64() uint64 {\n\trng.state += 0x9E3779B97F4A7C15\n\tz := rng.state\n\tz = (z ^ (z >> 30)) * 0xBF58476D1CE4E5B9\n\tz = (z ^ (z >> 27)) * 0x94D049BB133111EB\n\treturn z ^ (z >> 31)\n}", "func (p *PCG64) Uint64() uint64 {\n\tp.multiply()\n\tp.add()\n\t// XOR high and low 64 bits together and rotate right by high 6 bits of state.\n\treturn bits.RotateLeft64(p.high^p.low, -int(p.high>>58))\n}", "func (u Uint64) Uint64() uint64 {\n\treturn uint64(u)\n}", "func Uint64ToString(v string) predicate.Conversion {\n\treturn predicate.Conversion(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldUint64ToString), v))\n\t})\n}" ]
[ "0.8037087", "0.78348166", "0.78272307", "0.70157623", "0.68654644", "0.676529", "0.676307", "0.675254", "0.6725309", "0.66998875", "0.6677445", "0.66148156", "0.64969784", "0.6493965", "0.6483434", "0.6407828", "0.63746357", "0.63491404", "0.6345175", "0.6337199", "0.63273054", "0.6324231", "0.6317571", "0.6293168", "0.62839925", "0.62725145", "0.6260923", "0.6251924", "0.6240489", "0.6231773", "0.6206282", "0.620559", "0.61452234", "0.61442184", "0.61402607", "0.61353517", "0.6133257", "0.6107188", "0.60870147", "0.6070827", "0.6029936", "0.6024514", "0.6009221", "0.59670115", "0.59562093", "0.5944508", "0.59192187", "0.5911915", "0.5911329", "0.5910167", "0.5891612", "0.58850145", "0.58715755", "0.5869763", "0.586151", "0.5860248", "0.5850761", "0.5814167", "0.57964605", "0.5775165", "0.57740164", "0.5768542", "0.5758585", "0.5756636", "0.5754505", "0.5732689", "0.5729686", "0.5729319", "0.5724234", "0.5710538", "0.5709539", "0.570566", "0.570499", "0.5698659", "0.56971085", "0.5688454", "0.568192", "0.56807095", "0.5673396", "0.5663553", "0.5659162", "0.56554174", "0.56410456", "0.5634308", "0.56311274", "0.5600997", "0.5587827", "0.55839205", "0.55760324", "0.5575205", "0.5570664", "0.5569135", "0.5566079", "0.5566012", "0.5565226", "0.5560386", "0.55602676", "0.5556323", "0.5553606", "0.55520076" ]
0.81401455
0
WriteUint8 provides a mock function with given fields: logicalName, bitLength, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, logicalName, bitLength, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok { r0 = rf(logicalName, bitLength, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Uint8(arg0 string) byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint8\", arg0)\n\tret0, _ := ret[0].(byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, 
uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v 
uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) WriteUint8(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint8\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint8), arg0, arg1, arg2)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func 
WriteUInt8(buffer []byte, offset int, value uint8) {\n buffer[offset] = byte(value)\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestUint8(t *testing.T) {\n\ttests := []struct {\n\t\tin uint8 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01}},\n\t\t{128, []byte{0x80}},\n\t\t{255, []byte{0xff}},\n\t}\n\n\tt.Logf(\"Running uint8 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint8(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint8\n\t\terr = ReadUint8(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (m *MockIOPackage) ReadUint8(arg0 []byte, arg1 int) (uint8, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint8\", arg0, arg1)\n\tret0, _ := ret[0].(uint8)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (util *MarshalUtil) WriteUint8(value uint8) *MarshalUtil {\n\twriteEndOffset := util.expandWriteCapacity(Uint8Size)\n\n\tutil.bytes[util.writeOffset] = value\n\n\tutil.WriteSeek(writeEndOffset)\n\n\treturn util\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = 
append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func ExpectUint8(t *testing.T, field string, expected uint8, found uint8) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func WriteUint8(w io.Writer, v uint8) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (bio *BinaryIO) WriteUint8(off int64, value uint8) {\n\tbuf := []byte{value}\n\tbio.WriteAt(off, buf[:])\n}", "func (out *OutBuffer) WriteUint8(v uint8) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 1 {\n\t\treturn false\n\t}\n\n\tcontainer[0] = byte(v)\n\tout.pos += 1\n\treturn true\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok 
{\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\treturn gopi.ErrNotImplemented\n\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\tthis.Mutex.Lock()\n\tdefer this.Mutex.Unlock()\n\n\tif device, err := this.Open(bus); err != nil {\n\t\treturn err\n\t} else {\n\t\treturn linux.I2CWriteUint8(device.Fd(), reg, value, linux.I2CFunction(device.funcs))\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (out *OutBuffer) WriteUint8(v uint8) {\n\tout.Append(byte(v))\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn 
&MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteByte(v byte) {\n\t_m.ctrl.Call(_m, \"WriteByte\", v)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (stream *Stream) WriteUint8(val uint8) {\n\tstream.ensure(3)\n\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockSession) Uint(arg0 string) uint {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint\", arg0)\n\tret0, _ := ret[0].(uint)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m Measurement) AddUInt8(name string, value uint8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func (m *MockIOPackage) AppendUint64(arg0 []byte, arg1 uint64) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint64\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) 
*MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *MockOStream) WriteBytes(bytes []byte) {\n\t_m.ctrl.Call(_m, \"WriteBytes\", bytes)\n}", "func TestWriteBits(t *testing.T) {\n\tt.Run(\"NonMSB\", func(t *testing.T) {\n\t\tb := &Buffer{}\n\n\t\t// having empty buffered MSB.\n\t\tn, err := b.WriteBits(0xb, 4)\n\t\trequire.NoError(t, err)\n\t\tassert.Zero(t, n)\n\n\t\tassert.Len(t, b.data, 1)\n\t\tassert.Equal(t, byte(0xb), b.data[0])\n\n\t\tn, err = b.WriteBits(0xdf, 8)\n\t\trequire.NoError(t, err)\n\t\tassert.Equal(t, 1, n)\n\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xd), b.data[1])\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t}\n\t})\n\n\tt.Run(\"MSB\", func(t *testing.T) {\n\t\tb := BufferedMSB()\n\n\t\tn, err := b.WriteBits(0xf, 4)\n\t\trequire.NoError(t, err)\n\n\t\tassert.Zero(t, n)\n\n\t\t// the output now should be\n\t\t// 11110000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 1) {\n\t\t\tassert.Equal(t, byte(0xf0), b.data[0], \"%08b\", b.data[0])\n\t\t}\n\n\t\t// write 10111 = 0x17, 5\n\t\tn, err = b.WriteBits(0x17, 5)\n\t\trequire.NoError(t, err)\n\n\t\t// current output should be\n\t\t// 11111011 10000000\n\t\t// ^\n\t\tif assert.Len(t, b.data, 2) {\n\t\t\tassert.Equal(t, byte(0xfb), b.data[0])\n\t\t\tassert.Equal(t, byte(0x80), b.data[1])\n\t\t\tassert.Equal(t, uint8(1), b.bitIndex)\n\t\t}\n\t})\n}", "func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func TestBufferWriteBit(t *testing.T) {\n\tt.Run(\"Valid\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\t// 10010011 11000111\n\t\t// 0x93 \t0xC7\n\t\tbits := []int{1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1}\n\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tassert.Equal(t, byte(0xC7), buf.data[0], \"expected: %08b, is: %08b\", 0xc7, buf.data[0])\n\t\tassert.Equal(t, byte(0x93), buf.data[1], \"expected: %08b, is: %08b\", 0x93, buf.data[1])\n\t})\n\n\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\tt.Run(\"Empty\", func(t *testing.T) {\n\t\t\tbuf := &Buffer{}\n\t\t\t// fill thee buffer with 3 bits\n\t\t\tfor i := 0; i < 3; i++ {\n\t\t\t\terr := buf.WriteBit(int(0))\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// bits 11101\n\t\t\tbits := []int{1, 1, 1, 0, 1}\n\t\t\tfor i := len(bits) - 1; i > -1; i-- {\n\t\t\t\tbit := bits[i]\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11101000 - 0xe8\n\t\t\tassert.Equal(t, byte(0xe8), buf.data[0])\n\t\t})\n\t})\n\n\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\t\trequire.NoError(t, buf.WriteByte(0x00))\n\n\t\t// write 8 bits that should look like a byte 0xe3\n\t\t// 11100011 - 0xe3\n\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\tfor i := len(bits) - 1; i > 
-1; i-- {\n\t\t\tbit := bits[i]\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\t\tassert.Equal(t, 2, len(buf.data))\n\t\tassert.Equal(t, byte(0xe3), buf.data[1])\n\n\t\t// there should be no error on writing additional byte.\n\t\tassert.NoError(t, buf.WriteByte(0x00))\n\t})\n\n\tt.Run(\"Finished\", func(t *testing.T) {\n\t\tbuf := &Buffer{}\n\n\t\t// write some bits to the first byte.\n\t\tfirstBits := []int{1, 0, 1}\n\t\tfor _, bit := range firstBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\t// finish this byte\n\t\tbuf.FinishByte()\n\t\tsecondBits := []int{1, 0, 1}\n\n\t\t// write some bits to the second byte.\n\t\tfor _, bit := range secondBits {\n\t\t\terr := buf.WriteBit(bit)\n\t\t\trequire.NoError(t, err)\n\t\t}\n\n\t\tif assert.Len(t, buf.data, 2) {\n\t\t\t// 00000101 - 0x05\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[0])\n\t\t\tassert.Equal(t, byte(0x05), buf.Data()[1])\n\t\t}\n\t})\n\n\tt.Run(\"Inverse\", func(t *testing.T) {\n\t\tt.Run(\"Valid\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// \t10010111 10101100\n\t\t\t//\t0x97\t 0xac\n\t\t\tbits := []int{1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0}\n\n\t\t\t// write all the bits\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\texpected := byte(0x97)\n\t\t\tassert.Equal(t, expected, w.data[0], \"expected: %08b is: %08b\", expected, w.data[0])\n\t\t\texpected = byte(0xac)\n\t\t\tassert.Equal(t, expected, w.data[1], \"expected: %08b is: %08b\", expected, w.data[1])\n\t\t})\n\n\t\tt.Run(\"ByteShifted\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\t\t\terr := buf.WriteByte(0x00)\n\t\t\trequire.NoError(t, err)\n\n\t\t\t// 11100011 - 0xe3\n\t\t\tbits := []int{1, 1, 1, 0, 0, 0, 1, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := buf.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\tassert.Equal(t, byte(0xe3), buf.data[1], \"expected: %08b, is: %08b\", byte(0xe3), buf.data[1])\n\t\t})\n\n\t\tt.Run(\"BitShifted\", func(t *testing.T) {\n\t\t\tw := BufferedMSB()\n\n\t\t\t// 0xE0 - 11100000\n\t\t\terr := w.WriteByte(0xE0)\n\t\t\trequire.NoError(t, err)\n\n\t\t\tw.bitIndex = 5\n\t\t\tw.byteIndex = 0\n\n\t\t\tbits := []int{1, 0, 1, 0, 1}\n\t\t\tfor _, bit := range bits {\n\t\t\t\terr := w.WriteBit(bit)\n\t\t\t\trequire.NoError(t, err)\n\t\t\t}\n\n\t\t\t// should be 11100101 01000000 ...\n\t\t\t//\t\t\t 0xE5\t 0x40\n\t\t\tassert.Equal(t, byte(0xE5), w.data[0], \"expected: %08b, is: %08b\", byte(0xE5), w.data[0])\n\t\t\tassert.Equal(t, byte(0x40), w.data[1], \"expected: %08b, is: %08b\", byte(0x40), w.data[1])\n\t\t})\n\n\t\tt.Run(\"Finished\", func(t *testing.T) {\n\t\t\tbuf := BufferedMSB()\n\n\t\t\t// write some bits to the first byte\n\t\t\tfirstBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range firstBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\t\t\t// finish the byte\n\t\t\tbuf.FinishByte()\n\n\t\t\t// write bits to the second byte.\n\t\t\tsecondBits := []int{1, 0, 1}\n\t\t\tfor _, bit := range secondBits {\n\t\t\t\trequire.NoError(t, buf.WriteBit(bit))\n\t\t\t}\n\n\t\t\tif assert.Len(t, buf.Data(), 2) {\n\t\t\t\t// 10100000 - 0xa0\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[0])\n\t\t\t\tassert.Equal(t, byte(0xa0), buf.Data()[1])\n\t\t\t}\n\t\t})\n\t})\n}", "func (m *MockIOPackage) AppendUint16(arg0 []byte, arg1 uint16) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint16\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn 
ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint8(colName string) sif.GenericColumnAccessor[uint8] {\n\treturn sif.CreateColumnAccessor[uint8](&uint8Type{}, colName)\n}", "func FormatUint8(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func MeasureUInt8(name string, field string, value uint8) Measurement {\n\treturn NewMeasurement(name).AddUInt8(field, value)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (instance *Instance) SetUint8(fieldName string, value uint8) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mr *MockSessionMockRecorder) Uint8(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint8\", reflect.TypeOf((*MockSession)(nil).Uint8), arg0)\n}", "func TestUint64(t *testing.T) {\n\ttests := []struct {\n\t\tin uint64 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min single byte\n\t\t{255, []byte{0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max single byte\n\t\t{256, 
[]byte{0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00}}, // Min 2-byte\n\t\t{65535, []byte{0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00}}, // Max 2-byte\n\t\t{0x10000, []byte{0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00}}, // Min 4-byte\n\t\t{0xffffffff, []byte{0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00}}, // Max 4-byte\n\t\t{0x100000000, []byte{0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00}}, // Min 8-byte\n\t\t{0xffffffffffffffff, []byte{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff}}, // Max 8-byte\n\t}\n\n\tt.Logf(\"Running uint64 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint64(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint64\n\t\terr = ReadUint64(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint64 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint64 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func Uint8(k string, v uint8) Field {\n\treturn Field{Key: k, Value: valf.Uint8(v)}\n}", "func (fw *Writer) PutUint8Field(addr biopb.Coord, v byte) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutUint8(v)\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (w *Writer) WriteBits8(b byte, n uint) error {\n\t//fmt.Printf(\"b: %d, n: %d, free: %d\\n\", b, n, w.free)\n\tif n == 0 {\n\t\treturn nil\n\t}\n\n\tif n > 8 {\n\t\tn = 8\n\t}\n\n\tvar rem uint\n\tif n > w.free {\n\t\trem = n - w.free\n\t\tn = w.free\n\t}\n\n\tw.free -= n\n\tw.bits <<= n\n\tw.bits |= ((b >> rem) & (1<<n - 1))\n\n\tvar err error\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\tif rem == 0 {\n\t\treturn err\n\t}\n\n\treturn w.WriteBits8(b, rem)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (mr *MockIOPackageMockRecorder) AppendUint8(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint8\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint8), arg0, arg1)\n}", "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, 
ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func Uint8(name string, value uint8, usage string) *uint8 {\n\treturn Environment.Uint8(name, value, usage)\n}", "func (e *encoder) writeUint(val uint64, size int) {\n\te.head = align(e.head, size)\n\tfor i := e.head; i < e.head+size; i++ {\n\t\te.buffer[i] = byte(val & 0xFF)\n\t\tval >>= 8\n\t}\n\te.head += size\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func testBinaryUVWI(t *testing.T, value int, buf []byte) {\n\t// ensure works for both io.Reader & io.Writer, and io.ByteReader & io.ByteWriter\n\ttest := func(t *testing.T, value int, buf []byte, scratch testBuffer) {\n\t\tvin := UVWI(value)\n\t\tvar vout UVWI\n\n\t\tif err := vin.MarshalBinaryTo(scratch); err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\n\t\tif sb, ok := scratch.(testBytes); ok {\n\t\t\tif actual, expected := sb.Bytes(), buf; !bytes.Equal(actual, expected) {\n\t\t\t\tt.Errorf(\"Actual: %#v; Expected: %#v\", actual, expected)\n\t\t\t}\n\t\t}\n\n\t\tif err := vout.UnmarshalBinaryFrom(scratch); err != nil {\n\t\t\tt.Error(err)\n\t\t}\n\n\t\tif actual, expected := vout, vin; actual != expected {\n\t\t\tt.Errorf(\"Actual: %#v; Expected: %#v\", actual, expected)\n\t\t}\n\t}\n\n\ttest(t, value, buf, new(buffer.Buffer))\n\ttest(t, value, buf, new(bytes.Buffer))\n}", "func (m *Message) putUint8(v uint8) {\n\tb := m.bufferForPut(1)\n\tdefer b.Advance(1)\n\n\tb.Bytes[b.Offset] = v\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) 
*MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func Uint8Arg(register Register, name string, options ...ArgOptionApplyer) *uint8 {\n\tp := new(uint8)\n\t_ = Uint8ArgVar(register, p, name, options...)\n\treturn p\n}" ]
[ "0.83723766", "0.8204358", "0.79622126", "0.74771273", "0.7151462", "0.69026244", "0.68276036", "0.681815", "0.66977566", "0.6639902", "0.65958315", "0.65688086", "0.64925665", "0.64411825", "0.6438674", "0.63253427", "0.631305", "0.63015056", "0.62580055", "0.6221589", "0.6214858", "0.6173321", "0.6112282", "0.61040294", "0.6084106", "0.607689", "0.60733026", "0.60518986", "0.59880036", "0.59737945", "0.593709", "0.59203565", "0.5903965", "0.58941156", "0.588561", "0.58650726", "0.5816461", "0.58125293", "0.5782648", "0.5740708", "0.5739157", "0.57125163", "0.57026875", "0.5665134", "0.5658366", "0.56472635", "0.5626207", "0.5608864", "0.55872965", "0.555654", "0.5550523", "0.5534469", "0.55317295", "0.5525164", "0.5511887", "0.55081505", "0.5452007", "0.54490054", "0.54468274", "0.5407491", "0.54064775", "0.5400207", "0.53868425", "0.5378317", "0.5344391", "0.5343691", "0.5339595", "0.53374404", "0.53124374", "0.5305099", "0.52991366", "0.52958524", "0.52953106", "0.52931255", "0.529283", "0.52789354", "0.527814", "0.5275177", "0.5230027", "0.52165097", "0.52121437", "0.5206725", "0.52019143", "0.51975656", "0.51972646", "0.5189055", "0.51757944", "0.5163271", "0.51632273", "0.51609135", "0.51495194", "0.51276076", "0.5127206", "0.5116572", "0.5114223", "0.51073354", "0.5103664", "0.5101064", "0.5094155", "0.50861067" ]
0.85018086
0
WriteUint8 is a helper method to define mock.On call logicalName string bitLength uint8 value uint8 writerArgs ...WithWriterArgs
func (_e *MockWriteBufferJsonBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint8_Call { return &MockWriteBufferJsonBased_WriteUint8_Call{Call: _e.mock.On("WriteUint8", append([]interface{}{logicalName, bitLength, value}, writerArgs...)...)} }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint8_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint8_Call{Call: _e.mock.On(\"WriteUint8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt8_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) WriteUint8(arg0 []byte, arg1 int, arg2 uint8) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint8\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteInt8(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteInt8_Call {\n\treturn &MockWriteBufferXmlBased_WriteInt8_Call{Call: _e.mock.On(\"WriteInt8\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, 
...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockIOPackage) AppendUint8(arg0 []byte, arg1 uint8) []byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"AppendUint8\", arg0, arg1)\n\tret0, _ := ret[0].([]byte)\n\treturn ret0\n}", "func (m *MockSession) Uint8(arg0 string) byte {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Uint8\", arg0)\n\tret0, _ := ret[0].(byte)\n\treturn ret0\n}", "func WriteUInt8(buffer []byte, offset int, value uint8) {\n buffer[offset] = byte(value)\n}", "func (mr *MockIOPackageMockRecorder) WriteUint8(arg0, arg1, arg2 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"WriteUint8\", reflect.TypeOf((*MockIOPackage)(nil).WriteUint8), arg0, arg1, arg2)\n}", "func WriteUint8(w io.Writer, v uint8) error {\n\treturn WriteUint64(w, uint64(v))\n}", "func (util *MarshalUtil) WriteUint8(value uint8) *MarshalUtil {\n\twriteEndOffset := util.expandWriteCapacity(Uint8Size)\n\n\tutil.bytes[util.writeOffset] = value\n\n\tutil.WriteSeek(writeEndOffset)\n\n\treturn util\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (bio *BinaryIO) WriteUint8(off int64, value uint8) {\n\tbuf := []byte{value}\n\tbio.WriteAt(off, buf[:])\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\tthis.Mutex.Lock()\n\tdefer this.Mutex.Unlock()\n\n\tif device, err := this.Open(bus); err != nil {\n\t\treturn err\n\t} else {\n\t\treturn linux.I2CWriteUint8(device.Fd(), reg, value, linux.I2CFunction(device.funcs))\n\t}\n}", "func (out *OutBuffer) WriteUint8(v uint8) {\n\tout.Append(byte(v))\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func FormatUint8(name string) string {\n\treturn formatUintFunction(name, true)\n}", "func (this *i2c) WriteUint8(bus gopi.I2CBus, reg, value uint8) error {\n\treturn gopi.ErrNotImplemented\n\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) 
error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (stream *Stream) WriteUint8(val uint8) {\n\tstream.ensure(3)\n\tstream.n = writeFirstBuf(stream.buf, digits[val], stream.n)\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (out *OutBuffer) WriteUint8(v uint8) bool {\n\tcontainer := out.GetContainer()\n\tif len(container) < 1 {\n\t\treturn false\n\t}\n\n\tcontainer[0] = byte(v)\n\tout.pos += 1\n\treturn true\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, 
value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m Measurement) AddUInt8(name string, value uint8) Measurement {\n\tm.fieldSet[name] = value\n\treturn m\n}", "func ExpectUint8(t *testing.T, field string, expected uint8, found uint8) {\n\tif expected != found {\n\t\tt.Errorf(\"%s [%d], found '%d'\", field, expected, found)\n\t}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint32_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint16_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint8Arg(register Register, name string, options ...ArgOptionApplyer) *uint8 {\n\tp := new(uint8)\n\t_ = Uint8ArgVar(register, p, name, options...)\n\treturn p\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByte_Call {\n\treturn &MockWriteBufferJsonBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func Uint8(colName string) sif.GenericColumnAccessor[uint8] {\n\treturn sif.CreateColumnAccessor[uint8](&uint8Type{}, colName)\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m 
*MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteByteArray_Call {\n\treturn &MockWriteBufferJsonBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (instance *Instance) SetUint8(fieldName string, value uint8) error {\n\tfieldNameCStr := C.CString(fieldName)\n\tdefer C.free(unsafe.Pointer(fieldNameCStr))\n\n\tretcode := int(C.RTI_Connector_set_number_into_samples(unsafe.Pointer(instance.output.connector.native), instance.output.nameCStr, fieldNameCStr, C.double(value)))\n\treturn checkRetcode(retcode)\n}", "func Uint8(name string, value uint8, usage string) *uint8 {\n\treturn Environment.Uint8(name, value, usage)\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint64_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func MeasureUInt8(name string, field string, value uint8) Measurement {\n\treturn NewMeasurement(name).AddUInt8(field, value)\n}", "func TestUint8(t *testing.T) {\n\ttests := []struct {\n\t\tin uint8 // Value to encode\n\t\tbuf []byte // serialized\n\t}{\n\t\t{1, []byte{0x01}},\n\t\t{128, []byte{0x80}},\n\t\t{255, []byte{0xff}},\n\t}\n\n\tt.Logf(\"Running uint8 %d tests\", len(tests))\n\tfor i, test := range tests {\n\t\tvar buf bytes.Buffer\n\t\terr := WriteUint8(&buf, test.in)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"WriteUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif !bytes.Equal(buf.Bytes(), test.buf) {\n\t\t\tt.Errorf(\"WriteUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tbuf.Bytes(), test.buf)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Read from 
protos format.\n\t\trbuf := bytes.NewReader(test.buf)\n\t\tvar val uint8\n\t\terr = ReadUint8(rbuf, &val)\n\t\tif err != nil {\n\t\t\tt.Errorf(\"ReadUint8 #%d error %v\", i, err)\n\t\t\tcontinue\n\t\t}\n\t\tif val != test.in {\n\t\t\tt.Errorf(\"ReadUint8 #%d\\n got: %v want: %v\", i,\n\t\t\t\tval, test.in)\n\t\t\tcontinue\n\t\t}\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteUint32(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint32_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint32_Call{Call: _e.mock.On(\"WriteUint32\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (mr *MockIOPackageMockRecorder) AppendUint8(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AppendUint8\", reflect.TypeOf((*MockIOPackage)(nil).AppendUint8), arg0, arg1)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func Uint8Tag(name interface{}, value uint8) Tag {\n\treturn &tag{\n\t\ttagType: TagUint8,\n\t\tname: name,\n\t\tvalue: value,\n\t}\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByteArray(logicalName interface{}, data interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByteArray_Call {\n\treturn &MockWriteBufferXmlBased_WriteByteArray_Call{Call: _e.mock.On(\"WriteByteArray\",\n\t\tappend([]interface{}{logicalName, data}, writerArgs...)...)}\n}", "func (m *memory) Write8(adr uint16, val uint8) {\n\tm.ram[adr] = val\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteByte(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteByte_Call {\n\treturn &MockWriteBufferXmlBased_WriteByte_Call{Call: _e.mock.On(\"WriteByte\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (m *MockIOPackage) ReadUint8(arg0 []byte, arg1 int) (uint8, int, bool) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ReadUint8\", arg0, arg1)\n\tret0, _ := ret[0].(uint8)\n\tret1, _ := ret[1].(int)\n\tret2, _ := ret[2].(bool)\n\treturn ret0, ret1, ret2\n}", "func 
(_e *MockWriteBufferXmlBased_Expecter) WriteUint16(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteUint16_Call {\n\treturn &MockWriteBufferXmlBased_WriteUint16_Call{Call: _e.mock.On(\"WriteUint16\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func WriteInt8(buffer []byte, offset int, value int8) {\n buffer[offset] = byte(value)\n}", "func (mr *MockSessionMockRecorder) Uint8(arg0 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Uint8\", reflect.TypeOf((*MockSession)(nil).Uint8), arg0)\n}", "func Uint8(k string, v uint8) Field {\n\treturn Field{Key: k, Value: valf.Uint8(v)}\n}", "func (m *Message) putUint8(v uint8) {\n\tb := m.bufferForPut(1)\n\tdefer b.Advance(1)\n\n\tb.Bytes[b.Offset] = v\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteBit_Call {\n\treturn &MockWriteBufferXmlBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func Uint8(key string, val uint8) Tag {\n\treturn Tag{key: key, tType: uint8Type, integerVal: int64(val)}\n}", "func (b *Builder) AddUint8(v uint8) {\n\tb.add(byte(v))\n}", "func opUI8ToStr(inputs []ast.CXValue, outputs []ast.CXValue) {\n\toutV0 := strconv.FormatUint(uint64(inputs[0].Get_ui8()), 10)\n\toutputs[0].Set_str(outV0)\n}", "func (b *Bus) Write8(addr mirv.Address, v uint8) error {\n\tblk := b.p\n\tif !blk.contains(addr) {\n\t\tblk = b.find(addr)\n\t}\n\treturn blk.m.Write8(addr-blk.s, v)\n}", "func AppendUint8(dst []byte, val uint8) []byte {\n\treturn AppendUint(dst, uint(val))\n}", "func (t Uint8) WriteTo(w io.Writer) (int64, error) {\n\tlw := aWriterPool.Get(w)\n\tlw.WriteUint8(uint8(t))\n\treturn aWriterPool.Put(lw)\n}", "func (o *OutputState) ApplyUint8(applier interface{}) Uint8Output {\n\treturn o.ApplyT(applier).(Uint8Output)\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (b *Buffer) AppendUint8(v uint8) {\n\tb.AppendUint64(uint64(v))\n}", "func Uint8Var(p *uint8, name string, value uint8, usage string) {\n\tEnvironment.Uint8Var(p, name, value, usage)\n}", "func (s *videoController) Write8(address uint16, v byte) {\n\tif s.isRegisterAddress(address) {\n\t\tswitch address {\n\t\tcase registerFF41:\n\t\t\t// lowest 3 bits are read-only\n\t\t\tcurrent := s.registers[address-offsetRegisters]\n\t\t\ts.registers[address-offsetRegisters] = copyBits(v, current, 0, 1, 2)\n\t\tcase registerFF44:\n\t\t\t// do nothing - address is read-only\n\t\tcase 0xFF46:\n\t\t\tnotImplemented(\"OAM DMA transfers not implemented\")\n\t\tdefault:\n\t\t\ts.registers[address-offsetRegisters] = v\n\t\t}\n\t\treturn\n\t}\n\n\tif s.isOAMAddress(address) {\n\t\tif s.oamAccessible {\n\t\t\ts.oam[address-offsetOAM] = v\n\t\t}\n\t\treturn\n\t}\n\n\tif 
s.vramAccessible {\n\t\ts.vram[address-offsetVRAM] = v\n\t}\n}", "func (b *Buffer) AppendUint8(x uint8) error {\n\treturn b.appendInteger(x)\n}", "func Uint8ToString(v string) predicate.Conversion {\n\treturn predicate.Conversion(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldUint8ToString), v))\n\t})\n}", "func Uint8(key string, val uint8) Field {\n\treturn Field{Key: key, Type: core.Uint8Type, Integer: int64(val)}\n}", "func (s *serialController) Write8(address uint16, v byte) {\n\tswitch address {\n\tcase 0xFF01:\n\t\ts.writeRegister(registerFF01, v)\n\tcase 0xFF02:\n\t\ts.writeRegister(registerFF02, v)\n\tdefault:\n\t\tnotImplemented(\"write of unimplemented SERIAL register at %#4x\", address)\n\t}\n}", "func (s *Streamer) Uint8(v uint8) *Streamer {\n\tif s.Error != nil {\n\t\treturn s\n\t}\n\ts.onVal()\n\ts.buffer = appendUint8(s.buffer, v)\n\treturn s\n}", "func (m *MockIOPackage) WriteUint64(arg0 []byte, arg1 int, arg2 uint64) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint64\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (m *MockIOPackage) WriteUint16(arg0 []byte, arg1 int, arg2 uint16) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint16\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn ret0\n}", "func (s *EnvVarSet) Uint8(name string, value uint8, usage string) *uint8 {\n\tp := new(uint8)\n\n\ts.Uint8Var(p, name, value, usage)\n\n\treturn p\n}", "func NewUint8(data arrow.ArrayData, shape, strides []int64, names []string) *Uint8 {\n\ttsr := &Uint8{tensorBase: *newTensor(arrow.PrimitiveTypes.Uint8, data, shape, strides, names)}\n\tvals := tsr.data.Buffers()[1]\n\tif vals != nil {\n\t\ttsr.values = arrow.Uint8Traits.CastFromBytes(vals.Bytes())\n\t\tbeg := tsr.data.Offset()\n\t\tend := beg + tsr.data.Len()\n\t\ttsr.values = tsr.values[beg:end]\n\t}\n\treturn tsr\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (sb *Builder) WriteUint(i uint64, base int) {\n\tsb.formatBits(i, base, false)\n}", "func (w *Writer) WriteBits8(b byte, n uint) error {\n\t//fmt.Printf(\"b: %d, n: %d, free: %d\\n\", b, n, w.free)\n\tif n == 0 {\n\t\treturn nil\n\t}\n\n\tif n > 8 {\n\t\tn = 8\n\t}\n\n\tvar rem uint\n\tif n > w.free {\n\t\trem = n - w.free\n\t\tn = w.free\n\t}\n\n\tw.free -= n\n\tw.bits <<= n\n\tw.bits |= ((b >> rem) & (1<<n - 1))\n\n\tvar err error\n\tif w.free == 0 {\n\t\terr = w.Flush()\n\t}\n\tif rem == 0 {\n\t\treturn err\n\t}\n\n\treturn w.WriteBits8(b, rem)\n}", "func Uint8(v *uint8) uint8 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "func (fw *Writer) PutUint8Field(addr biopb.Coord, v byte) {\n\twb := fw.buf\n\twb.updateAddrBounds(addr)\n\twb.defaultBuf.PutUint8(v)\n}", "func (m *MockIOPackage) WriteUint32(arg0 []byte, arg1 int, arg2 uint32) int {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteUint32\", arg0, arg1, arg2)\n\tret0, _ := ret[0].(int)\n\treturn 
ret0\n}", "func (d *Display) write8bits(bits uint8, rs int) {\n\td.rs.Write(rs)\n\td.write4bits(bits)\n\td.write4bits(bits << 4)\n}", "func (m *MockSession) Int8(arg0 string) int8 {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Int8\", arg0)\n\tret0, _ := ret[0].(int8)\n\treturn ret0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func Uint8(u *uint8) uint8 {\n\tif u == nil {\n\t\treturn 0\n\t}\n\treturn *u\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigInt(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigInt_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigInt_Call{Call: _e.mock.On(\"WriteBigInt\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func Uint8(v uint8) *uint8 {\n\treturn &v\n}", "func newUInt8(value uint8) RootType {\n return &UInt8 { value }\n}" ]
[ "0.8136446", "0.80900276", "0.8002153", "0.7085276", "0.70680135", "0.6968587", "0.6817072", "0.6810211", "0.66303754", "0.6539053", "0.64065635", "0.6274435", "0.62434846", "0.6230213", "0.6179796", "0.6178202", "0.6083993", "0.6058006", "0.6037695", "0.60060036", "0.5997961", "0.59685796", "0.59587467", "0.5921812", "0.58955127", "0.5876469", "0.5857378", "0.5849923", "0.5841238", "0.5835812", "0.5806297", "0.57951957", "0.57929677", "0.5790357", "0.5775357", "0.5761284", "0.5758591", "0.57318044", "0.57218975", "0.57123744", "0.57067263", "0.5702964", "0.570026", "0.5696144", "0.5668982", "0.56486917", "0.56331956", "0.56151193", "0.5571664", "0.5567763", "0.55281794", "0.5507154", "0.5499278", "0.54891086", "0.5476186", "0.5474023", "0.5464083", "0.5455904", "0.54527146", "0.54505295", "0.54421335", "0.5375606", "0.5339498", "0.5291078", "0.52470404", "0.517418", "0.5167842", "0.51674664", "0.51635605", "0.51572424", "0.51515895", "0.5149938", "0.514914", "0.51467985", "0.5143541", "0.5142476", "0.51256233", "0.5112025", "0.510855", "0.50983673", "0.508847", "0.50878996", "0.50804454", "0.5078055", "0.50460863", "0.5032163", "0.5022938", "0.5014463", "0.50015306", "0.49891773", "0.4988196", "0.49824896", "0.49595004", "0.4950127", "0.49395788", "0.49378192", "0.49318483", "0.49294087", "0.49271572", "0.49262118" ]
0.8335929
0
WriteVirtual provides a mock function with given fields: ctx, logicalName, value, writerArgs
func (_m *MockWriteBufferJsonBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error { _va := make([]interface{}, len(writerArgs)) for _i := range writerArgs { _va[_i] = writerArgs[_i] } var _ca []interface{} _ca = append(_ca, ctx, logicalName, value) _ca = append(_ca, _va...) ret := _m.Called(_ca...) var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok { r0 = rf(ctx, logicalName, value, writerArgs...) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockWriteBufferXmlBased) WriteVirtual(ctx context.Context, logicalName string, value interface{}, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(ctx, logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockVirtualServiceClient) Write(arg0 *v1.VirtualService, arg1 clients.WriteOpts) (*v1.VirtualService, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0, arg1)\n\tret0, _ := ret[0].(*v1.VirtualService)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockStore) Write(ctx context.Context, key string, val interface{}, opts ...store.WriteOption) error {\n\t_va := make([]interface{}, len(opts))\n\tfor _i := range opts {\n\t\t_va[_i] = opts[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, key, val)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, interface{}, ...store.WriteOption) error); ok {\n\t\tr0 = rf(ctx, key, val, opts...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockTestTransportInstance) Write(data []byte) error {\n\tret := _m.Called(data)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func([]byte) error); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat64(logicalName string, bitLength uint8, value float64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByte(logicalName string, 
value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBit(logicalName string, value bool, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, bool, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByte(logicalName string, value byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs 
...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt64(logicalName string, bitLength uint8, value int64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = 
writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Output) Write(ctx context.Context, batch []stream.WritableMessage) error {\n\tret := _m.Called(ctx, batch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, batch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint64(logicalName string, bitLength uint8, value uint64, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint64, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt16(logicalName string, bitLength uint8, value int16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := 
ret.Get(0).(func(string, uint8, int16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteUint16(logicalName string, bitLength uint8, value uint16, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint16, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *Output_Expecter) Write(ctx interface{}, batch interface{}) *Output_Write_Call {\n\treturn &Output_Write_Call{Call: _e.mock.On(\"Write\", ctx, batch)}\n}", "func (_m *MockWriteBufferXmlBased) WriteSerializable(ctx context.Context, serializable Serializable) error {\n\tret := _m.Called(ctx, serializable)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, Serializable) error); ok {\n\t\tr0 = rf(ctx, serializable)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteFloat32(logicalName string, bitLength uint8, value float32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, float32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteBigInt(logicalName string, bitLength uint8, value *big.Int, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Int, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, 
...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *HadolintPiperFileUtils) FileWrite(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Connection) Write(ctx context.Context, typ websocket.MessageType, p []byte) error {\n\tret := _m.Called(ctx, typ, p)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, websocket.MessageType, []byte) error); ok {\n\t\tr0 = rf(ctx, typ, p)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *Session) Write(data []byte) (int, error) {\n\tret := _m.Called(data)\n\n\tvar r0 int\n\tvar r1 error\n\tif rf, ok := ret.Get(0).(func([]byte) (int, error)); ok {\n\t\treturn rf(data)\n\t}\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(data)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(data)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (_e *WriteCloser_Expecter) Write(p interface{}) *WriteCloser_Write_Call {\n\treturn &WriteCloser_Write_Call{Call: _e.mock.On(\"Write\", p)}\n}", "func (_e *MockTestTransportInstance_Expecter) Write(data interface{}) *MockTestTransportInstance_Write_Call {\n\treturn &MockTestTransportInstance_Write_Call{Call: _e.mock.On(\"Write\", data)}\n}", "func (_m *OSIOAPI) WriteFile(filename string, data []byte, perm os.FileMode) error {\n\tret := _m.Called(filename, data, perm)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, os.FileMode) error); ok {\n\t\tr0 = rf(filename, data, perm)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteFloat64_Call {\n\treturn &MockWriteBufferJsonBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockResponseWriter) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) PushContext(logicalName string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (m *MockResponseWriter) Write(arg0 *types.APIRequest, arg1 int, arg2 types.APIObject) {\n\tm.ctrl.T.Helper()\n\tm.ctrl.Call(m, \"Write\", arg0, arg1, arg2)\n}", "func (tw *TestWriter) Write(incoming []byte) (n int, err error) {\n\tif tw.mockWriteHandler != nil {\n\t\treturn tw.mockWriteHandler(incoming)\n\t}\n\n\ttw.capturedOutput = append(tw.capturedOutput, string(incoming))\n\treturn 0, nil\n}", "func (m *MockConn) WriteJSON(v interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteJSON\", v)\n\tret0, _ := 
ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteFloat64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteFloat64_Call {\n\treturn &MockWriteBufferXmlBased_WriteFloat64_Call{Call: _e.mock.On(\"WriteFloat64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func TestWrite(t *testing.T) {\n\tmockZooKeeper := &MockZooHandle{\n\t\tzk: mock.Mock{},\n\t}\n\n\tbytes := make([]byte, 3)\n\tff := NewFuseFile(bytes, 0, \"mock/path\", mockZooKeeper)\n\n\tmockZooKeeper.zk.On(\"Set\", \"mock/path\", bytes, int32(-1)).Return(&zk.Stat{DataLength: int32(len(bytes))}, nil)\n\n\t// assert that we send 3 bytes into the writer and status out == fuse.OK\n\tsize, stat := ff.Write(bytes, 0)\n\tassert.Equal(t, uint32(3), size)\n\tassert.Equal(t, fuse.OK, stat)\n}", "func (_m *MockResponseWriter) WriteMsg(_a0 *dns.Msg) error {\n\tret := _m.Called(_a0)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*dns.Msg) error); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func MockOnInsertVirtualMedia(ctx context.Context, mockAPI *redfishMocks.RedfishAPI,\n\tmanagerID string, virtualMediaID string, redfishErr redfishClient.RedfishError,\n\thttpResponse *http.Response, err error) {\n\tmediaRequest := redfishClient.ApiInsertVirtualMediaRequest{}\n\tmockAPI.On(\"InsertVirtualMedia\", ctx, managerID, virtualMediaID).Return(mediaRequest).Times(1)\n\tmockAPI.On(\"InsertVirtualMediaExecute\", mock.Anything).Return(redfishErr, httpResponse, err).Times(1)\n}", "func (m *MockHash) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockFile) Write(_param0 []byte) (int, error) {\n\tret := _m.ctrl.Call(_m, \"Write\", _param0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockHash32) Write(arg0 []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockContext) Bind(v reflect.Value, tag string) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Bind\", v, tag)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_m *MockWriteBufferJsonBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn 
r0\n}", "func (mc *MockConn) MockWrite(msg wire.Message) {\n\tbuf := &bytes.Buffer{}\n\twire.WriteMessage(buf, msg, wire.MainNet)\n\tmc.receiveChan <- buf.Bytes()\n}", "func (_m *MockWriteBufferXmlBased) WriteUint8(logicalName string, bitLength uint8, value uint8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockOStream) WriteBits(v uint64, numBits int) {\n\t_m.ctrl.Call(_m, \"WriteBits\", v, numBits)\n}", "func (m *MockHash64) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_m *MockWriteBufferXmlBased) WriteUint32(logicalName string, bitLength uint8, value uint32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, uint32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferJsonBased_WriteSerializable_Call {\n\treturn &MockWriteBufferJsonBased_WriteSerializable_Call{Call: _e.mock.On(\"WriteSerializable\", ctx, serializable)}\n}", "func (_m *MockRequestInterceptor) ProcessWriteResponses(ctx context.Context, writeRequest model.PlcWriteRequest, writeResults []model.PlcWriteRequestResult) model.PlcWriteRequestResult {\n\tret := _m.Called(ctx, writeRequest, writeResults)\n\n\tvar r0 model.PlcWriteRequestResult\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest, []model.PlcWriteRequestResult) model.PlcWriteRequestResult); ok {\n\t\tr0 = rf(ctx, writeRequest, writeResults)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(model.PlcWriteRequestResult)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockSeriesRef) Write(ctx context.Context, timestamp time.UnixNano, value float64, unit time.Unit, annotation []byte, wOpts series.WriteOptions) (bool, series.WriteType, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, timestamp, value, unit, annotation, wOpts)\n\tret0, _ := ret[0].(bool)\n\tret1, _ := ret[1].(series.WriteType)\n\tret2, _ := ret[2].(error)\n\treturn ret0, ret1, ret2\n}", "func (_m *MockWriteRequestInterceptor) ProcessWriteResponses(ctx context.Context, writeRequest model.PlcWriteRequest, writeResults []model.PlcWriteRequestResult) model.PlcWriteRequestResult {\n\tret := _m.Called(ctx, writeRequest, writeResults)\n\n\tvar r0 model.PlcWriteRequestResult\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest, []model.PlcWriteRequestResult) model.PlcWriteRequestResult); ok {\n\t\tr0 = rf(ctx, writeRequest, writeResults)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(model.PlcWriteRequestResult)\n\t\t}\n\t}\n\n\treturn 
r0\n}", "func (_m *MockWriteBufferJsonBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock WriteCloser) Write(p []byte) (n int, err error) {\n\tmethodName := \"Write\" // nolint: goconst\n\tif mock.impl.Write != nil {\n\t\treturn mock.impl.Write(p)\n\t}\n\tif mock.callbackNotImplemented != nil {\n\t\tmock.callbackNotImplemented(mock.t, mock.name, methodName)\n\t} else {\n\t\tgomic.DefaultCallbackNotImplemented(mock.t, mock.name, methodName)\n\t}\n\treturn mock.fakeZeroWrite(p)\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBit(logicalName interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBit_Call {\n\treturn &MockWriteBufferJsonBased_WriteBit_Call{Call: _e.mock.On(\"WriteBit\",\n\t\tappend([]interface{}{logicalName, value}, writerArgs...)...)}\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteString_Call {\n\treturn &MockWriteBufferJsonBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (m *MockConn) Write(arg0 core.WriteableFrame) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockHash32) Write(p []byte) (int, error) {\n\tret := m.ctrl.Call(m, \"Write\", p)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockInterfaceNode) ProcessVirt(workflowContext *WorkflowContext) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"ProcessVirt\", workflowContext)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (mr *MockVirtualServiceClientMockRecorder) Write(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"Write\", reflect.TypeOf((*MockVirtualServiceClient)(nil).Write), arg0, arg1)\n}", "func (_m *MockWriteBufferJsonBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func MockWire(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) *bytes.Buffer {\n\tev := MockAgreementEvent(hash, round, step, keys, p, i...)\n\n\tbuf := new(bytes.Buffer)\n\tif err := header.Marshal(buf, ev.Header); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif err := Marshal(buf, *ev); err != nil {\n\t\tpanic(err)\n\t}\n\treturn buf\n}", 
"func (_m *Output) WriteOne(ctx context.Context, msg stream.WritableMessage) error {\n\tret := _m.Called(ctx, msg)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, stream.WritableMessage) error); ok {\n\t\tr0 = rf(ctx, msg)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteUint64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteUint64_Call {\n\treturn &MockWriteBufferJsonBased_WriteUint64_Call{Call: _e.mock.On(\"WriteUint64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockTChanNode) WriteTagged(ctx thrift.Context, req *WriteTaggedRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteTagged\", ctx, req)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockScraper) WriteToFile(arg0 map[int]map[string]int, arg1 int) (string, error) {\n\tret := m.ctrl.Call(m, \"WriteToFile\", arg0, arg1)\n\tret0, _ := ret[0].(string)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteSerializable(ctx interface{}, serializable interface{}) *MockWriteBufferXmlBased_WriteSerializable_Call {\n\treturn &MockWriteBufferXmlBased_WriteSerializable_Call{Call: _e.mock.On(\"WriteSerializable\", ctx, serializable)}\n}", "func (c *cpu) writev() {\n\tsp, iovcnt := popI32(c.sp)\n\tsp, iov := popPtr(sp)\n\tfd := readI32(sp)\n\tn, _, err := syscall.Syscall(syscall.SYS_WRITEV, uintptr(fd), iov, uintptr(iovcnt))\n\tif strace {\n\t\tfmt.Fprintf(os.Stderr, \"writev(%#x, %#x, %#x) %v %v\\t; %s\\n\", fd, iov, iovcnt, n, err, c.pos())\n\t}\n\tif err != 0 {\n\t\tc.setErrno(err)\n\t\twriteLong(c.rp, -1)\n\t\treturn\n\t}\n\n\twriteLong(c.rp, int64(n))\n}", "func (s *mockApplicationFDImpl) Write(ctx context.Context, src usermem.IOSequence, opts vfs.WriteOptions) (int64, error) {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tif s.released {\n\t\treturn 0, io.EOF\n\t}\n\n\tbuf := make([]byte, src.NumBytes())\n\tn, _ := src.CopyIn(ctx, buf)\n\tres, _ := s.writeBuf.Write(buf[:n])\n\treturn int64(res), nil\n}", "func (_m *WriteCloser) Write(p []byte) (int, error) {\n\tret := _m.Called(p)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func([]byte) int); ok {\n\t\tr0 = rf(p)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func([]byte) error); ok {\n\t\tr1 = rf(p)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (m *MockFile) Write(arg0 []byte) (int, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", arg0)\n\tret0, _ := ret[0].(int)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func TestWrite(t *testing.T) {\n\ttests := []struct {\n\t\tid *ua.NodeID\n\t\tv interface{}\n\t\tstatus ua.StatusCode\n\t}{\n\t\t// happy flows\n\t\t{ua.NewStringNodeID(2, \"rw_bool\"), false, ua.StatusOK},\n\t\t{ua.NewStringNodeID(2, \"rw_int32\"), int32(9), ua.StatusOK},\n\n\t\t// error flows\n\t\t{ua.NewStringNodeID(2, \"ro_bool\"), false, ua.StatusBadUserAccessDenied},\n\t}\n\n\tctx := context.Background()\n\n\tsrv := NewServer(\"rw_server.py\")\n\tdefer srv.Close()\n\n\tc, err := opcua.NewClient(srv.Endpoint, srv.Opts...)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tif err := c.Connect(ctx); err != nil 
{\n\t\tt.Fatal(err)\n\t}\n\tdefer c.Close(ctx)\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.id.String(), func(t *testing.T) {\n\t\t\ttestWrite(t, ctx, c, tt.status, &ua.WriteRequest{\n\t\t\t\tNodesToWrite: []*ua.WriteValue{\n\t\t\t\t\t&ua.WriteValue{\n\t\t\t\t\t\tNodeID: tt.id,\n\t\t\t\t\t\tAttributeID: ua.AttributeIDValue,\n\t\t\t\t\t\tValue: &ua.DataValue{\n\t\t\t\t\t\t\tEncodingMask: ua.DataValueValue,\n\t\t\t\t\t\t\tValue: ua.MustVariant(tt.v),\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t})\n\n\t\t\t// skip read tests if the write is expected to fail\n\t\t\tif tt.status != ua.StatusOK {\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ttestRead(t, ctx, c, tt.v, tt.id)\n\t\t})\n\t}\n}", "func (_m *MockWriteBufferXmlBased) WriteInt8(logicalName string, bitLength uint8, value int8, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int8, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockWriteBufferXmlBased) WriteInt32(logicalName string, bitLength uint8, value int32, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, int32, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (mock *WriteCloser) SetFuncWrite(impl func(p []byte) (n int, err error)) *WriteCloser {\n\tmock.impl.Write = impl\n\treturn mock\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteInt64(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteInt64_Call {\n\treturn &MockWriteBufferJsonBased_WriteInt64_Call{Call: _e.mock.On(\"WriteInt64\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (m *MockSession) WriteMapData(name string, data map[string]interface{}) error {\n\targs := m.Mock.Called(name, data)\n\treturn args.Error(0)\n}", "func (m *MockManager) SerializeHelmValues(arg0, arg1 string) error {\n\tret := m.ctrl.Call(m, \"SerializeHelmValues\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (_e *MockWriteBufferJsonBased_Expecter) WriteBigFloat(logicalName interface{}, bitLength interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferJsonBased_WriteBigFloat_Call {\n\treturn &MockWriteBufferJsonBased_WriteBigFloat_Call{Call: _e.mock.On(\"WriteBigFloat\",\n\t\tappend([]interface{}{logicalName, bitLength, value}, writerArgs...)...)}\n}", "func (t *SimpleChaincode) Write(stub *shim.ChaincodeStub, args []string) ([]byte, error) {\n var name, value string // Entities\n var err error\n fmt.Println(\"running write()\")\n\n if len(args) != 2 {\n return nil, errors.New(\"Incorrect number of arguments. Expecting 2. 
name of the variable and value to set\")\n }\n\n name = args[0] //rename for funsies\n value = args[1]\n err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state\n if err != nil {\n return nil, err\n }\n return nil, nil\n}", "func (_m *MockOStream) WriteBit(v Bit) {\n\t_m.ctrl.Call(_m, \"WriteBit\", v)\n}", "func (_m *MockRequestInterceptor) InterceptWriteRequest(ctx context.Context, writeRequest model.PlcWriteRequest) []model.PlcWriteRequest {\n\tret := _m.Called(ctx, writeRequest)\n\n\tvar r0 []model.PlcWriteRequest\n\tif rf, ok := ret.Get(0).(func(context.Context, model.PlcWriteRequest) []model.PlcWriteRequest); ok {\n\t\tr0 = rf(ctx, writeRequest)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]model.PlcWriteRequest)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockVirtualServiceFinalizer) VirtualServiceFinalizerName() string {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"VirtualServiceFinalizerName\")\n\tret0, _ := ret[0].(string)\n\treturn ret0\n}", "func (_e *MockWriteBufferXmlBased_Expecter) WriteString(logicalName interface{}, bitLength interface{}, encoding interface{}, value interface{}, writerArgs ...interface{}) *MockWriteBufferXmlBased_WriteString_Call {\n\treturn &MockWriteBufferXmlBased_WriteString_Call{Call: _e.mock.On(\"WriteString\",\n\t\tappend([]interface{}{logicalName, bitLength, encoding, value}, writerArgs...)...)}\n}", "func (m *MockManager) SaveSearchAttributes(ctx context.Context, indexName string, newCustomSearchAttributes map[string]v1.IndexedValueType) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"SaveSearchAttributes\", ctx, indexName, newCustomSearchAttributes)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *MockTChanNode) Write(ctx thrift.Context, req *WriteRequest) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Write\", ctx, req)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func (m *VirtualEndpoint) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetAuditEvents() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAuditEvents()))\n for i, v := range m.GetAuditEvents() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"auditEvents\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetBulkActions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBulkActions()))\n for i, v := range m.GetBulkActions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"bulkActions\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetCloudPCs() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetCloudPCs()))\n for i, v := range m.GetCloudPCs() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"cloudPCs\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"crossCloudGovernmentOrganizationMapping\", m.GetCrossCloudGovernmentOrganizationMapping())\n if err != nil {\n return err\n }\n }\n if m.GetDeviceImages() 
!= nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDeviceImages()))\n for i, v := range m.GetDeviceImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"deviceImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetExternalPartnerSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetExternalPartnerSettings()))\n for i, v := range m.GetExternalPartnerSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"externalPartnerSettings\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetFrontLineServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFrontLineServicePlans()))\n for i, v := range m.GetFrontLineServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"frontLineServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetGalleryImages() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetGalleryImages()))\n for i, v := range m.GetGalleryImages() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"galleryImages\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetOnPremisesConnections() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOnPremisesConnections()))\n for i, v := range m.GetOnPremisesConnections() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"onPremisesConnections\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"organizationSettings\", m.GetOrganizationSettings())\n if err != nil {\n return err\n }\n }\n if m.GetProvisioningPolicies() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetProvisioningPolicies()))\n for i, v := range m.GetProvisioningPolicies() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"provisioningPolicies\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteObjectValue(\"reports\", m.GetReports())\n if err != nil {\n return err\n }\n }\n if m.GetServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetServicePlans()))\n for i, v := range m.GetServicePlans() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"servicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSharedUseServicePlans() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSharedUseServicePlans()))\n for i, v := range m.GetSharedUseServicePlans() {\n if v != nil {\n cast[i] = 
v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"sharedUseServicePlans\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSnapshots() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSnapshots()))\n for i, v := range m.GetSnapshots() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"snapshots\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetSupportedRegions() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetSupportedRegions()))\n for i, v := range m.GetSupportedRegions() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"supportedRegions\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetUserSettings() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetUserSettings()))\n for i, v := range m.GetUserSettings() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"userSettings\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}" ]
[ "0.8483768", "0.5920663", "0.59193355", "0.58257073", "0.5747913", "0.5727543", "0.5705697", "0.55697286", "0.556123", "0.5507197", "0.5480779", "0.5450051", "0.53875035", "0.5387411", "0.5380454", "0.5372317", "0.53507876", "0.53451395", "0.5344509", "0.5339585", "0.5339534", "0.5318711", "0.5317544", "0.5303851", "0.52711546", "0.52656454", "0.5236437", "0.5230006", "0.5227265", "0.52265894", "0.5223882", "0.52108073", "0.52093786", "0.51974636", "0.51369953", "0.51210475", "0.5117775", "0.5103883", "0.509933", "0.50928336", "0.509207", "0.5071298", "0.50558394", "0.5047986", "0.5027815", "0.5023322", "0.4986167", "0.4970577", "0.49698764", "0.49669468", "0.49657404", "0.49611846", "0.4934317", "0.49171266", "0.49149054", "0.48858598", "0.48794246", "0.4875003", "0.4866514", "0.48580894", "0.48473355", "0.48406407", "0.48330444", "0.4830064", "0.48281768", "0.48278016", "0.4826563", "0.4817739", "0.48159164", "0.4815506", "0.48106608", "0.48046085", "0.47844386", "0.4755161", "0.47469467", "0.473377", "0.47336236", "0.47321817", "0.47054368", "0.47004366", "0.4697903", "0.46900803", "0.46785855", "0.46785855", "0.46726346", "0.46725765", "0.46714085", "0.4666762", "0.4663313", "0.46616942", "0.4655648", "0.4650972", "0.4647153", "0.4646873", "0.46344066", "0.46310657", "0.4625516", "0.46236733", "0.4622506", "0.4616985" ]
0.86245424
0
NewMockWriteBufferJsonBased creates a new instance of MockWriteBufferJsonBased. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. The first argument is typically a testing.T value.
func NewMockWriteBufferJsonBased(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockWriteBufferJsonBased {
	mock := &MockWriteBufferJsonBased{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewMockWriteBufferXmlBased(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockWriteBufferXmlBased {\n\tmock := &MockWriteBufferXmlBased{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewWriter(t mockConstructorTestingTNewWriter) *Writer {\n\tmock := &Writer{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockWriteCloser(t *testing.T) *MockWriteCloser {\n\treturn &MockWriteCloser{\n\t\tb: bytes.Buffer{},\n\t\tclosed: false,\n\t\tt: t,\n\t}\n}", "func NewMockWriter() *MockWriter {\n\treturn &MockWriter{Entries: []string{}}\n}", "func NewMockWriteRequestInterceptor(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockWriteRequestInterceptor {\n\tmock := &MockWriteRequestInterceptor{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewWriteCloser(t mockConstructorTestingTNewWriteCloser) *WriteCloser {\n\tmock := &WriteCloser{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewJSONPayloadBuilder(shareAndLockBuffers bool) *JSONPayloadBuilder {\n\tif shareAndLockBuffers {\n\t\treturn &JSONPayloadBuilder{\n\t\t\tinputSizeHint: 4096,\n\t\t\toutputSizeHint: 4096,\n\t\t\tshareAndLockBuffers: true,\n\t\t\tinput: bytes.NewBuffer(make([]byte, 0, 4096)),\n\t\t\toutput: bytes.NewBuffer(make([]byte, 0, 4096)),\n\t\t}\n\t}\n\treturn &JSONPayloadBuilder{\n\t\tinputSizeHint: 4096,\n\t\toutputSizeHint: 4096,\n\t\tshareAndLockBuffers: false,\n\t}\n}", "func NewMock(t *testing.T) *MockT { return &MockT{t: t} }", "func newMockMatrix() io.ReadWriteCloser {\n\treturn &mockMatrix{\n\t\tstate: stateCmd,\n\t\tcount: 0,\n\t}\n}", "func NewJSONMockDoer(object interface{}, code int) *Doer {\n\tbuffer, _ := json.Marshal(object)\n\tbody := ResponseBody{\n\t\tContent: buffer,\n\t}\n\treturn &Doer{\n\t\tResponse: http.Response{\n\t\t\tBody: &body,\n\t\t\tStatusCode: code,\n\t\t},\n\t\tResponseBody: &body,\n\t}\n}", "func NewMock() *Mock {\n\tc := &Mock{\n\t\tFakeIncoming: func() chan []byte {\n\t\t\treturn make(chan []byte, 2)\n\t\t},\n\t\tFakeName: func() string {\n\t\t\treturn \"TestClient\"\n\t\t},\n\t\tFakeGame: func() string {\n\t\t\treturn \"test\"\n\t\t},\n\t\tFakeClose: func() {\n\t\t\t// Do nothing\n\t\t},\n\t\tFakeStopTimer: func() {\n\t\t\t// Do nothing\n\t\t},\n\t\tFakeRoom: func() interfaces.Room {\n\t\t\treturn nil\n\t\t},\n\t\tFakeSetRoom: func(interfaces.Room) {\n\n\t\t},\n\t}\n\n\tc.FakeWritePump = func() {\n\t\tfor range c.Incoming() {\n\t\t\t// Do nothing\n\t\t}\n\t}\n\n\tc.FakeSetName = func(string) interfaces.Client {\n\t\treturn c\n\t}\n\treturn c\n}", "func NewSynchronizable(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *Synchronizable {\n\tmock := &Synchronizable{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewBatcher(t testing.TB) *Batcher {\n\tmock := &Batcher{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewJSONWriter(w http.ResponseWriter) JSONWriterFunc {\n\treturn func(v interface{}, status int) {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(status)\n\t\tjson.NewEncoder(w).Encode(v)\n\t}\n}", "func FakeTdsBufferCtor(r io.ReadWriteCloser) io.ReadWriteCloser {\n\treturn r\n}", "func newJSONEncoder() *jsonEncoder {\n\tbuffer := &bytes.Buffer{}\n\tencoder := json.NewEncoder(buffer)\n\n\treturn 
&jsonEncoder{\n\t\tbuffer: buffer,\n\t\tencoder: encoder,\n\t\tcontentType: jsonContentType,\n\t}\n}", "func NewDecorator() JSendDecorator {\n\treturn &JSendDecoratorBuffer{\n\t\tjsonEncoder: json.Marshal,\n\t}\n}", "func NewMockBufferInterface(ctrl *gomock.Controller) *MockBufferInterface {\n\tmock := &MockBufferInterface{ctrl: ctrl}\n\tmock.recorder = &MockBufferInterfaceMockRecorder{mock}\n\treturn mock\n}", "func NewMock() *Mock {\n\treturn &Mock{\n\t\tData: MockData{\n\t\t\tUptime: true,\n\t\t\tFile: true,\n\t\t\tTCPResponse: true,\n\t\t\tHTTPStatus: true,\n\t\t},\n\t}\n}", "func (s *MockQueueService) NewWriter(topic string) queue.Writer {\n\treturn &mockWriter{\n\t\tService: s,\n\t\tTopic: topic,\n\t}\n}", "func NewTestWriter(t TestingLog) TestWriter {\n\treturn TestWriter{T: t}\n}", "func NewJSONFileWriter(file string) (*JSONFileWriter, error) {\n\tswitch file {\n\tcase \"stdout\":\n\t\treturn &JSONFileWriter{os.Stdout}, nil\n\tcase \"stderr\":\n\t\treturn &JSONFileWriter{os.Stderr}, nil\n\tdefault:\n\t\tf, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &JSONFileWriter{f}, nil\n\t}\n}", "func NewMock(now time.Time) *Mock {\n\treturn &Mock{\n\t\tnow: now,\n\t\tmockTimers: &timerHeap{},\n\t}\n}", "func NewMockWriter(ctrl *gomock.Controller) *MockWriter {\n\tmock := &MockWriter{ctrl: ctrl}\n\tmock.recorder = &MockWriterMockRecorder{mock}\n\treturn mock\n}", "func NewMockWriter(ctrl *gomock.Controller) *MockWriter {\n\tmock := &MockWriter{ctrl: ctrl}\n\tmock.recorder = &MockWriterMockRecorder{mock}\n\treturn mock\n}", "func NewMockWriter(ctrl *gomock.Controller) *MockWriter {\n\tmock := &MockWriter{ctrl: ctrl}\n\tmock.recorder = &MockWriterMockRecorder{mock}\n\treturn mock\n}", "func NewMock(path string, nodes uint, replicas uint, vbuckets uint, specs ...BucketSpec) (m *Mock, err error) {\n\tvar lsn *net.TCPListener\n\tchAccept := make(chan bool)\n\tm = &Mock{}\n\n\tdefer func() {\n\t\tclose(chAccept)\n\t\tif lsn != nil {\n\t\t\tif err := lsn.Close(); err != nil {\n\t\t\t\tlog.Printf(\"Failed to close listener: %v\", err)\n\t\t\t}\n\t\t}\n\t\texc := recover()\n\n\t\tif exc == nil {\n\t\t\t// No errors, everything is OK\n\t\t\treturn\n\t\t}\n\n\t\t// Close mock on error, destroying resources\n\t\tm.Close()\n\t\tif mExc, ok := exc.(mockError); !ok {\n\t\t\tpanic(mExc)\n\t\t} else {\n\t\t\tm = nil\n\t\t\terr = mExc\n\t\t}\n\t}()\n\n\tif lsn, err = net.ListenTCP(\"tcp\", &net.TCPAddr{Port: 0}); err != nil {\n\t\tthrowMockError(\"Couldn't set up listening socket\", err)\n\t}\n\t_, ctlPort, err := net.SplitHostPort(lsn.Addr().String())\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to split host and port: %v\", err)\n\t}\n\tlog.Printf(\"Listening for control connection at %s\\n\", ctlPort)\n\n\tgo func() {\n\t\tvar err error\n\n\t\tdefer func() {\n\t\t\tchAccept <- false\n\t\t}()\n\t\tif m.conn, err = lsn.Accept(); err != nil {\n\t\t\tthrowMockError(\"Couldn't accept incoming control connection from mock\", err)\n\t\t\treturn\n\t\t}\n\t}()\n\n\tif len(specs) == 0 {\n\t\tspecs = []BucketSpec{{Name: \"default\", Type: BCouchbase}}\n\t}\n\n\toptions := []string{\n\t\t\"-jar\", path, \"--harakiri-monitor\", \"localhost:\" + ctlPort, \"--port\", \"0\",\n\t\t\"--replicas\", strconv.Itoa(int(replicas)),\n\t\t\"--vbuckets\", strconv.Itoa(int(vbuckets)),\n\t\t\"--nodes\", strconv.Itoa(int(nodes)),\n\t\t\"--buckets\", m.buildSpecStrings(specs),\n\t}\n\n\tlog.Printf(\"Invoking java %s\", strings.Join(options, \" 
\"))\n\tm.cmd = exec.Command(\"java\", options...)\n\n\tm.cmd.Stdout = os.Stdout\n\tm.cmd.Stderr = os.Stderr\n\n\tif err = m.cmd.Start(); err != nil {\n\t\tm.cmd = nil\n\t\tthrowMockError(\"Couldn't start command\", err)\n\t}\n\n\tselect {\n\tcase <-chAccept:\n\t\tbreak\n\n\tcase <-time.After(mockInitTimeout):\n\t\tthrowMockError(\"Timed out waiting for initialization\", errors.New(\"timeout\"))\n\t}\n\n\tm.rw = bufio.NewReadWriter(bufio.NewReader(m.conn), bufio.NewWriter(m.conn))\n\n\t// Read the port buffer, which is delimited by a NUL byte\n\tif portBytes, err := m.rw.ReadBytes(0); err != nil {\n\t\tthrowMockError(\"Couldn't get port information\", err)\n\t} else {\n\t\tportBytes = portBytes[:len(portBytes)-1]\n\t\tif entryPort, err := strconv.Atoi(string(portBytes)); err != nil {\n\t\t\tthrowMockError(\"Incorrectly formatted port from mock\", err)\n\t\t} else {\n\t\t\tm.EntryPort = uint16(entryPort)\n\t\t}\n\t}\n\n\tlog.Printf(\"Mock HTTP port at %d\\n\", m.EntryPort)\n\treturn\n}", "func NewGatewayMock(t minimock.Tester) *GatewayMock {\n\tm := &GatewayMock{t: t}\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.AutherMock = mGatewayMockAuther{mock: m}\n\n\tm.BeforeRunMock = mGatewayMockBeforeRun{mock: m}\n\tm.BeforeRunMock.callArgs = []*GatewayMockBeforeRunParams{}\n\n\tm.BootstrapperMock = mGatewayMockBootstrapper{mock: m}\n\n\tm.EphemeralModeMock = mGatewayMockEphemeralMode{mock: m}\n\tm.EphemeralModeMock.callArgs = []*GatewayMockEphemeralModeParams{}\n\n\tm.GetStateMock = mGatewayMockGetState{mock: m}\n\n\tm.NewGatewayMock = mGatewayMockNewGateway{mock: m}\n\tm.NewGatewayMock.callArgs = []*GatewayMockNewGatewayParams{}\n\n\tm.OnConsensusFinishedMock = mGatewayMockOnConsensusFinished{mock: m}\n\tm.OnConsensusFinishedMock.callArgs = []*GatewayMockOnConsensusFinishedParams{}\n\n\tm.OnPulseFromConsensusMock = mGatewayMockOnPulseFromConsensus{mock: m}\n\tm.OnPulseFromConsensusMock.callArgs = []*GatewayMockOnPulseFromConsensusParams{}\n\n\tm.OnPulseFromPulsarMock = mGatewayMockOnPulseFromPulsar{mock: m}\n\tm.OnPulseFromPulsarMock.callArgs = []*GatewayMockOnPulseFromPulsarParams{}\n\n\tm.RunMock = mGatewayMockRun{mock: m}\n\tm.RunMock.callArgs = []*GatewayMockRunParams{}\n\n\tm.UpdateStateMock = mGatewayMockUpdateState{mock: m}\n\tm.UpdateStateMock.callArgs = []*GatewayMockUpdateStateParams{}\n\n\treturn m\n}", "func New() *Mock {\n\treturn &Mock{\n\t\tm: mockMap{},\n\t\toldTransport: http.DefaultTransport,\n\t}\n}", "func NewHeavySyncMock(t minimock.Tester) *HeavySyncMock {\n\tm := &HeavySyncMock{t: t}\n\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.ResetMock = mHeavySyncMockReset{mock: m}\n\tm.StartMock = mHeavySyncMockStart{mock: m}\n\tm.StopMock = mHeavySyncMockStop{mock: m}\n\tm.StoreBlobsMock = mHeavySyncMockStoreBlobs{mock: m}\n\tm.StoreDropMock = mHeavySyncMockStoreDrop{mock: m}\n\tm.StoreIndicesMock = mHeavySyncMockStoreIndices{mock: m}\n\tm.StoreRecordsMock = mHeavySyncMockStoreRecords{mock: m}\n\n\treturn m\n}", "func NewOutboundMock(t minimock.Tester) *OutboundMock {\n\tm := &OutboundMock{t: t}\n\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.AsByteStringMock = mOutboundMockAsByteString{mock: m}\n\tm.CanAcceptMock = mOutboundMockCanAccept{mock: m}\n\tm.GetEndpointTypeMock = mOutboundMockGetEndpointType{mock: m}\n\tm.GetIPAddressMock = mOutboundMockGetIPAddress{mock: m}\n\tm.GetNameAddressMock = 
mOutboundMockGetNameAddress{mock: m}\n\tm.GetRelayIDMock = mOutboundMockGetRelayID{mock: m}\n\n\treturn m\n}", "func NewMyReader(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MyReader {\n\tmock := &MyReader{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func newMock(deps mockDependencies, t testing.TB) (Component, error) {\n\tbackupConfig := config.NewConfig(\"\", \"\", strings.NewReplacer())\n\tbackupConfig.CopyConfig(config.Datadog)\n\n\tconfig.Datadog.CopyConfig(config.NewConfig(\"mock\", \"XXXX\", strings.NewReplacer()))\n\n\tconfig.SetFeatures(t, deps.Params.Features...)\n\n\t// call InitConfig to set defaults.\n\tconfig.InitConfig(config.Datadog)\n\tc := &cfg{\n\t\tConfig: config.Datadog,\n\t}\n\n\tif !deps.Params.SetupConfig {\n\n\t\tif deps.Params.ConfFilePath != \"\" {\n\t\t\tconfig.Datadog.SetConfigType(\"yaml\")\n\t\t\terr := config.Datadog.ReadConfig(strings.NewReader(deps.Params.ConfFilePath))\n\t\t\tif err != nil {\n\t\t\t\t// The YAML was invalid, fail initialization of the mock config.\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t} else {\n\t\twarnings, _ := setupConfig(deps)\n\t\tc.warnings = warnings\n\t}\n\n\t// Overrides are explicit and will take precedence over any other\n\t// setting\n\tfor k, v := range deps.Params.Overrides {\n\t\tconfig.Datadog.Set(k, v)\n\t}\n\n\t// swap the existing config back at the end of the test.\n\tt.Cleanup(func() { config.Datadog.CopyConfig(backupConfig) })\n\n\treturn c, nil\n}", "func NewRepositoryWriter(t mockConstructorTestingTNewRepositoryWriter) *RepositoryWriter {\n\tmock := &RepositoryWriter{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewDirectRepositoryWriter(t mockConstructorTestingTNewDirectRepositoryWriter) *DirectRepositoryWriter {\n\tmock := &DirectRepositoryWriter{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockInterface(t mockConstructorTestingTNewMockInterface) *MockInterface {\n\tmock := &MockInterface{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func newJSONEncoder() *jsonEncoder {\n\tenc := jsonPool.Get().(*jsonEncoder)\n\tenc.truncate()\n\treturn enc\n}", "func NewWriter(w http.ResponseWriter) JSendWriter {\n\treturn &JSendWriterBuffer{\n\t\tbuilder: NewBuilder(),\n\t\tstatusCode: http.StatusOK,\n\t\tresponseWriter: w,\n\t}\n}", "func NewJSONGroupWriter(outputDir string, countPerFile uint64) JSONGroupWriter {\n\treturn JSONGroupWriter{\n\t\toutDir: outputDir,\n\t\tcount: countPerFile,\n\t\trecs: make([]interface{}, countPerFile),\n\t}\n}", "func NewWriter(treeID int64, hasher Hasher, height, split uint) *Writer {\n\tif split > height {\n\t\tpanic(fmt.Errorf(\"NewWriter: split(%d) > height(%d)\", split, height))\n\t}\n\treturn &Writer{h: bindHasher(hasher, treeID), height: height, split: split}\n}", "func NewMockAPI(enc format.Encoder) API {\n\tif enc == nil {\n\t\tenc = json.New()\n\t}\n\tm := &mockAPI{\n\t\texpectations: make(map[string][]*expectation),\n\t\tencoder: enc,\n\t}\n\tm.Server = httptest.NewUnstartedServer(m)\n\treturn m\n}", "func newRunner(output string, err error) *MockRunner {\n\tm := &MockRunner{}\n\tm.On(\"Run\", mock.Anything).Return([]byte(output), err)\n\treturn m\n}", "func newBufferedWriter(w io.Writer) *snappy.Writer {\n\trawBufWriter := bufWriterPool.Get()\n\tif rawBufWriter == nil {\n\t\treturn snappy.NewBufferedWriter(w)\n\t}\n\tbufW, ok := 
rawBufWriter.(*snappy.Writer)\n\tif !ok {\n\t\treturn snappy.NewBufferedWriter(w)\n\t}\n\tbufW.Reset(w)\n\treturn bufW\n}", "func NewMockStore(blocksWritten map[ipld.Link][]byte) (ipldbridge.Loader, ipldbridge.Storer) {\n\tvar storeLk sync.RWMutex\n\tstorer := func(lnkCtx ipldbridge.LinkContext) (io.Writer, ipldbridge.StoreCommitter, error) {\n\t\tvar buffer bytes.Buffer\n\t\tcommitter := func(lnk ipld.Link) error {\n\t\t\tstoreLk.Lock()\n\t\t\tblocksWritten[lnk] = buffer.Bytes()\n\t\t\tstoreLk.Unlock()\n\t\t\treturn nil\n\t\t}\n\t\treturn &buffer, committer, nil\n\t}\n\tloader := func(lnk ipld.Link, lnkCtx ipldbridge.LinkContext) (io.Reader, error) {\n\t\tstoreLk.RLock()\n\t\tdata, ok := blocksWritten[lnk]\n\t\tstoreLk.RUnlock()\n\t\tif ok {\n\t\t\treturn bytes.NewReader(data), nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"unable to load block\")\n\t}\n\n\treturn loader, storer\n}", "func NewShifterMock(t minimock.Tester) *ShifterMock {\n\tm := &ShifterMock{t: t}\n\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.ShiftMock = mShifterMockShift{mock: m}\n\n\treturn m\n}", "func NewBufferWriter(w io.WriteCloser) *BufferWriter {\n\treturn &BufferWriter{\n\t\tW: w,\n\t\tH: make(http.Header),\n\t}\n}", "func NewMockRecorder() *MockRecorder {\n\treturn &MockRecorder{}\n}", "func newHandler(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *handler {\n\tmock := &handler{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewBufferedWriter(client *driver.Client, db, collection string, size int) *BufferedWriterMongo {\n\tif size == 0 {\n\t\tsize = 1\n\t}\n\treturn &BufferedWriterMongo{\n\t\tbufferSize: size,\n\t\tdb: db,\n\t\tcollection: collection,\n\t\tbuffer: make([]interface{}, 0, size+1),\n\t\tclient: client,\n\t}\n}", "func (m *MockConn) WriteJSON(v interface{}) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"WriteJSON\", v)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func NewBatchedFileWriter(file *os.File, compressOnClose bool, logger logrus.FieldLogger) BatchedFileWriter {\n\treturn BatchedFileWriter{\n\t\tfile,\n\t\tcompressOnClose,\n\t\tnil,\n\t\tlogger,\n\t}\n}", "func NewMockKillWaiter(t mockConstructorTestingTNewMockKillWaiter) *MockKillWaiter {\n\tmock := &MockKillWaiter{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockLogger(t mockConstructorTestingTNewMockLogger) *MockLogger {\n\tmock := &MockLogger{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func CreateMock(method interface{}, url interface{}, headers interface{}, body interface{}) *go_mock_yourself_http.Mock {\n\tmockRequest := new(go_mock_yourself_http.Request)\n\n\tif method != nil {\n\t\tmockRequest.SetMethod(method)\n\t}\n\n\tif url != nil {\n\t\tmockRequest.SetUrl(url)\n\t}\n\n\tif body != nil {\n\t\tmockRequest.SetBody(body)\n\t}\n\n\tif headers != nil {\n\t\tmockRequest.SetHeaders(headers)\n\t}\n\n\tmockResponse := new(go_mock_yourself_http.Response)\n\tmockResponse.SetStatusCode(222)\n\tmockResponse.SetBody(\"i'm a cute loving mock, almost as cute as mumi, bichi and rasti\")\n\n\tmock, _ := go_mock_yourself_http.NewMock(\"my lovely testing mock\", mockRequest, mockResponse)\n\treturn mock\n}", "func NewMock() *MockMetrics {\n\treturn &MockMetrics{}\n}", "func NewMoveWeights(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MoveWeights {\n\tmock := 
&MoveWeights{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMock() *Mock {\n\treturn &Mock{now: time.Unix(0, 0)}\n}", "func NewTestWriter(t testSink) io.Writer {\n\treturn &testWriter{testSink: t}\n}", "func (_m *MockWriteBufferJsonBased) WriteString(logicalName string, bitLength uint32, encoding string, value string, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, encoding, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint32, string, string, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, encoding, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewRingMock(t minimock.Tester) *RingMock {\n\tm := &RingMock{t: t}\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.DecryptMock = mRingMockDecrypt{mock: m}\n\tm.DecryptMock.callArgs = []*RingMockDecryptParams{}\n\n\tm.EncryptMock = mRingMockEncrypt{mock: m}\n\tm.EncryptMock.callArgs = []*RingMockEncryptParams{}\n\n\treturn m\n}", "func (_m *MockWriteBufferJsonBased) WriteByteArray(logicalName string, data []byte, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, data)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, []byte, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, data, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewJSONWriter(logger logrus.FieldLogger) *JSONWriter {\n\twriter := &JSONWriter{\n\t\tlogger: logger,\n\t}\n\n\twriter.Reporter = writer.reporter\n\treturn writer\n}", "func NewShipmentBillableWeightCalculator(t mockConstructorTestingTNewShipmentBillableWeightCalculator) *ShipmentBillableWeightCalculator {\n\tmock := &ShipmentBillableWeightCalculator{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewJSONMarshaller() *Marshaller {\n\tm := Marshaller{jsonMarshal, jsonUnMarshal}\n\treturn &m\n}", "func newMetricsWriter(w http.ResponseWriter, r *http.Request, collector collector) *metricWriter {\n\tinfo := &Info{TimeStart: time.Now(), Request: r, Header: w.Header()}\n\treturn &metricWriter{w: w, info: info, collector: collector}\n}", "func NewMockFileWriter(ctrl *gomock.Controller) *MockFileWriter {\n\tmock := &MockFileWriter{ctrl: ctrl}\n\tmock.recorder = &MockFileWriterMockRecorder{mock}\n\treturn mock\n}", "func NewMockPlcWriteResponse(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockPlcWriteResponse {\n\tmock := &MockPlcWriteResponse{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockFinalizer() *MockFinalizer {\n\treturn &MockFinalizer{\n\t\tFinalizeFunc: &FinalizerFinalizeFunc{\n\t\t\tdefaultHook: func() error {\n\t\t\t\treturn nil\n\t\t\t},\n\t\t},\n\t}\n}", "func NewWriter(blk []byte) *Writer {\n\treturn &Writer{\n\t\tblk: blk,\n\t}\n}", "func NewWrappedWriter(writer store.Writer, onFinalize FinalizeFunc) *WrappedWriter {\n\treturn &WrappedWriter{writer: writer, onFinalize: onFinalize}\n}", "func 
NewBuildBucketInterface(t testing.TB) *BuildBucketInterface {\n\tmock := &BuildBucketInterface{}\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockHandlerExposer(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockHandlerExposer {\n\tmock := &MockHandlerExposer{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewWriter(tb testing.TB) io.Writer {\n\treturn &writer{\n\t\ttb: tb,\n\t}\n}", "func New() (*mock, error) {\n\treturn &mock{\n\t\tConfigService: ConfigService{},\n\t\tContainerService: ContainerService{},\n\t\tDistributionService: DistributionService{},\n\t\tImageService: ImageService{},\n\t\tNetworkService: NetworkService{},\n\t\tNodeService: NodeService{},\n\t\tPluginService: PluginService{},\n\t\tSecretService: SecretService{},\n\t\tServiceService: ServiceService{},\n\t\tSystemService: SystemService{},\n\t\tSwarmService: SwarmService{},\n\t\tVolumeService: VolumeService{},\n\t\tVersion: Version,\n\t}, nil\n}", "func NewMockTransport(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *MockTransport {\n\tmock := &MockTransport{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewFileSystem(t mockConstructorTestingTNewFileSystem) *FileSystem {\n\tmock := &FileSystem{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewMockWatcher(ctrl *gomock.Controller) *MockWatcher {\n\tmock := &MockWatcher{ctrl: ctrl}\n\tmock.recorder = &MockWatcherMockRecorder{mock}\n\treturn mock\n}", "func NewMockWatcher(ctrl *gomock.Controller) *MockWatcher {\n\tmock := &MockWatcher{ctrl: ctrl}\n\tmock.recorder = &MockWatcherMockRecorder{mock}\n\treturn mock\n}", "func NewMockWatcher(ctrl *gomock.Controller) *MockWatcher {\n\tmock := &MockWatcher{ctrl: ctrl}\n\tmock.recorder = &MockWatcherMockRecorder{mock}\n\treturn mock\n}", "func MakeJSONWriter(w io.Writer) JSONWriter {\n\treturn JSONWriter{w: w}\n}", "func NewBufferedWriter(w io.Writer, size int, mode IOMode) (bw *BufferedWriter) {\n\n\tbw = &BufferedWriter{\n\t\twriter: w,\n\t\tmode: mode,\n\t\tbuffer: make([]byte, size),\n\t\tbuffered: 0,\n\t\tmustFlush: false,\n\t}\n\n\treturn bw\n}", "func NewFormFiller(t interface {\n\tmock.TestingT\n\tCleanup(func())\n}) *FormFiller {\n\tmock := &FormFiller{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func newMockTransport() *mockTransport {\n\treturn &mockTransport{\n\t\turlToResponseAndError: make(map[string]mockTransportResponse),\n\t\trequestURLsReceived: make([]string, 0),\n\t}\n}", "func newMockSubscriber() mockSubscriber {\n\treturn mockSubscriber{}\n}", "func BenchmarkSuite(b *testing.B, newBenchmarker func() JSONBenchmarker) {\n\n\tb.Run(\"MarshalSmallPayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tresult := SmallPayload{}\n\t\tGenerateObjectFromFile(JSON_FILE_SMALL, &result)\n\t\tb.ResetTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tbm.Marshal(result)\n\t\t}\n\t})\n\n\tb.Run(\"MarshalLargePayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tresult := LargePayload{}\n\t\tGenerateObjectFromFile(JSON_FILE_LARGE, &result)\n\t\tb.ResetTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tbm.Marshal(result)\n\t\t}\n\t})\n\n\tb.Run(\"MarshalGeodataPayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tresult := GeoDataPayload{}\n\t\tGenerateObjectFromFile(JSON_FILE_GEODATA, &result)\n\t\tb.ResetTimer()\n\t\tfor i := 0; 
i < b.N; i++ {\n\t\t\tbm.Marshal(result)\n\t\t}\n\t})\n\n\tb.Run(\"UnmarshalSmallPayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tdata := ReadFile(JSON_FILE_SMALL)\n\t\tresult := &SmallPayload{}\n\t\tb.ResetTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tbm.Unmarshal(data, result)\n\t\t}\n\t})\n\n\tb.Run(\"UnmarshalLargePayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tdata := ReadFile(JSON_FILE_LARGE)\n\t\tresult := &LargePayload{}\n\t\tb.ResetTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tbm.Unmarshal(data, result)\n\t\t}\n\t})\n\n\tb.Run(\"UnmarshalGeodataPayload\", func(b *testing.B) {\n\t\tbm := newBenchmarker()\n\t\tdata := ReadFile(JSON_FILE_GEODATA)\n\t\tresult := &GeoDataPayload{}\n\t\tb.ResetTimer()\n\t\tfor i := 0; i < b.N; i++ {\n\t\t\tbm.Unmarshal(data, result)\n\t\t}\n\t})\n\n}", "func (_m *MockWriteBufferJsonBased) WriteBigFloat(logicalName string, bitLength uint8, value *big.Float, writerArgs ...WithWriterArgs) error {\n\t_va := make([]interface{}, len(writerArgs))\n\tfor _i := range writerArgs {\n\t\t_va[_i] = writerArgs[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, logicalName, bitLength, value)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, uint8, *big.Float, ...WithWriterArgs) error); ok {\n\t\tr0 = rf(logicalName, bitLength, value, writerArgs...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func NewCacheWriter(w http.ResponseWriter, useStale bool, staleStatuses []int) *CacheWriter {\n\treturn &CacheWriter{\n\t\tmiddleware.NewWrapResponseWriter(w, 0),\n\t\t&bytes.Buffer{},\n\t\t0,\n\t\tuseStale,\n\t\tstaleStatuses,\n\t}\n}", "func newFlushWriter(w io.Writer) flushWriter {\n\tfw := flushWriter{writer: w}\n\tif f, ok := w.(http.Flusher); ok {\n\t\tfw.flusher = f\n\t}\n\n\treturn fw\n}", "func NewMock() *Mock {\n\treturn &Mock{VolumesMock: &VolumesServiceMock{}}\n}", "func NewMockReplacer(\n\tctx context.Context,\n\tregion string,\n\tprofile string) *Replacer {\n\n\tasgroup := newAsg(region, profile)\n\tdeploy := fsm.NewDeploy(\"start\")\n\tasgroup.Ec2Api = &mockEC2iface{}\n\tasgroup.AsgAPI = &mockASGiface{}\n\tasgroup.EcsAPI = &mockECSiface{}\n\treturn &Replacer{\n\t\tctx: ctx,\n\t\tasg: asgroup,\n\t\tdeploy: deploy,\n\t}\n}", "func NewStorageMock(t minimock.Tester) *StorageMock {\n\tm := &StorageMock{t: t}\n\tif controller, ok := t.(minimock.MockController); ok {\n\t\tcontroller.RegisterMocker(m)\n\t}\n\n\tm.GetUserMock = mStorageMockGetUser{mock: m}\n\tm.GetUserMock.callArgs = []*StorageMockGetUserParams{}\n\n\tm.GetUserLocationMock = mStorageMockGetUserLocation{mock: m}\n\tm.GetUserLocationMock.callArgs = []*StorageMockGetUserLocationParams{}\n\n\tm.SaveUserMock = mStorageMockSaveUser{mock: m}\n\tm.SaveUserMock.callArgs = []*StorageMockSaveUserParams{}\n\n\tm.SaveUserLocationMock = mStorageMockSaveUserLocation{mock: m}\n\tm.SaveUserLocationMock.callArgs = []*StorageMockSaveUserLocationParams{}\n\n\treturn m\n}", "func NewHandlerMock() http.HandlerFunc{\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t_, err := w.Write([]byte(\"success\"))\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t}\n}", "func NewMockFormatter(ctrl *gomock.Controller) *MockFormatter {\n\tmock := &MockFormatter{ctrl: ctrl}\n\tmock.recorder = &MockFormatterMockRecorder{mock}\n\treturn mock\n}", "func NewMockObject(uid, name, ns string, res api.Resource) api.Object {\n\treturn NewObject(uuid.NewFromString(uid), name, ns, res)\n}", "func 
NewMockDataReceiverService_PutMetricServer(t mockConstructorTestingTNewMockDataReceiverService_PutMetricServer) *MockDataReceiverService_PutMetricServer {\n\tmock := &MockDataReceiverService_PutMetricServer{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func NewAdapter(t mockConstructorTestingTNewAdapter) *Adapter {\n\tmock := &Adapter{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}", "func TestTFramedMemoryBufferWrite(t *testing.T) {\n\tbuff := NewTMemoryOutputBuffer(100)\n\tassert.Equal(t, 4, buff.Len())\n\tn, err := buff.Write(make([]byte, 50))\n\tassert.Nil(t, err)\n\tassert.Equal(t, 50, n)\n\tn, err = buff.Write(make([]byte, 40))\n\tassert.Nil(t, err)\n\tassert.Equal(t, 40, n)\n\tassert.Equal(t, 94, buff.Len())\n\t_, err = buff.Write(make([]byte, 20))\n\tassert.True(t, IsErrTooLarge(err))\n\tassert.Equal(t, TRANSPORT_EXCEPTION_REQUEST_TOO_LARGE, err.(thrift.TTransportException).TypeId())\n\tassert.Equal(t, 4, buff.Len())\n}", "func NewJSONMarshaller() (*JSONMarshaller, error) {\n\tm, err := jsonformat.NewPrettyMarshaller(fhirversion.R4)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &JSONMarshaller{marshaller: m}, nil\n}", "func NewMockSupport() *MockSupport {\n\treturn &MockSupport{\n\t\tPublisher: NewBlockPublisher(),\n\t}\n}", "func NewWriter(path, label string, transformers []string, bufFreePool *WriteBufPool, errReporter *errors.Once) *Writer {\n\tmu := &sync.Mutex{}\n\tfw := &Writer{\n\t\tlabel: label,\n\t\tbufFreePool: bufFreePool,\n\t\tmu: mu,\n\t\tcond: sync.NewCond(mu),\n\t\tlastBlockFlushed: -1,\n\t\terr: errReporter,\n\t}\n\tfw.NewBuf()\n\t// Create a recordio file\n\tctx := vcontext.Background()\n\tout, err := file.Create(ctx, path)\n\tif err != nil {\n\t\tfw.err.Set(errors.E(err, fmt.Sprintf(\"fieldio newwriter %s\", path)))\n\t\treturn fw\n\t}\n\tfw.out = out\n\tfw.wout = out.Writer(ctx)\n\tfw.rio = recordio.NewWriter(fw.wout, recordio.WriterOpts{\n\t\tTransformers: transformers,\n\t\tMarshal: fw.marshalBlock,\n\t\tIndex: fw.indexCallback,\n\t\tMaxFlushParallelism: 2,\n\t})\n\tfw.rio.AddHeader(recordio.KeyTrailer, true)\n\treturn fw\n}" ]
[ "0.7246278", "0.56114584", "0.56084496", "0.5519499", "0.54873675", "0.5366856", "0.5335695", "0.5321919", "0.5263349", "0.51996076", "0.5151725", "0.50877833", "0.5083294", "0.50015545", "0.4987678", "0.49628332", "0.48980847", "0.48841673", "0.48751873", "0.48263922", "0.47666872", "0.47655633", "0.47646663", "0.4756443", "0.4756443", "0.4756443", "0.47517174", "0.47490028", "0.47369775", "0.47338632", "0.47337687", "0.47319558", "0.47070292", "0.4703129", "0.46992958", "0.46977416", "0.4692477", "0.46765032", "0.4649918", "0.4629432", "0.46018738", "0.4597608", "0.45954832", "0.45755884", "0.45726985", "0.4569506", "0.45466447", "0.45339578", "0.45335504", "0.45312548", "0.45280045", "0.4512967", "0.45087957", "0.4493689", "0.4466126", "0.44621214", "0.44535342", "0.44520918", "0.44453463", "0.44440192", "0.44429937", "0.44417655", "0.4440622", "0.44389725", "0.4437694", "0.44363147", "0.44361398", "0.44347647", "0.4431058", "0.44299412", "0.44249853", "0.44216543", "0.44145963", "0.44055974", "0.44031423", "0.44027328", "0.43941543", "0.43941543", "0.43941543", "0.4391114", "0.43831712", "0.43817165", "0.43770972", "0.437637", "0.43738642", "0.43615487", "0.43615335", "0.43518484", "0.43507257", "0.4348414", "0.43478695", "0.43463808", "0.43399405", "0.4334076", "0.43304324", "0.4323171", "0.43224987", "0.43163627", "0.4313424", "0.43098822" ]
0.89568764
0
NewService creates a new Service.
func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) {
	scopesOption := internaloption.WithDefaultScopes(
		"https://www.googleapis.com/auth/cloud-platform",
	)
	// NOTE: prepend, so we don't override user-specified scopes.
	opts = append([]option.ClientOption{scopesOption}, opts...)
	opts = append(opts, internaloption.WithDefaultEndpoint(basePath))
	opts = append(opts, internaloption.WithDefaultMTLSEndpoint(mtlsBasePath))
	client, endpoint, err := htransport.NewClient(ctx, opts...)
	if err != nil {
		return nil, err
	}
	s, err := New(client)
	if err != nil {
		return nil, err
	}
	if endpoint != "" {
		s.BasePath = endpoint
	}
	return s, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewService(name string, options types.ServiceCreateOptions) (*Service, error) {\n\tuuid, err := generateEntityID()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\turls := strings.Split(options.URLs, \",\")\n\n\tfor _, u := range urls {\n\t\tu = strings.Trim(u, \" \")\n\t}\n\n\ts := Service{\n\t\tID: uuid,\n\t\tName: name,\n\t\tURLs: urls,\n\t\tTargetVersion: \"\",\n\t\tBalancingMethod: options.Balancing,\n\t\tIsEnabled: options.Enable,\n\t}\n\n\treturn &s, nil\n}", "func NewService() Service {\n\treturn Service{}\n}", "func NewService(name string) *Service {\n\treturn &Service{\n\t\tname: name,\n\t}\n}", "func NewService(name string) (*Service, error) {\n\t\n\tif !(len(name) > 0) {\n\t\treturn nil, fmt.Errorf(\"Service name is incorrect.\")\n\t}\n\n\tservice := Service{\n\t\tname: name,\n\t}\n\n\treturn &service, nil\n}", "func newService(namespace, name string) *v1.Service {\n\treturn &v1.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: name,\n\t\t\tNamespace: namespace,\n\t\t\tLabels: labelMap(),\n\t\t},\n\t\tSpec: v1.ServiceSpec{\n\t\t\tSelector: labelMap(),\n\t\t\tPorts: []v1.ServicePort{\n\t\t\t\t{Name: \"port-1338\", Port: 1338, Protocol: \"TCP\", TargetPort: intstr.FromInt(1338)},\n\t\t\t\t{Name: \"port-1337\", Port: 1337, Protocol: \"TCP\", TargetPort: intstr.FromInt(1337)},\n\t\t\t},\n\t\t},\n\t}\n\n}", "func NewService(name string, service *Service) *Service {\n\tservice.Name = name\n\treturn service\n}", "func NewService() (service.Service, error) {\n\treturn &Service{}, nil\n}", "func NewService(name, url string) Service {\n\treturn Service{\n\t\tName: name,\n\t\tURL: url,\n\t}\n}", "func NewService() *Service {\n\treturn &Service{}\n}", "func NewService() *Service {\n\treturn &Service{}\n}", "func NewService() *Service {\n\treturn &Service{}\n}", "func NewService() *Service {\n\treturn &Service{}\n}", "func (c PGClient) NewService(name string, binsIB int64, host string, port int, typeService string, runSTR string, projects []string, owner string) (err error) {\n\t_, err = c.DB.Query(\"select new_service_function($1,$2,$3,$4,$5,$6,$7,$8)\", name, binsIB, host, port, typeService, runSTR, pg.Array(projects), owner)\n\treturn err\n}", "func NewService() Service {\n\treturn &service{}\n}", "func NewService() Service {\n\treturn &service{}\n}", "func NewService() Service {\n\treturn &service{}\n}", "func NewService(args []string, p person.Service, ns serializer.Serializer) error {\n\tcli := service{args, p, ns}\n\tif err := cli.checkArgs(); err != nil {\n\t\treturn err\n\t}\n\n\tif err := cli.runArgs(); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func NewService(savedPath string) *Service {\n\treturn &Service{\n\t\tsavedPath: savedPath,\n\t}\n}", "func NewService(serviceName string) *ServiceObject {\n\tserviceObject := ServiceObject{}\n\tserviceObject.serviceName = serviceName\n\tserviceObject.serviceStatusHandle = 0\n\tserviceObject.serviceExit = make(chan bool)\n\treturn &serviceObject\n}", "func newService(serviceName string) *Service {\n\treturn &Service{\n\t\tpluginDir: serverless.PluginDir,\n\t\tname: serviceName,\n\t\tinterf: nil,\n\t}\n}", "func NewService(input NewServiceInput) *Service {\n\treturn &Service{\n\t\tdataSvc: input.DataSvc,\n\t}\n}", "func NewService() Service {\n\treturn Service{\n\t\trepo: newRepository(),\n\t}\n}", "func NewService(m Model) Service {\n\treturn &service{m}\n}", "func NewService() *Service {\n\treturn new(Service)\n}", "func NewService() *Service {\n\treturn &Service{\n\t\tStore: NewStore(),\n\t}\n}", "func 
NewService(config Config) *Service {\n\n\treturn &Service{\n\t\tclient: NewClient(config),\n\t}\n}", "func NewService(config ServiceConfig) Service {\n\treturn Service{}\n}", "func NewService(m map[string]interface{}) Service {\n\treturn m\n}", "func NewService(name, version string, store store.KeyValueStore) (*Service, error) {\n\tlog.Debugf(\"[Azure CNS] Going to create a service object with name: %v. version: %v.\", name, version)\n\n\tsvc := &Service{\n\t\tName: name,\n\t\tVersion: version,\n\t\tOptions: make(map[string]interface{}),\n\t\tStore: store,\n\t}\n\n\tlog.Debugf(\"[Azure CNS] Finished creating service object with name: %v. version: %v.\", name, version)\n\treturn svc, nil\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\targs = &ServiceArgs{}\n\t}\n\n\tvar resource Service\n\terr := ctx.RegisterResource(\"aws:vpclattice/service:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func New() *service {\n\treturn &service{}\n}", "func NewService(r repository, e event) *Service {\n\treturn &Service{r, e}\n}", "func NewService(addr string) *Service {\n\treturn &Service{\n\t\taddr: addr,\n\t}\n}", "func newService(cr *argoprojv1a1.ArgoCD) *corev1.Service {\n\treturn &corev1.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: cr.Name,\n\t\t\tNamespace: cr.Namespace,\n\t\t\tLabels: argoutil.LabelsForCluster(cr),\n\t\t},\n\t}\n}", "func CreateService(name string) *corev1.Service {\n\treturn &corev1.Service{\n\t\tTypeMeta: genTypeMeta(gvk.Service),\n\t\tObjectMeta: genObjectMeta(name, true),\n\t}\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.TaskSpec == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'TaskSpec'\")\n\t}\n\tvar resource Service\n\terr := ctx.RegisterResource(\"docker:index/service:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func newService() *service {\n\n\tctx, cancel := context.WithCancel(context.Background())\n\n\treturn &service{\n\t\tctx: ctx,\n\t\tctxCancel: cancel,\n\t}\n}", "func New() *Service {\n\treturn &Service{}\n}", "func New() *Service {\n\treturn &Service{}\n}", "func New() *Service {\n\treturn &Service{}\n}", "func NewService(name string, namespace string, servicePorts []core.ServicePort) *core.Service {\n\n\tlabels := GetCommonLabels()\n\n\treturn &core.Service{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tKind: \"Service\",\n\t\t\tAPIVersion: core.SchemeGroupVersion.String(),\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: name,\n\t\t\tNamespace: namespace,\n\t\t\tLabels: labels,\n\t\t},\n\t\tSpec: core.ServiceSpec{\n\t\t\tSelector: map[string]string{\n\t\t\t\t\"component\": AppName,\n\t\t\t},\n\t\t\tPorts: servicePorts,\n\t\t},\n\t}\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.NamespaceId == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'NamespaceId'\")\n\t}\n\tif args.ServiceId == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 
'ServiceId'\")\n\t}\n\treplaceOnChanges := pulumi.ReplaceOnChanges([]string{\n\t\t\"location\",\n\t\t\"namespaceId\",\n\t\t\"project\",\n\t\t\"serviceId\",\n\t})\n\topts = append(opts, replaceOnChanges)\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource Service\n\terr := ctx.RegisterResource(\"google-native:servicedirectory/v1:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func NewService() Service {\n\treturn dateService{}\n}", "func NewService(db Database) *Service {\n\treturn &Service{db: db}\n}", "func NewService(config *Config) *Service {\n\tsvc := new(Service)\n\n\tsvc.config = config\n\n\treturn svc\n}", "func NewService(client *clients.Client) *Service {\n\treturn &Service{\n\t\tclient: client,\n\t}\n}", "func NewService(g *Greeter) *Service {\n\treturn &Service{greeter: g}\n}", "func NewService(r repository.Repository) *Service {\n\treturn &Service{\n\t\trepo: r,\n\t}\n}", "func NewService(kind, ip string, port int) *Service {\n\treturn &Service{\n\t\tKind: kind,\n\t\tIP: ip,\n\t\tPort: port,\n\t\tPoints: 0,\n\t}\n}", "func NewService(name string) (*Service, error) {\n\tserviceLock.Lock()\n\tdefer serviceLock.Unlock()\n\n\tif _, ok := services[name]; ok {\n\t\treturn nil, errors.New(fmt.Sprintf(\"service '%s' already exists\", name))\n\t}\n\n\tservices[name] = &Service{\n\t\tName: name,\n\t\tEnabled: false,\n\t\tRole: ROLE_WEBSERVER,\n\t\tListeners: make(map[string]*ServiceListener),\n\t}\n\n\tgo services[name].requestPump()\n\n\treturn services[name], nil\n}", "func NewService(name string, serviceConfig *config.ServiceConfig, context *ctx.Context) *Service {\n\treturn &Service{\n\t\tname: name,\n\t\tproject: context.Project,\n\t\tserviceConfig: serviceConfig,\n\t\tclientFactory: context.ClientFactory,\n\t\tauthLookup: context.AuthLookup,\n\t\tcontext: context,\n\t}\n}", "func NewService(r Repository, l listing.Service, v validating.Service, mineRate int64) Service {\n\treturn &service{r, l, v, mineRate}\n}", "func NewService(conf Config, deps Dependencies) (Service, error) {\n\treturn &service{\n\t\tConfig: conf,\n\t\tDependencies: deps,\n\t}, nil\n}", "func NewService(s Storage) *Service {\n\treturn &Service{s}\n}", "func NewService(s Storage) *Service {\n\treturn &Service{s}\n}", "func NewService(config *Config) storage.Service {\n\treturn &service{config: config}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{r}\n}", "func (c *Client) CreateService(namespace, repoName string, in *api.CreateServiceRequest) (*api.Service, error) {\n\tout := &api.Service{}\n\trawURL := fmt.Sprintf(pathServices, c.base.String(), namespace, repoName)\n\terr := c.post(rawURL, true, http.StatusCreated, in, out)\n\treturn out, errio.Error(err)\n}", "func NewService(http webreq.HTTP, secret string) Service {\n\treturn Service{\n\t\thttp: http,\n\t\tsecret: secret,\n\t}\n}", "func NewService(ds ReserveStore) Service {\n\treturn &service{\n\t\tds: ds,\n\t}\n}", "func NewService(db *bolt.DB) (*Service, error) {\n\terr := internal.CreateBucket(db, BucketName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Service{\n\t\tdb: db,\n\t}, nil\n}", "func NewService(db *bolt.DB) (*Service, error) {\n\terr := internal.CreateBucket(db, BucketName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Service{\n\t\tdb: db,\n\t}, nil\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\targs = 
&ServiceArgs{}\n\t}\n\tvar resource Service\n\terr := ctx.RegisterResource(\"aws:servicediscovery/service:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func NewService() *Service {\n\ts := &Service{\n\t\tch: make(chan bool),\n\t\twaitGroup: &sync.WaitGroup{},\n\t}\n\ts.waitGroup.Add(1)\n\treturn s\n}", "func NewService(options *Options) *Service {\n\treturn &Service{\n\t\tConfig: NewServiceConfig(options),\n\t}\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.Location == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'Location'\")\n\t}\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource Service\n\terr := ctx.RegisterResource(\"gcp:cloudrun/service:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func New() Service {\n\treturn &service{}\n}", "func New() Service {\n\treturn &service{}\n}", "func NewService(group, name string, allowedMethods []string, shutdownFunc ShutdownFunc, version BuildVersion,\n\tmeta map[string]string) Service {\n\n\topt := NewServiceOptions(group, name, allowedMethods, shutdownFunc, version, meta)\n\n\treturn NewCustomService(opt)\n}", "func NewService(storage Storage, factory Factory) *Service {\n\treturn &Service{\n\t\tstorage: storage,\n\t\tfactory: factory,\n\t}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{repository: r}\n}", "func NewService(repo rel.Repository) *Service {\n\treturn &Service{repository: repo}\n}", "func NewService(ctx *pulumi.Context,\n\tname string, args *ServiceArgs, opts ...pulumi.ResourceOption) (*Service, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.ServiceName == nil {\n\t\treturn nil, errors.New(\"invalid value for required argument 'ServiceName'\")\n\t}\n\topts = internal.PkgResourceDefaultOpts(opts)\n\tvar resource Service\n\terr := ctx.RegisterResource(\"gcp:endpoints/service:Service\", name, args, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func NewService(repo repository) *Service {\n\tmapper := mapper{}\n\treturn &Service{\n\t\terrCtx: \"service\",\n\t\tcreator: repo,\n\t\tretriever: repo,\n\t\teraser: repo,\n\t\tinputMapper: mapper,\n\t\toutputMapper: mapper,\n\t}\n}", "func NewService() *Service {\n\treturn &Service{\n\t\tentities: map[message.EntityId]*Entity{},\n\t\tdeltas: delta.NewList(),\n\t}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{\n\t\trepo: r,\n\t}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{\n\t\trepo: r,\n\t}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{\n\t\trepo: r,\n\t}\n}", "func NewService(r Repository) *Service {\n\treturn &Service{\n\t\trepo: r,\n\t}\n}", "func newService(m *influxdatav1alpha1.Influxdb) *corev1.Service {\n\tls := labelsForInfluxdb(m.Name)\n\n\treturn &corev1.Service{\n\t\tTypeMeta: metav1.TypeMeta{\n\t\t\tAPIVersion: \"v1\",\n\t\t\tKind: \"Service\",\n\t\t},\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: m.Name + \"-svc\",\n\t\t\tNamespace: m.Namespace,\n\t\t\tLabels: ls,\n\t\t},\n\t\tSpec: corev1.ServiceSpec{\n\t\t\tSelector: ls,\n\t\t\tType: \"ClusterIP\",\n\t\t\tPorts: newServicePorts(m),\n\t\t},\n\t}\n}", "func 
NewService(db *pg.DB) *Service {\n\treturn &Service{database: db}\n}", "func NewService(ci CIServer, r Repository) *Service {\n\treturn &Service{ci, r}\n}", "func NewService(config Config) *Service {\n\treturn &Service{\n\t\tinstances: new(sync.Map),\n\t\tconfig: config,\n\t}\n}", "func NewService(m *manager.Manager, hot ffs.HotStorage) *Service {\n\treturn &Service{\n\t\tm: m,\n\t\thot: hot,\n\t}\n}", "func NewService(server string) (Service, error) {\n\tif strings.HasPrefix(server, \"ssh://\") {\n\t\treturn NewSSHService(server)\n\t}\n\n\tif strings.HasPrefix(server, \"mrt://\") {\n\t\treturn NewMarathonService(server)\n\t}\n\n\treturn nil, ErrServiceNotFound\n}", "func NewService(r Repository) *Service {\n\treturn &Service{repo: r}\n}", "func NewService(repository todo.Repository) Service {\n\treturn &service{repository}\n}", "func NewService(config ServiceConfig) (*Service, error) {\n\t// Dependencies.\n\tif config.IDService == nil {\n\t\treturn nil, maskAnyf(invalidConfigError, \"ID service must not be empty\")\n\t}\n\tif config.PeerCollection == nil {\n\t\treturn nil, maskAnyf(invalidConfigError, \"peer collection must not be empty\")\n\t}\n\n\tID, err := config.IDService.New()\n\tif err != nil {\n\t\treturn nil, maskAny(err)\n\t}\n\n\tnewService := &Service{\n\t\t// Dependencies.\n\t\tpeer: config.PeerCollection,\n\n\t\t// Internals.\n\t\tbootOnce: sync.Once{},\n\t\tcloser: make(chan struct{}, 1),\n\t\tmetadata: map[string]string{\n\t\t\t\"id\": ID,\n\t\t\t\"kind\": \"read/information/sequence\",\n\t\t\t\"name\": \"clg\",\n\t\t\t\"type\": \"service\",\n\t\t},\n\t\tshutdownOnce: sync.Once{},\n\t}\n\n\treturn newService, nil\n}", "func NewService(pub Publisher, st Storer) *Service {\n\treturn &Service{pub: pub, st: st}\n}", "func NewService(config *Config, pastelClient pastel.Client, nodeClient node.Client) *Service {\n\treturn &Service{\n\t\tconfig: config,\n\t\tpastelClient: pastelClient,\n\t\tnodeClient: nodeClient,\n\t\tWorker: task.NewWorker(),\n\t}\n}", "func NewService(ctx context.Context) (*Service, error) {\n\tclient, _, err := htransport.NewClient(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsvc := &Service{client: client, BasePath: basePath}\n\tsvc.API = New(svc)\n\n\treturn svc, nil\n}", "func NewService(repository Repository) Service {\n\treturn &service{repository}\n}", "func NewService(ctx context.Context, client *http.Client) (*Service, error) {\n\tmsClient := NewClient(ctx, client)\n\tsvc := &Service{\n\t\tctx: ctx,\n\t\tclient: msClient,\n\t}\n\treturn svc, nil\n}", "func (r *ReconcileNameService) newService(nameService *rocketmqv1beta1.NameService) *corev1.Service {\n\treturn &corev1.Service{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: nameService.Name,\n\t\t\tNamespace: nameService.Namespace,\n\t\t\tOwnerReferences: []metav1.OwnerReference{\n\t\t\t\t{\n\t\t\t\t\tName: nameService.GetName(),\n\t\t\t\t\tKind: nameService.Kind,\n\t\t\t\t\tAPIVersion: nameService.APIVersion,\n\t\t\t\t\tUID: nameService.GetUID(),\n\t\t\t\t\tController: &(share.BoolTrue),\n\t\t\t\t\tBlockOwnerDeletion: &(share.BoolTrue),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tSpec: corev1.ServiceSpec{\n\t\t\tClusterIP: \"None\",\n\t\t\tPorts: []corev1.ServicePort{\n\t\t\t\t{\n\t\t\t\t\tName: \"cluster\",\n\t\t\t\t\tPort: cons.NameServiceMainContainerPort,\n\t\t\t\t\tTargetPort: intstr.FromInt(cons.NameServiceMainContainerPort),\n\t\t\t\t\tProtocol: corev1.ProtocolTCP,\n\t\t\t\t},\n\t\t\t},\n\t\t\tSelector: map[string]string{\n\t\t\t\t\"app\": nameService.Name,\n\t\t\t},\n\t\t},\n\t}\n}", "func 
NewService(p Provider) *Service {\n\ts := &Service{prov: p}\n\ts.ratePeriod = time.Minute\n\ts.rateLimit = 10\n\ts.startTimes = make([]time.Time, s.rateLimit)\n\n\ts.name = p.Name()\n\ts.desc = p.Description()\n\ts.conflicts = append([]string{}, p.Conflicts()...)\n\ts.depends = append([]string{}, p.Depends()...)\n\ts.provides = append([]string{}, p.Provides()...)\n\ts.mlog = NewMultiLogger()\n\ts.mlog.Logger().SetPrefix(\"[\" + s.Name() + \"] \")\n\ts.prov.SetProperty(PropLogger, s.mlog.Logger())\n\ts.slog = NewLog()\n\ts.mlog.AddLogger(log.New(s.slog, \"\", 0))\n\tp.SetProperty(PropNotify, s.doNotify)\n\treturn s\n}", "func NewService(repository Repository) Service {\n\treturn &defaultService{repository}\n}", "func NewService(c *config.Configs, e *config.ErrorMessage, r Repository, l glog.Logger) *Service {\n\treturn &Service{\n\t\tconf: c,\n\t\tem: e,\n\t\trepo: r,\n\t\tlogger: l,\n\t}\n}", "func NewService(r *Repository) Service {\n\treturn &service{repo: r}\n}", "func NewService(repo interfaces.IRepository) interfaces.IService {\n\treturn &Service{\n\t\trepo: repo,\n\t}\n}", "func NewService(vendor string, product string, version string, url string) (*Service, error) {\n\ts := Service{\n\t\tvendor: vendor,\n\t\tproduct: product,\n\t\tversion: version,\n\t\turl: url,\n\t\tinterfaces: make(map[string]dispatcher),\n\t\tdescriptions: make(map[string]string),\n\t}\n\terr := s.RegisterInterface(orgvarlinkserviceNew())\n\n\treturn &s, err\n}", "func NewService() *Service {\n\tnewDB := map[string]string{\n\t\t\"1\": \"one\",\n\t\t\"2\": \"two\",\n\t\t\"3\": \"three\",\n\t\t\"4\": \"four\",\n\t\t\"5\": \"five\",\n\t\t\"6\": \"six\",\n\t}\n\tservice := Service{\n\t\tdb: newDB,\n\t}\n\treturn &service\n}" ]
[ "0.7932107", "0.7874965", "0.7836773", "0.77914655", "0.77907497", "0.77900565", "0.77126193", "0.77102643", "0.76937056", "0.76937056", "0.76937056", "0.76937056", "0.7650738", "0.7610006", "0.7610006", "0.7610006", "0.76085186", "0.7607554", "0.75727904", "0.75673425", "0.755689", "0.75552464", "0.75234705", "0.75227076", "0.752224", "0.75195473", "0.7515009", "0.75120765", "0.7462799", "0.74592984", "0.74480826", "0.7436009", "0.7430439", "0.7422395", "0.741579", "0.74069315", "0.7405575", "0.74047536", "0.74047536", "0.74047536", "0.7398082", "0.73825365", "0.7379872", "0.73785084", "0.7376173", "0.73758054", "0.7368401", "0.73550403", "0.7342177", "0.73291487", "0.732431", "0.7316671", "0.73031896", "0.7301512", "0.7301512", "0.72954243", "0.7278592", "0.7272954", "0.7266047", "0.725831", "0.7256935", "0.7256935", "0.7255048", "0.72534245", "0.72526425", "0.72526217", "0.7248108", "0.7248108", "0.72457033", "0.7244115", "0.7241115", "0.7235203", "0.723499", "0.72314787", "0.7231245", "0.7219476", "0.7219476", "0.7219476", "0.7219476", "0.7219307", "0.72168434", "0.7215043", "0.7211228", "0.7206465", "0.7204499", "0.7197616", "0.7191045", "0.7190582", "0.7183355", "0.71755296", "0.71730566", "0.7171809", "0.7167802", "0.71642566", "0.71630174", "0.71604455", "0.71556664", "0.7154411", "0.71498895", "0.71455854", "0.7144916" ]
0.0
-1